diff --git a/applications/composer/backend/backend/settings.py b/applications/composer/backend/backend/settings.py index 2291ad09..ffc2bf20 100644 --- a/applications/composer/backend/backend/settings.py +++ b/applications/composer/backend/backend/settings.py @@ -126,7 +126,7 @@ LANGUAGE_CODE = "en-us" -TIME_ZONE = "CET" +TIME_ZONE = "Europe/Paris" USE_I18N = True @@ -412,10 +412,8 @@ if DEBUG: INSTALLED_APPS += [ "debug_toolbar", - 'silk', ] MIDDLEWARE += [ - 'silk.middleware.SilkyMiddleware', "debug_toolbar.middleware.DebugToolbarMiddleware", ] DEBUG_TOOLBAR_CONFIG = { diff --git a/applications/composer/backend/backend/urls.py b/applications/composer/backend/backend/urls.py index e90ec0be..c2c46dbd 100644 --- a/applications/composer/backend/backend/urls.py +++ b/applications/composer/backend/backend/urls.py @@ -55,5 +55,4 @@ urlpatterns += [ # Debug toolbar path("__debug__/", include("debug_toolbar.urls")), - path('silk/', include('silk.urls', namespace='silk')) ] diff --git a/applications/composer/backend/composer/admin.py b/applications/composer/backend/composer/admin.py index 9563559c..17069fc8 100644 --- a/applications/composer/backend/composer/admin.py +++ b/applications/composer/backend/composer/admin.py @@ -21,12 +21,15 @@ from composer.models import ( AlertType, ConnectivityStatementTriple, + ConnectivityStatementText, + ConnectivityStatementAnatomicalEntity, Phenotype, Relationship, Sex, PopulationSet, ConnectivityStatement, Provenance, + ExpertConsultant, ExportBatch, Note, Profile, @@ -45,7 +48,7 @@ Region, AnatomicalEntityIntersection, AnatomicalEntity, - CSState + CSState, ) @@ -64,6 +67,11 @@ class ProvenanceInline(admin.StackedInline): extra = 1 +class ExpertConsultantInline(admin.StackedInline): + model = ExpertConsultant + extra = 1 + + class SynonymInline(admin.StackedInline): model = Synonym extra = 1 @@ -104,9 +112,50 @@ class StatementAlertInline(admin.StackedInline): class RelationshipAdmin(admin.ModelAdmin): - list_display = ("title", 
"predicate_name", "predicate_uri", "type", "order") + list_display = ("title", "predicate_name", "predicate_uri", "type", "order", "has_custom_code") ordering = ("order",) search_fields = ("title", "predicate_name", "predicate_uri") + fieldsets = ( + (None, { + 'fields': ('title', 'predicate_name', 'predicate_uri', 'type', 'order') + }), + ('Custom Ingestion Code', { + 'classes': ('collapse',), + 'fields': ('custom_ingestion_code',), + 'description': ( + 'Add custom Python code to extract data from NeuroDM during ingestion. ' + ), + }), + ) + + def get_form(self, request, obj=None, **kwargs): + form = super().get_form(request, obj, **kwargs) + if 'custom_ingestion_code' in form.base_fields: + form.base_fields['custom_ingestion_code'].widget = forms.Textarea(attrs={ + 'rows': 15, + 'cols': 100, + 'style': 'font-family: monospace; font-size: 12px;' + }) + form.base_fields['custom_ingestion_code'].help_text = mark_safe( + "Optional Python code to extract data from NeuroDM for this relationship during ingestion.
" + "The code has access to:
" + "• fc: dict with neuron properties (id, label, species, phenotype, etc.)
" + "• fc[\"_neuron\"]: the NeuroDM neuron object

" + "The code must define a result variable with the output:
" + "• For TRIPLE relationships: list of dicts [{'name': str, 'uri': str}, ...]
" + "• For TEXT relationships: list of strings or single string
" + "• For ANATOMICAL_ENTITY relationships:
" + "  - Simple entities: list of URI strings ['http://purl.obolibrary.org/obo/UBERON_0001234', ...]
" + "  - Region-layer pairs: list of dicts [{'region': 'region_uri', 'layer': 'layer_uri'}, ...]
" + "  - Mixed: list combining both formats
" + "  - Note: Region-layer pairs respect the 'update_anatomical_entities' flag

" + "Errors are logged to the ingestion anomalies file and the relationship will be skipped." + ) + return form + + @admin.display(description="Has Custom Code", boolean=True) + def has_custom_code(self, obj): + return bool(obj.custom_ingestion_code and obj.custom_ingestion_code.strip()) class TripleAdmin(admin.ModelAdmin): list_display = ("name", "uri", "relationship") @@ -117,7 +166,9 @@ class TripleAdmin(admin.ModelAdmin): def get_form(self, request, obj=None, **kwargs): form = super().get_form(request, obj, **kwargs) if "relationship" in form.base_fields: - form.base_fields["relationship"].queryset = Relationship.objects.exclude(type=RelationshipType.TEXT) + form.base_fields["relationship"].queryset = Relationship.objects.exclude( + type__in=[RelationshipType.TEXT, RelationshipType.ANATOMICAL_MULTI] + ) return form class ConnectivityStatementInline(nested_admin.NestedStackedInline): @@ -267,8 +318,52 @@ class DestinationInline(admin.TabularInline): class ConnectivityStatementTripleInline(admin.TabularInline): model = ConnectivityStatementTriple extra = 1 - autocomplete_fields = ("relationship", "triple") - fields = ("relationship", "triple", "free_text") + autocomplete_fields = ("relationship",) + raw_id_fields = ("triples",) + fields = ("relationship", "triples") + + def get_form(self, request, obj=None, **kwargs): + form = super().get_form(request, obj, **kwargs) + if "relationship" in form.base_fields: + # Only show triple relationship types + form.base_fields["relationship"].queryset = Relationship.objects.filter( + type__in=[RelationshipType.TRIPLE_SINGLE, RelationshipType.TRIPLE_MULTI] + ) + return form + + +class ConnectivityStatementTextInline(admin.TabularInline): + model = ConnectivityStatementText + extra = 1 + autocomplete_fields = ("relationship",) + fields = ("relationship", "text") + + def get_form(self, request, obj=None, **kwargs): + form = super().get_form(request, obj, **kwargs) + if "relationship" in form.base_fields: + # Only show text 
relationship types + form.base_fields["relationship"].queryset = Relationship.objects.filter( + type=RelationshipType.TEXT + ) + return form + + +class ConnectivityStatementAnatomicalEntityInline(admin.TabularInline): + model = ConnectivityStatementAnatomicalEntity + extra = 1 + autocomplete_fields = ("relationship",) + raw_id_fields = ("anatomical_entities",) + fields = ("relationship", "anatomical_entities") + + def get_form(self, request, obj=None, **kwargs): + form = super().get_form(request, obj, **kwargs) + if "relationship" in form.base_fields: + # Only show anatomical entity relationship types + form.base_fields["relationship"].queryset = Relationship.objects.filter( + type__in=[RelationshipType.ANATOMICAL_MULTI] + ) + return form + class ConnectivityStatementAdmin( SortableAdminBase, FSMTransitionMixin, admin.ModelAdmin @@ -279,25 +374,24 @@ class ConnectivityStatementAdmin( readonly_fields = ( "state", "curie_id", - "has_statement_been_exported", "reference_uri", + "population_index" ) - exclude = ("journey_path", "statement_prefix", "statement_suffix", "population_index") + exclude = ("journey_path", "statement_prefix", "statement_suffix", ) autocomplete_fields = ("sentence", "origins") date_hierarchy = "modified_date" list_display = ( "sentence", - "pmid", - "pmcid", "short_ks", + "population_set_name", + "population_index", "tag_list", "state", - "has_notes", "owner", ) - list_display_links = ("sentence", "pmid", "pmcid", "short_ks", "state") - list_filter = ("state", "owner", "tags__tag") - list_select_related = ("sentence", "origins", "destinations") + list_display_links = ("sentence", "short_ks", "state") + list_filter = ("state", "population", "owner", "tags__tag") + list_select_related = ("sentence", "population", "owner", "origins", "destinations") search_fields = ( "sentence__title", "sentence__text", @@ -309,8 +403,9 @@ class ConnectivityStatementAdmin( fieldsets = () - inlines = (ProvenanceInline, NoteConnectivityStatementInline, - ViaInline, 
DestinationInline, StatementAlertInline, ConnectivityStatementTripleInline) + inlines = (ProvenanceInline, ExpertConsultantInline, NoteConnectivityStatementInline, + ViaInline, DestinationInline, StatementAlertInline, ConnectivityStatementTripleInline, + ConnectivityStatementTextInline, ConnectivityStatementAnatomicalEntityInline) def _filter_admin_transitions(self, transitions_generator): """ @@ -337,17 +432,9 @@ def delete_queryset(self, request, queryset): def short_ks(self, obj): return str(obj) - @admin.display(description="PMID") - def pmid(self, obj): - return obj.sentence.pmid - - @admin.display(description="PMCID") - def pmcid(self, obj): - return obj.sentence.pmcid - - @admin.display(description="REFERENCE") - def reference(self, obj): - return str(obj) + @admin.display(description="Population Set") + def population_set_name(self, obj): + return obj.population.name if obj.population else "-" class ExportBatchAdmin(admin.ModelAdmin): @@ -391,6 +478,45 @@ class IngestSentenceForm(forms.Form): file = forms.FileField(label="CSV file") +class IngestStatementsForm(forms.Form): + """Form for configuring connectivity statement ingestion parameters""" + update_upstream = forms.BooleanField( + required=False, + initial=False, + label="Update upstream statements", + help_text="Set this flag to update upstream statements." + ) + update_anatomical_entities = forms.BooleanField( + required=False, + initial=False, + label="Update anatomical entities", + help_text="Set this flag to try move anatomical entities to specific layer, region." + ) + disable_overwrite = forms.BooleanField( + required=False, + initial=False, + label="Disable overwrite", + help_text="Set this flag to prevent overwriting existing statements." 
+ ) + full_imports = forms.CharField( + required=False, + widget=forms.Textarea(attrs={'rows': 3, 'placeholder': 'Enter URIs separated by commas or new lines'}), + label="Full imports", + help_text="List of full imports to include in the ingestion (comma or newline separated)." + ) + label_imports = forms.CharField( + required=False, + widget=forms.Textarea(attrs={'rows': 3, 'placeholder': 'Enter labels separated by commas or new lines'}), + label="Label imports", + help_text="List of label imports to include in the ingestion (comma or newline separated)." + ) + population_file = forms.FileField( + required=False, + label="Population file", + help_text="Text file containing population URIs (one per line). Only statements matching these URIs will be processed." + ) + + # Custom view for ingesting sentences from a CSV file def ingest_sentences_view(request): output = None @@ -423,11 +549,51 @@ def ingest_sentences_view(request): return render(request, "admin/ingest_sentences.html", context) +# Custom view for downloading ingestion log files +def download_logs_view(request): + """ + Admin page with links to download ingestion log files. + """ + context = admin.site.each_context(request) + context.update({ + "title": "Download Ingestion Logs", + "anomalies_url": reverse('composer-api:ingestion-logs') + '?log_type=anomalies', + "ingested_url": reverse('composer-api:ingestion-logs') + '?log_type=ingested', + }) + return render(request, "admin/download_logs.html", context) + + +# Custom view for ingesting connectivity statements +def ingest_statements_view(request): + """ + Admin page for configuring and triggering connectivity statement ingestion. 
+ """ + context = admin.site.each_context(request) + if request.method == "POST": + form = IngestStatementsForm(request.POST, request.FILES) + if form.is_valid(): + context.update({ + "form": form, + "title": "Ingest Connectivity Statements", + }) + return render(request, "admin/ingest_statements.html", context) + else: + form = IngestStatementsForm() + + context.update({ + "form": form, + "title": "Ingest Connectivity Statements", + }) + return render(request, "admin/ingest_statements.html", context) + + def custom_admin_urls(original_get_urls): def get_urls(): urls = original_get_urls() custom_urls = [ path('ingest-sentences/', admin.site.admin_view(ingest_sentences_view), name='ingest-sentences'), + path('ingest-statements/', admin.site.admin_view(ingest_statements_view), name='ingest-statements'), + path('download-logs/', admin.site.admin_view(download_logs_view), name='download-logs'), ] return custom_urls + urls return get_urls diff --git a/applications/composer/backend/composer/api/serializers.py b/applications/composer/backend/composer/api/serializers.py index 01c19c9f..84c35649 100644 --- a/applications/composer/backend/composer/api/serializers.py +++ b/applications/composer/backend/composer/api/serializers.py @@ -6,17 +6,21 @@ from drf_writable_nested.serializers import WritableNestedModelSerializer from rest_framework import serializers -from ..enums import BulkActionType, RelationshipType, SentenceState, CSState +from ..enums import RelationshipType, SentenceState, CSState +from ..pure_enums import BulkActionType from ..models import ( AlertType, AnatomicalEntity, ConnectivityStatementTriple, + ConnectivityStatementText, + ConnectivityStatementAnatomicalEntity, Phenotype, ProjectionPhenotype, Relationship, Sex, ConnectivityStatement, Provenance, + ExpertConsultant, Note, Profile, Sentence, @@ -380,6 +384,22 @@ class ProvenanceCreateSerializer(serializers.Serializer): uri = serializers.CharField(required=True) +class 
ExpertConsultantSerializer(serializers.ModelSerializer): + """Expert Consultant""" + + uri = serializers.CharField() + connectivity_statement_id = serializers.IntegerField(required=True) + + class Meta: + model = ExpertConsultant + fields = ("id", "uri", "connectivity_statement_id") + + +class ExpertConsultantCreateSerializer(serializers.Serializer): + """Serializer for creating expert consultant via request body""" + uri = serializers.CharField(required=True) + + class SentenceConnectivityStatement(serializers.ModelSerializer): """Connectivity Statement""" @@ -672,56 +692,96 @@ class Meta: class ConnectivityStatementTripleSerializer(serializers.ModelSerializer): - value = serializers.SerializerMethodField() + """Serializer for triple-based relationships (single/multi select from triples)""" connectivity_statement = serializers.PrimaryKeyRelatedField( queryset=ConnectivityStatement.objects.all() ) relationship = serializers.PrimaryKeyRelatedField( - queryset=Relationship.objects.all() + queryset=Relationship.objects.filter( + type__in=[RelationshipType.TRIPLE_SINGLE, RelationshipType.TRIPLE_MULTI] + ) + ) + triples = serializers.PrimaryKeyRelatedField( + many=True, + queryset=Triple.objects.all(), + write_only=True ) class Meta: model = ConnectivityStatementTriple - fields = ["id", "connectivity_statement", "relationship", "value"] + fields = ["id", "connectivity_statement", "relationship", "triples"] - def get_value(self, obj): - if obj.relationship.type == RelationshipType.TEXT: - return obj.free_text - if obj.triple: - return obj.triple.id - return None + def to_representation(self, instance): + """Return full triple objects in read operations""" + representation = super().to_representation(instance) + representation['triples'] = [triple.id for triple in instance.triples.all()] + return representation def validate(self, data): - request = self.context.get("request") - if request and request.method in ("POST", "PUT", "PATCH"): - incoming_value = 
request.data.get("value", None) + relationship = data.get("relationship") or getattr(self.instance, "relationship", None) + triples = data.get("triples", []) + + if relationship: + # Validate that all triples belong to the relationship + for triple in triples: + if triple.relationship_id != relationship.id: + raise serializers.ValidationError( + {"triples": f"Triple '{triple.name}' does not belong to the selected relationship."} + ) + + return data - relationship = data.get("relationship") or getattr(self.instance, "relationship", None) - if not relationship: - raise serializers.ValidationError({"relationship": "This field is required to process value."}) - if relationship.type == RelationshipType.TEXT: - if not isinstance(incoming_value, str): - raise serializers.ValidationError({"value": "Must be a string for text relationship."}) - data["free_text"] = incoming_value - data["triple"] = None +class ConnectivityStatementTextSerializer(serializers.ModelSerializer): + """Serializer for text-based relationships (free text area)""" - else: - try: - triple_id = int(incoming_value) - except (ValueError, TypeError): - raise serializers.ValidationError({"value": "Must be an integer (or stringified integer) triple ID."}) + connectivity_statement = serializers.PrimaryKeyRelatedField( + queryset=ConnectivityStatement.objects.all() + ) + relationship = serializers.PrimaryKeyRelatedField( + queryset=Relationship.objects.filter(type=RelationshipType.TEXT) + ) + + class Meta: + model = ConnectivityStatementText + fields = ["id", "connectivity_statement", "relationship", "text"] - try: - triple = Triple.objects.get(id=triple_id, relationship=relationship) - except Triple.DoesNotExist: - raise serializers.ValidationError({"value": "Invalid triple ID for this relationship."}) + def validate_text(self, value): + if not value or not value.strip(): + raise serializers.ValidationError("Text cannot be empty.") + return value - data["triple"] = triple - data["free_text"] = None - return 
data +class ConnectivityStatementAnatomicalEntitySerializer(serializers.ModelSerializer): + """Serializer for anatomical entity-based relationships""" + + connectivity_statement = serializers.PrimaryKeyRelatedField( + queryset=ConnectivityStatement.objects.all() + ) + relationship = serializers.PrimaryKeyRelatedField( + queryset=Relationship.objects.filter( + type__in=[RelationshipType.ANATOMICAL_MULTI] + ) + ) + anatomical_entities = serializers.PrimaryKeyRelatedField( + many=True, + queryset=AnatomicalEntity.objects.all(), + write_only=True + ) + + class Meta: + model = ConnectivityStatementAnatomicalEntity + fields = ["id", "connectivity_statement", "relationship", "anatomical_entities"] + + def to_representation(self, instance): + """Return full anatomical entity objects in read operations""" + representation = super().to_representation(instance) + representation['anatomical_entities'] = AnatomicalEntitySerializer( + instance.anatomical_entities.all(), + many=True + ).data + return representation class ConnectivityStatementSerializer(BaseConnectivityStatementSerializer): @@ -734,6 +794,7 @@ class ConnectivityStatementSerializer(BaseConnectivityStatementSerializer): population_id = serializers.IntegerField(required=False, allow_null=True) species = SpecieSerializer(many=True, read_only=False, required=False) provenances = ProvenanceSerializer(source="provenance_set", many=True, read_only=False, required=False) + expert_consultants = ExpertConsultantSerializer(source="expertconsultant_set", many=True, read_only=False, required=False) origins = AnatomicalEntitySerializer(many=True, required=False) vias = ViaSerializerDetails(source="via_set", many=True, read_only=False, required=False) destinations = DestinationSerializerDetails(many=True, required=False) @@ -758,6 +819,8 @@ class ConnectivityStatementSerializer(BaseConnectivityStatementSerializer): required=False, read_only=True ) statement_triples = serializers.SerializerMethodField() + statement_texts = 
serializers.SerializerMethodField() + statement_anatomical_entities = serializers.SerializerMethodField() def get_available_transitions(self, instance) -> list[CSState]: @@ -782,22 +845,33 @@ def get_statement_preview(self, instance): def get_errors(self, instance) -> List: return get_connectivity_errors(instance) - def get_statement_triples(self, instance): - triples = instance.statement_triples.all() + """Get triple-based relationships grouped by relationship ID""" + statement_triples = instance.connectivitystatementtriple_set.all() grouped = {} - for triple in triples: - relationship = triple.relationship.id - serialized = ConnectivityStatementTripleSerializer(triple).data - - if triple.relationship.type == RelationshipType.MULTI: - grouped.setdefault(relationship, []).append(serialized) - else: - grouped[relationship] = serialized + for statement_triple in statement_triples: + relationship = statement_triple.relationship.id + serialized = ConnectivityStatementTripleSerializer(statement_triple).data + + # Since triples is now always M2M, return consistent structure + grouped[relationship] = serialized return grouped + def get_statement_texts(self, instance): + """Get text-based relationships""" + texts = instance.connectivitystatementtext_set.all() + return {text.relationship.id: ConnectivityStatementTextSerializer(text).data for text in texts} + + def get_statement_anatomical_entities(self, instance): + """Get anatomical entity-based relationships""" + anatomical_entities = instance.connectivitystatementanatomicalentity_set.all() + return { + ae.relationship.id: ConnectivityStatementAnatomicalEntitySerializer(ae).data + for ae in anatomical_entities + } + def to_representation(self, instance): """ Convert the model instance `forward_connection` field to serialized data. 
@@ -831,6 +905,7 @@ class Meta(BaseConnectivityStatementSerializer.Meta): "knowledge_statement", "tags", "provenances", + "expert_consultants", "owner", "owner_id", "state", @@ -864,6 +939,8 @@ class Meta(BaseConnectivityStatementSerializer.Meta): "graph_rendering_state", "statement_alerts", "statement_triples", + "statement_texts", + "statement_anatomical_entities", ) diff --git a/applications/composer/backend/composer/api/urls.py b/applications/composer/backend/composer/api/urls.py index c16b889f..511ccba1 100644 --- a/applications/composer/backend/composer/api/urls.py +++ b/applications/composer/backend/composer/api/urls.py @@ -4,6 +4,8 @@ from .views import ( AnatomicalEntityViewSet, ConnectivityStatementTripleViewSet, + ConnectivityStatementTextViewSet, + ConnectivityStatementAnatomicalEntityViewSet, PhenotypeViewSet, ProjectionPhenotypeViewSet, ConnectivityStatementViewSet, @@ -16,6 +18,7 @@ ProfileViewSet, SentenceViewSet, SpecieViewSet, + IngestionLogFileView, TagViewSet, ViaViewSet, SexViewSet, @@ -50,6 +53,8 @@ router.register(r"statementAlert", StatementAlertViewSet, basename="statementAlert") router.register(r"relationship", RelationshipViewSet, basename="relationship") router.register(r"connectivityStatementTriple", ConnectivityStatementTripleViewSet, basename="ConnectivityStatementTriple") +router.register(r"connectivityStatementText", ConnectivityStatementTextViewSet, basename="ConnectivityStatementText") +router.register(r"connectivityStatementAnatomicalEntity", ConnectivityStatementAnatomicalEntityViewSet, basename="ConnectivityStatementAnatomicalEntity") # router.register(r"json", JsonViewSet, basename="json") # The API URLs are now determined automatically by the router. 
@@ -59,4 +64,5 @@ path("jsonschemas/", jsonschemas, name="jsonschemas"), path("predicate-mapping/", PredicateMappingViewSet.as_view(), name="predicate-mapping"), path("knowledge-statement/", KnowledgeStatementViewSet.as_view(), name="knowledge-statement"), + path("ingestion-logs/", IngestionLogFileView.as_view(), name="ingestion-logs"), ] diff --git a/applications/composer/backend/composer/api/views.py b/applications/composer/backend/composer/api/views.py index 84d7d9b8..a58e8d69 100644 --- a/applications/composer/backend/composer/api/views.py +++ b/applications/composer/backend/composer/api/views.py @@ -17,7 +17,7 @@ from composer.services.export.helpers.predicate_mapping import PredicateToDBMapping from composer.services.dynamic_schema_service import inject_dynamic_relationship_schema from composer.services import bulk_service -from composer.enums import BulkActionType +from composer.pure_enums import BulkActionType from composer.services.state_services import ( ConnectivityStatementStateService, SentenceStateService, @@ -42,6 +42,8 @@ BulkActionResponseSerializer, ChangeStatusSerializer, ConnectivityStatementTripleSerializer, + ConnectivityStatementTextSerializer, + ConnectivityStatementAnatomicalEntitySerializer, PhenotypeSerializer, ProjectionPhenotypeSerializer, ConnectivityStatementSerializer, @@ -56,6 +58,8 @@ ViaSerializer, ProvenanceSerializer, ProvenanceCreateSerializer, + ExpertConsultantSerializer, + ExpertConsultantCreateSerializer, SexSerializer, PopulationSetSerializer, ConnectivityStatementUpdateSerializer, @@ -73,7 +77,6 @@ ) from ..models import ( AlertType, - AnatomicalEntityMeta, AnatomicalEntity, Phenotype, ProjectionPhenotype, @@ -87,10 +90,13 @@ Tag, Via, Provenance, + ExpertConsultant, Sex, PopulationSet, Destination, ConnectivityStatementTriple, + ConnectivityStatementText, + ConnectivityStatementAnatomicalEntity, ) @@ -122,7 +128,7 @@ class TagMixin(viewsets.GenericViewSet): ], request=None, ) - @action(detail=True, methods=["post"], 
url_path="add_tag/(?P<tag_id>\w+)") + @action(detail=True, methods=["post"], url_path=r"add_tag/(?P<tag_id>\w+)") def add_tag(self, request, pk=None, tag_id=None): instance = self.get_object() tag_instance = Tag.objects.get(id=tag_id) @@ -140,7 +146,7 @@ def add_tag(self, request, pk=None, tag_id=None): ], request=None, ) - @action(detail=True, methods=["post"], url_path="del_tag/(?P<tag_id>\w+)") + @action(detail=True, methods=["post"], url_path=r"del_tag/(?P<tag_id>\w+)") def del_tag(self, request, pk=None, tag_id=None): instance = self.get_object() tag_instance = Tag.objects.get(id=tag_id) @@ -182,7 +188,7 @@ def add_provenance(self, request, pk=None): @action( detail=True, methods=["delete"], - url_path="del_provenance/(?P<provenance_id>\d+)", + url_path=r"del_provenance/(?P<provenance_id>\d+)", ) def del_provenance(self, request, pk=None, provenance_id=None): count, deleted = Provenance.objects.filter( @@ -194,6 +200,52 @@ def del_provenance(self, request, pk=None, provenance_id=None): return Response(self.get_serializer(instance).data) +class ExpertConsultantMixin( + viewsets.GenericViewSet, +): + @extend_schema( + request=ExpertConsultantCreateSerializer, + responses={200: "ConnectivityStatement updated successfully"}, + ) + @action(detail=True, methods=["post"], url_path="add_expert_consultant") + def add_expert_consultant(self, request, pk=None): + serializer = ExpertConsultantCreateSerializer(data=request.data) + serializer.is_valid(raise_exception=True) + + uri = serializer.validated_data['uri'] + expert_consultant, created = ExpertConsultant.objects.get_or_create( + connectivity_statement_id=pk, + uri=uri, + ) + instance = self.get_object() + return Response(self.get_serializer(instance).data) + + @extend_schema( + parameters=[ + OpenApiParameter( + "expert_consultant_id", + OpenApiTypes.INT, + location=OpenApiParameter.PATH, + required=True, + ) + ], + request=None, + ) + @action( + detail=True, + methods=["delete"], + url_path=r"del_expert_consultant/(?P<expert_consultant_id>\d+)", + ) + def del_expert_consultant(self, request, 
pk=None, expert_consultant_id=None): + count, deleted = ExpertConsultant.objects.filter( + id=expert_consultant_id, connectivity_statement_id=pk + ).delete() + if count == 0: + raise Http404 + instance = self.get_object() + return Response(self.get_serializer(instance).data) + + class SpecieMixin( viewsets.GenericViewSet, ): @@ -208,7 +260,7 @@ class SpecieMixin( ], request=None, ) - @action(detail=True, methods=["post"], url_path="add_specie/(?P<specie_id>\w+)") + @action(detail=True, methods=["post"], url_path=r"add_specie/(?P<specie_id>\w+)") def add_specie(self, request, pk=None, specie_id=None): instance = self.get_object() specie_instance = Specie.objects.get(id=specie_id) @@ -226,7 +278,7 @@ def add_specie(self, request, pk=None, specie_id=None): ], request=None, ) - @action(detail=True, methods=["post"], url_path="del_specie/(?P<specie_id>\w+)") + @action(detail=True, methods=["post"], url_path=r"del_specie/(?P<specie_id>\w+)") def del_specie(self, request, pk=None, specie_id=None): instance = self.get_object() specie_instance = Specie.objects.get(id=specie_id) @@ -235,7 +287,7 @@ def del_specie(self, request, pk=None, specie_id=None): class TransitionMixin(viewsets.GenericViewSet): - @action(detail=True, methods=["post"], url_path="do_transition/(?P<transition>\w+)") + @action(detail=True, methods=["post"], url_path=r"do_transition/(?P<transition>\w+)") def transition(self, request, pk=None, transition=None): instance = self.service(self.get_object()).do_transition( transition, user=request.user, request=request @@ -503,6 +555,7 @@ class AlertTypeViewSet(viewsets.ReadOnlyModelViewSet): class ConnectivityStatementViewSet( ProvenanceMixin, + ExpertConsultantMixin, SpecieMixin, TagMixin, TransitionMixin, @@ -825,10 +878,12 @@ class RelationshipViewSet(viewsets.ReadOnlyModelViewSet): class ConnectivityStatementTripleViewSet(viewsets.ModelViewSet): """ - ConnectivityStatementTriple: + ConnectivityStatementTriple: Manage triple-based relationships """ - queryset = 
ConnectivityStatementTriple.objects.select_related("connectivity_statement", "relationship", "triple") + queryset = ConnectivityStatementTriple.objects.select_related( + "connectivity_statement", "relationship" + ).prefetch_related("triples") serializer_class = ConnectivityStatementTripleSerializer permission_classes = [IsOwnerOfConnectivityStatementOrReadOnly] @@ -840,6 +895,43 @@ def get_queryset(self): return qs +class ConnectivityStatementTextViewSet(viewsets.ModelViewSet): + """ + ConnectivityStatementText: Manage text-based relationships + """ + + queryset = ConnectivityStatementText.objects.select_related("connectivity_statement", "relationship") + serializer_class = ConnectivityStatementTextSerializer + permission_classes = [IsOwnerOfConnectivityStatementOrReadOnly] + + def get_queryset(self): + qs = super().get_queryset() + connectivity_statement_id = self.request.query_params.get("connectivity_statement_id") + if connectivity_statement_id: + qs = qs.filter(connectivity_statement_id=connectivity_statement_id) + return qs + + +class ConnectivityStatementAnatomicalEntityViewSet(viewsets.ModelViewSet): + """ + ConnectivityStatementAnatomicalEntity: Manage anatomical entity-based relationships + """ + + queryset = ConnectivityStatementAnatomicalEntity.objects.select_related( + "connectivity_statement", "relationship" + ).prefetch_related("anatomical_entities") + serializer_class = ConnectivityStatementAnatomicalEntitySerializer + permission_classes = [IsOwnerOfConnectivityStatementOrReadOnly] + + def get_queryset(self): + qs = super().get_queryset() + connectivity_statement_id = self.request.query_params.get("connectivity_statement_id") + if connectivity_statement_id: + qs = qs.filter(connectivity_statement_id=connectivity_statement_id) + return qs + + + @extend_schema( responses=OpenApiTypes.OBJECT, ) @@ -852,6 +944,7 @@ def jsonschemas(request): DestinationSerializer, TagSerializer, ProvenanceSerializer, + ExpertConsultantSerializer, SpecieSerializer, 
NoteSerializer, StatementAlertSerializer, @@ -877,3 +970,82 @@ def jsonschemas(request): ret = ret.replace("\u2028", "\\u2028").replace("\u2029", "\\u2029") data = bytes(ret.encode("utf-8")) return HttpResponse(data) + + +class IngestionLogFileView(APIView): + """ + API endpoint to download ingestion log files. + Staff-only access to download CSV log files generated during the ingestion process. + """ + permission_classes = [permissions.IsAdminUser] + + @extend_schema( + parameters=[ + OpenApiParameter( + name='log_type', + type=OpenApiTypes.STR, + location=OpenApiParameter.QUERY, + description='Type of log file to download. Options: "anomalies" or "ingested"', + enum=['anomalies', 'ingested'], + required=True, + ), + ], + responses={ + 200: OpenApiTypes.BINARY, + 404: OpenApiTypes.OBJECT, + }, + description='Download ingestion log files as CSV. Returns anomalies log or ingested statements log.', + ) + def get(self, request): + """ + Download log file as CSV. + + Query Parameters: + - log_type: 'anomalies' or 'ingested' + + Returns: + - CSV file download + """ + import os + from django.http import FileResponse + from composer.constants import INGESTION_ANOMALIES_LOG_PATH, INGESTION_INGESTED_LOG_PATH + + log_type = request.query_params.get('log_type') + + if not log_type: + return Response( + {'error': 'log_type query parameter is required'}, + status=status.HTTP_400_BAD_REQUEST + ) + + if log_type == 'anomalies': + log_path = INGESTION_ANOMALIES_LOG_PATH + filename = 'ingestion_anomalies.csv' + elif log_type == 'ingested': + log_path = INGESTION_INGESTED_LOG_PATH + filename = 'ingested_statements.csv' + else: + return Response( + {'error': 'Invalid log_type. 
Use "anomalies" or "ingested"'}, + status=status.HTTP_400_BAD_REQUEST + ) + + # Check if file exists + if not os.path.exists(log_path): + return Response( + {'error': f'Log file not found: {log_path}'}, + status=status.HTTP_404_NOT_FOUND + ) + + # Serve the file for download + try: + file_handle = open(log_path, 'rb') + response = FileResponse(file_handle, content_type='text/csv') + response['Content-Disposition'] = f'attachment; filename="{filename}"' + return response + + except Exception as e: + return Response( + {'error': f'Error reading log file: {str(e)}'}, + status=status.HTTP_500_INTERNAL_SERVER_ERROR + ) diff --git a/applications/composer/backend/composer/constants.py b/applications/composer/backend/composer/constants.py new file mode 100644 index 00000000..f3e1bc99 --- /dev/null +++ b/applications/composer/backend/composer/constants.py @@ -0,0 +1,24 @@ +""" +Configuration constants for the composer application. +""" +import os +from django.conf import settings + + +def _get_media_path(*paths): + """Helper to construct paths relative to MEDIA_ROOT""" + return os.path.join(settings.MEDIA_ROOT, *paths) + + +# Base directory for all ingestion-related files +INGESTION_BASE_DIR = _get_media_path("ingestion") + +# Directory for temporary files (cleaned up periodically by cleanup_old_files command) +INGESTION_TEMP_DIR = os.path.join(INGESTION_BASE_DIR, "ingestion_temp") + +# Log file paths for ingestion (persistent files available via IngestionLogFileView) +INGESTION_ANOMALIES_LOG_PATH = os.path.join(INGESTION_BASE_DIR, "ingestion_anomalies_log.csv") +INGESTION_INGESTED_LOG_PATH = os.path.join(INGESTION_BASE_DIR, "ingested_log.csv") + +# Cleanup settings +DEFAULT_CLEANUP_DAYS = 30 diff --git a/applications/composer/backend/composer/enums.py b/applications/composer/backend/composer/enums.py index 08554794..18aa41fe 100644 --- a/applications/composer/backend/composer/enums.py +++ b/applications/composer/backend/composer/enums.py @@ -1,6 +1,10 @@ -from enum import 
Enum from django.db import models +from composer.pure_enums import ( + CircuitType as PureCircuitType, + SentenceState as PureSentenceState, + CSState as PureCSState, +) # Create your enums here. @@ -15,14 +19,16 @@ class Projection(models.TextChoices): BI = "BI", "bilateral" -# todo: motor and sensory should move to phenotype options per csv -# also anaxonic is not set as option in the csv class CircuitType(models.TextChoices): - SENSORY = "SENSORY", "Sensory" - MOTOR = "MOTOR", "Motor" - INTRINSIC = "INTRINSIC", "Intrinsic" - PROJECTION = "PROJECTION", "Projection" - ANAXONIC = "ANAXONIC", "Anaxonic" + """ + Django TextChoices wrapper for CircuitType. + Uses values from pure_enums.CircuitType as the single source of truth. + """ + SENSORY = PureCircuitType.SENSORY.value, "Sensory" + MOTOR = PureCircuitType.MOTOR.value, "Motor" + INTRINSIC = PureCircuitType.INTRINSIC.value, "Intrinsic" + PROJECTION = PureCircuitType.PROJECTION.value, "Projection" + ANAXONIC = PureCircuitType.ANAXONIC.value, "Anaxonic" class ViaType(models.TextChoices): @@ -40,27 +46,34 @@ class DestinationType(models.TextChoices): class SentenceState(models.TextChoices): - OPEN = "open" - NEEDS_FURTHER_REVIEW = "needs_further_review" - COMPOSE_LATER = "compose_later" - READY_TO_COMPOSE = "ready_to_compose" - COMPOSE_NOW = "compose_now" - COMPLETED = "completed" - EXCLUDED = "excluded" + """ + Django TextChoices wrapper for SentenceState. + Uses values from pure_enums.SentenceState as the single source of truth. 
+ """ + OPEN = PureSentenceState.OPEN.value + NEEDS_FURTHER_REVIEW = PureSentenceState.NEEDS_FURTHER_REVIEW.value + COMPOSE_LATER = PureSentenceState.COMPOSE_LATER.value + READY_TO_COMPOSE = PureSentenceState.READY_TO_COMPOSE.value + COMPOSE_NOW = PureSentenceState.COMPOSE_NOW.value + COMPLETED = PureSentenceState.COMPLETED.value + EXCLUDED = PureSentenceState.EXCLUDED.value class CSState(models.TextChoices): - # Connectivity Statement States - DRAFT = "draft" - COMPOSE_NOW = "compose_now" - IN_PROGRESS = "in_progress" - TO_BE_REVIEWED = "to_be_reviewed" - REVISE = "revise" - REJECTED = "rejected" - NPO_APPROVED = "npo_approved" - EXPORTED = "exported" - DEPRECATED = "deprecated" - INVALID = "invalid" + """ + Django TextChoices wrapper for CSState. + Uses values from pure_enums.CSState as the single source of truth. + """ + DRAFT = PureCSState.DRAFT.value + COMPOSE_NOW = PureCSState.COMPOSE_NOW.value + IN_PROGRESS = PureCSState.IN_PROGRESS.value + TO_BE_REVIEWED = PureCSState.TO_BE_REVIEWED.value + REVISE = PureCSState.REVISE.value + REJECTED = PureCSState.REJECTED.value + NPO_APPROVED = PureCSState.NPO_APPROVED.value + EXPORTED = PureCSState.EXPORTED.value + DEPRECATED = PureCSState.DEPRECATED.value + INVALID = PureCSState.INVALID.value class NoteType(models.TextChoices): @@ -75,18 +88,8 @@ class MetricEntity(models.TextChoices): CONNECTIVITY_STATEMENT = "connectivity statement" -class ConnectivityErrors(Enum): - INVALID_FORWARD_CONNECTION = "Invalid forward connection" - - -class BulkActionType(str, Enum): - ASSIGN_USER = "assign_user" - ASSIGN_TAG = "assign_tag" - WRITE_NOTE = "write_note" - CHANGE_STATUS = "change_status" - ASSIGN_POPULATION_SET = "assign_population_set" - class RelationshipType(models.TextChoices): - SINGLE = "single", "Single select" - MULTI = "multi", "Multi select" + TRIPLE_SINGLE = "triple_single", "Triple - Single select" + TRIPLE_MULTI = "triple_multi", "Triple - Multi select" + ANATOMICAL_MULTI = "anatomical_multi", "Anatomical Entity - 
Multi select" TEXT = "text", "Text area" \ No newline at end of file diff --git a/applications/composer/backend/composer/management/commands/README_reassign_population_indices.md b/applications/composer/backend/composer/management/commands/README_reassign_population_indices.md new file mode 100644 index 00000000..c56d6039 --- /dev/null +++ b/applications/composer/backend/composer/management/commands/README_reassign_population_indices.md @@ -0,0 +1,181 @@ +# Reassign Population Indices Command + +## Overview + +This Django management command reassigns `population_index` values to connectivity statements based on patterns found in their `curie_id` field. It processes all population sets and their associated exported statements. + +## Purpose + +The command is designed to: +1. Extract hypothetical population indices from `curie_id` fields +2. Handle conflicts where multiple statements have the same hypothetical index +3. Assign sequential indices to statements that couldn't get a hypothetical index +4. 
Update the `last_used_index` on population sets + +## Expected curie_id Pattern + +The command expects `curie_id` values to follow this pattern: +``` +neuron type {population_name} {population_index} +``` + +For example: +- `neuron type rat 1` +- `neuron type mouse 42` + +## How It Works + +### Phase 1: Analysis +- Retrieves all statements with `has_statement_been_exported=True` for each population +- Extracts the hypothetical population index from each statement's `curie_id` +- Tracks statements that don't match the pattern or have no `curie_id` + +### Phase 2: Index Assignment +- For unique hypothetical indices: Assigns directly +- For conflicts (multiple statements with same index): + - Statement with smallest `id` (created earliest) gets the index + - Other statements go into a "bag" for later assignment +- Logs all assignments and conflicts + +### Phase 3: Bag Processing +- Statements in the bag (conflicts or no pattern match) get assigned sequential indices +- Starting from the last used index + 1 +- Maintains creation order (sorted by statement `id`) + +### Phase 4: Database Update +- Updates `population_index` on each statement +- Updates `last_used_index` on the population set +- All changes wrapped in a transaction + +## Usage + +### Basic Usage +```bash +python manage.py reassign_population_indices +``` + +### Dry Run (Preview Changes) +```bash +python manage.py reassign_population_indices --dry-run +``` + +### Process Specific Population +```bash +python manage.py reassign_population_indices --population "rat" +``` + +### Custom Log File +```bash +python manage.py reassign_population_indices --output-file /path/to/logfile.log +``` + +### Combined Options +```bash +python manage.py reassign_population_indices --dry-run --population "mouse" --output-file mouse_dry_run.log +``` + +## Command Options + +| Option | Description | Default | +|--------|-------------|---------| +| `--dry-run` | Preview changes without applying them | False | +| `--output-file` | 
Path to log file | `population_index_reassignment_YYYY-MM-DD_HH-MM-SS.log` | +| `--population` | Process only a specific population set by name | All populations | + +## Log Output + +The command generates a detailed log file containing: + +1. **Per-Statement Details**: + - Statement ID + - Hypothetical index extracted from `curie_id` + - Assigned population index + - Whether it was a conflict winner/loser or bag assignment + +2. **Special Cases**: + - Statements with missing `curie_id` + - Statements where pattern couldn't be matched + - The actual `curie_id` value for debugging + +3. **Summary**: + - Total population sets processed + - Total statements processed + - Total statements reassigned + - Total conflicts resolved + +### Example Log Output + +``` +Population Index Reassignment Report +Generated: 2025-11-05 10:30:45 +Mode: LIVE +================================================================================ + +Processing 2 population set(s)... + +Processing Population Set: rat +-------------------------------------------------------------------------------- +Found 5 exported statement(s) + +Phase 1: Analyzing curie_id patterns... + Statement 101: Found hypothesis index 1 from curie_id + Statement 102: Found hypothesis index 2 from curie_id + Statement 103: Found hypothesis index 2 from curie_id + Statement 104: WARNING - Could not extract index from curie_id: 'invalid format' + Statement 105: WARNING - No curie_id present + +Phase 2: Assigning population indices... + Statement 101: Assigned index 1 + Statement 102: Assigned index 2 (conflict winner) + Statement 103: Moved to bag (conflict loser, had same hypothesis index 2) + +Phase 3: Assigning indices to 2 statement(s) in bag... + Statement 103: Assigned index 3 (from bag) + Statement 104: Assigned index 4 (from bag) + +Phase 4: Updating database... 
+ Statement 101: No change needed (already 1) + Statement 102: No change needed (already 2) + Statement 103: Updated from 2 to 3 + Statement 104: Updated from None to 4 + Population rat: Updated last_used_index from 2 to 4 + +Special Cases (curie_id issues): + Statement 104: Pattern not matched + curie_id: 'invalid format' + Statement 105: Missing curie_id + +Completed population 'rat': + - Statements processed: 5 + - Statements reassigned: 2 + - Conflicts resolved: 1 + - Special cases: 2 + +================================================================================ +SUMMARY +================================================================================ +Total population sets processed: 1 +Total statements processed: 5 +Total statements reassigned: 2 +Total conflicts resolved: 1 +``` + +## Important Notes + +1. **Conflict Resolution**: When multiple statements have the same hypothetical index, the statement with the smallest `id` (earliest creation) takes precedence. + +2. **Transaction Safety**: All database updates are wrapped in a transaction, so either all changes succeed or none are applied. + +3. **Dry Run First**: Always run with `--dry-run` first to preview changes before applying them. + +4. **Special Cases**: Statements without a `curie_id` or with non-matching patterns are logged as special cases and assigned sequential indices. + +5. **Population Name Matching**: The command uses case-insensitive matching for population names and handles special regex characters in population names. 
+ +## When to Use This Command + +- After importing/ingesting statements with `curie_id` values +- To resolve duplicate population indices +- To correct population index assignments after data migrations +- To ensure sequential and conflict-free population indices + diff --git a/applications/composer/backend/composer/management/commands/cleanup_old_files.py b/applications/composer/backend/composer/management/commands/cleanup_old_files.py new file mode 100644 index 00000000..89164a13 --- /dev/null +++ b/applications/composer/backend/composer/management/commands/cleanup_old_files.py @@ -0,0 +1,98 @@ +import os +import time +from datetime import datetime, timedelta +from django.core.management.base import BaseCommand +from composer.constants import INGESTION_TEMP_DIR, DEFAULT_CLEANUP_DAYS + + +class Command(BaseCommand): + help = "Cleans up old temporary ingestion files" + + def add_arguments(self, parser): + parser.add_argument( + '--days', + type=int, + default=DEFAULT_CLEANUP_DAYS, + help=f'Delete files older than this many days (default: {DEFAULT_CLEANUP_DAYS})', + ) + parser.add_argument( + '--dry-run', + action='store_true', + help='Show what would be deleted without actually deleting', + ) + + def handle(self, *args, **options): + days = options['days'] + dry_run = options['dry_run'] + + if dry_run: + self.stdout.write(self.style.WARNING(f"DRY RUN MODE: No files will be deleted")) + + self.stdout.write(f"Cleaning up temporary files older than {days} days...") + + # Calculate cutoff time + cutoff_time = time.time() - (days * 24 * 60 * 60) + cutoff_date = datetime.fromtimestamp(cutoff_time).strftime('%Y-%m-%d %H:%M:%S') + self.stdout.write(f"Cutoff date: {cutoff_date}") + + # Only clean the temporary directory (log files in INGESTION_BASE_DIR are persistent) + directory = INGESTION_TEMP_DIR + + total_deleted = 0 + total_size = 0 + + if not os.path.exists(directory): + self.stdout.write(f"Directory does not exist: {directory}") + else: + 
self.stdout.write(f"\nScanning directory: {directory}") + deleted_count, deleted_size = self._clean_directory(directory, cutoff_time, dry_run) + total_deleted += deleted_count + total_size += deleted_size + + size_mb = total_size / (1024 * 1024) + + if dry_run: + self.stdout.write(self.style.SUCCESS( + f"\nDRY RUN: Would delete {total_deleted} file(s), freeing {size_mb:.2f} MB" + )) + else: + self.stdout.write(self.style.SUCCESS( + f"\nDeleted {total_deleted} file(s), freed {size_mb:.2f} MB" + )) + + def _clean_directory(self, directory, cutoff_time, dry_run): + """Clean files in a directory that are older than cutoff_time""" + deleted_count = 0 + deleted_size = 0 + + try: + for filename in os.listdir(directory): + filepath = os.path.join(directory, filename) + + # Skip if not a file + if not os.path.isfile(filepath): + continue + + # Check file age + file_mtime = os.path.getmtime(filepath) + if file_mtime < cutoff_time: + file_size = os.path.getsize(filepath) + file_date = datetime.fromtimestamp(file_mtime).strftime('%Y-%m-%d %H:%M:%S') + + if dry_run: + self.stdout.write(f" Would delete: {filename} ({file_date}, {file_size} bytes)") + else: + try: + os.remove(filepath) + self.stdout.write(f" Deleted: {filename} ({file_date}, {file_size} bytes)") + except Exception as e: + self.stderr.write(f" Error deleting {filename}: {e}") + continue + + deleted_count += 1 + deleted_size += file_size + + except Exception as e: + self.stderr.write(f"Error scanning directory {directory}: {e}") + + return deleted_count, deleted_size diff --git a/applications/composer/backend/composer/management/commands/get_composer_data.py b/applications/composer/backend/composer/management/commands/get_composer_data.py new file mode 100644 index 00000000..3beaaf40 --- /dev/null +++ b/applications/composer/backend/composer/management/commands/get_composer_data.py @@ -0,0 +1,47 @@ +import json +import time + +from django.core.management.base import BaseCommand +from 
composer.services.cs_ingestion.cs_ingestion_services import get_composer_data + + +class Command(BaseCommand): + help = "Get composer data (custom relationships and alert types) from database and save to file" + + def add_arguments(self, parser): + parser.add_argument( + '--output_filepath', + type=str, + required=True, + help='Path to output JSON file where data will be saved', + ) + + def handle(self, *args, **options): + output_filepath = options['output_filepath'] + + start_time = time.time() + + try: + # Fetch composer data + self.stdout.write("Fetching composer data from database...") + composer_data = get_composer_data() + + # Save to JSON file + with open(output_filepath, 'w', encoding='utf-8') as f: + json.dump(composer_data, f, indent=2) + + end_time = time.time() + duration = end_time - start_time + + self.stdout.write(self.style.SUCCESS( + f"Successfully saved {len(composer_data['custom_relationships'])} custom relationships " + f"and {len(composer_data['statement_alert_uris'])} alert URIs to {output_filepath} in {duration:.2f} seconds." 
+ )) + + except Exception as e: + end_time = time.time() + duration = end_time - start_time + self.stderr.write(self.style.ERROR( + f"Failed to get data after {duration:.2f} seconds: {e}" + )) + diff --git a/applications/composer/backend/composer/management/commands/ingest_to_database.py b/applications/composer/backend/composer/management/commands/ingest_to_database.py new file mode 100644 index 00000000..c246281d --- /dev/null +++ b/applications/composer/backend/composer/management/commands/ingest_to_database.py @@ -0,0 +1,126 @@ +import json +import time + +from django.core.management.base import BaseCommand +from composer.services.cs_ingestion.cs_ingestion_services import ingest_to_database +from composer.services.cs_ingestion.logging_service import LoggerService +from composer.services.cs_ingestion.models import convert_statement_from_json +from composer.constants import INGESTION_ANOMALIES_LOG_PATH, INGESTION_INGESTED_LOG_PATH + + +class Command(BaseCommand): + help = "Step 2: Ingest pre-processed statements into the database" + + def add_arguments(self, parser): + parser.add_argument( + '--input_filepath', + type=str, + required=True, + help='Path to input JSON file containing processed statements from Step 1.', + ) + parser.add_argument( + '--update_upstream', + action='store_true', + help='Set this flag to update upstream statements.', + ) + parser.add_argument( + '--update_anatomical_entities', + action='store_true', + help='Set this flag to try move anatomical entities to specific layer, region.', + ) + parser.add_argument( + '--disable_overwrite', + action='store_true', + help='Set this flag to disable overwriting existing statements.', + ) + parser.add_argument( + '--force_state_transition', + action='store_true', + help='Set this flag to allow state transitions from any state (e.g., TO_BE_REVIEWED -> EXPORTED). 
Use when ingesting a pre-filtered population.', + ) + parser.add_argument( + '--anomalies_csv_input', + type=str, + help='Path to input anomalies CSV file from Step 1 (will be merged with new anomalies)', + ) + + def handle(self, *args, **options): + input_filepath = options['input_filepath'] + update_upstream = options['update_upstream'] + update_anatomical_entities = options['update_anatomical_entities'] + disable_overwrite = options['disable_overwrite'] + force_state_transition = options['force_state_transition'] + anomalies_csv_input = options.get('anomalies_csv_input') + + # Load statements from JSON file + try: + if not input_filepath.endswith('.json'): + self.stderr.write(self.style.ERROR( + "Input file must have .json extension" + )) + return + + with open(input_filepath, 'r', encoding='utf-8') as f: + statements_list = json.load(f) + + # Convert JSON-serialized statements back to object format + self.stdout.write("Converting JSON statements to object format...") + statements_list = [convert_statement_from_json(stmt) for stmt in statements_list] + + self.stdout.write(f"Loaded {len(statements_list)} statements from {input_filepath}") + except FileNotFoundError: + self.stderr.write(self.style.ERROR(f"Input file not found: {input_filepath}")) + return + except Exception as e: + self.stderr.write(self.style.ERROR(f"Error loading statements: {e}")) + return + + start_time = time.time() + + # Create logger service with explicit paths from constants + logger_service = LoggerService( + ingestion_anomalies_log_path=INGESTION_ANOMALIES_LOG_PATH, + ingested_log_path=INGESTION_INGESTED_LOG_PATH + ) + + # Load any previous anomalies from the CSV file (e.g., from process_neurondm step) + if anomalies_csv_input: + logger_service.load_anomalies_from_json(anomalies_csv_input) + + try: + # Step 2: Ingest to database + self.stdout.write("Ingesting statements to database...") + success = ingest_to_database( + statements_list=statements_list, + update_upstream=update_upstream, 
+ update_anatomical_entities=update_anatomical_entities, + disable_overwrite=disable_overwrite, + force_state_transition=force_state_transition, + logger_service_param=logger_service, + ) + + end_time = time.time() + duration = end_time - start_time + + + # First convert JSON anomalies to CSV format + logger_service.write_anomalies_to_file() + self.stdout.write(f"Saved {len(logger_service.anomalies)} total anomalies to {logger_service.anomalies_log_path}") + logger_service.write_ingested_statements_to_file(statements_list) + self.stdout.write(f"Saved ingested statements log to {logger_service.ingested_log_path}") + + if success: + self.stdout.write(self.style.SUCCESS( + f"Ingestion completed successfully in {duration:.2f} seconds." + )) + else: + self.stderr.write(self.style.ERROR( + f"Ingestion failed after {duration:.2f} seconds. Check logs for details." + )) + + except Exception as e: + end_time = time.time() + duration = end_time - start_time + self.stderr.write(self.style.ERROR( + f"Ingestion failed after {duration:.2f} seconds: {e}" + )) diff --git a/applications/composer/backend/composer/management/commands/migrate_expert_consultants.py b/applications/composer/backend/composer/management/commands/migrate_expert_consultants.py new file mode 100644 index 00000000..149ab863 --- /dev/null +++ b/applications/composer/backend/composer/management/commands/migrate_expert_consultants.py @@ -0,0 +1,211 @@ +from datetime import datetime +from django.core.management.base import BaseCommand, CommandError +from django.db import transaction +from django.db.models import Prefetch +from composer.models import ConnectivityStatement, StatementAlert, AlertType, ExpertConsultant + + +class Command(BaseCommand): + help = 'Migrate expert consultant data from statement alerts to ExpertConsultant model' + + def add_arguments(self, parser): + parser.add_argument( + '--dry-run', + action='store_true', + help='Show what would be migrated without making changes', + ) + 
parser.add_argument( + '--batch-size', + type=int, + default=100, + help='Batch size for processing statement alerts (default: 100)', + ) + parser.add_argument( + '--predicate', + type=str, + default='expertConsultant', + help='Alert type predicate to look for (default: expertConsultant)', + ) + + def handle(self, *args, **options): + dry_run = options['dry_run'] + batch_size = options['batch_size'] + predicate = options['predicate'] + + self.stdout.write(self.style.SUCCESS('Starting expert consultant migration...')) + + if dry_run: + self.stdout.write(self.style.WARNING('DRY RUN MODE - No changes will be made')) + + # Statistics + total_alerts_found = 0 + total_created = 0 + total_failed = 0 + total_alerts_deleted = 0 + failed_migrations = [] + + try: + # First, check if the alert type exists + try: + alert_type = AlertType.objects.get(predicate=predicate) + self.stdout.write(f'Found alert type: {alert_type.name} (predicate: {predicate})') + except AlertType.DoesNotExist: + self.stdout.write( + self.style.WARNING( + f'No alert type found with predicate "{predicate}". Nothing to migrate.' + ) + ) + return + + # Get all statement alerts with the expert consultant predicate + # Use select_related to minimize database queries + alerts_queryset = StatementAlert.objects.filter( + alert_type=alert_type + ).select_related( + 'connectivity_statement', + 'alert_type' + ).order_by('connectivity_statement_id') + + total_alerts_found = alerts_queryset.count() + self.stdout.write(f'Found {total_alerts_found} expert consultant alerts to migrate...') + + if total_alerts_found == 0: + self.stdout.write( + self.style.SUCCESS('No expert consultant alerts found. 
Migration complete!') + ) + return + + # Process alerts in batches + processed_count = 0 + + # Group alerts by connectivity statement for efficient processing + current_batch = [] + + for alert in alerts_queryset.iterator(chunk_size=batch_size): + current_batch.append(alert) + processed_count += 1 + + # Process batch when it reaches the batch size or at the end + if len(current_batch) >= batch_size or processed_count == total_alerts_found: + batch_results = self._process_batch(current_batch, dry_run) + + total_created += batch_results['created'] + total_failed += batch_results['failed'] + total_alerts_deleted += batch_results['deleted'] + failed_migrations.extend(batch_results['failures']) + + # Clear the batch + current_batch = [] + + # Progress indicator + self.stdout.write(f'Processed {processed_count}/{total_alerts_found} alerts...') + + # Summary + self.stdout.write( + self.style.SUCCESS( + f'\nMigration completed!\n' + f'Total alerts found: {total_alerts_found}\n' + f'Expert consultants created: {total_created}\n' + f'Alerts deleted: {total_alerts_deleted}\n' + f'Failed migrations: {total_failed}' + ) + ) + + if failed_migrations: + self.stdout.write(self.style.ERROR('\nFailed migrations:')) + for failure in failed_migrations: + self.stdout.write( + f" Alert ID {failure['alert_id']}: {failure['error']}" + ) + + if dry_run and total_created > 0: + self.stdout.write( + self.style.WARNING( + '\nRun without --dry-run to apply these changes.' 
+ ) + ) + + except Exception as e: + raise CommandError(f'Error during migration: {e}') + + def _process_batch(self, alerts, dry_run): + """Process a batch of alerts and create ExpertConsultant entries""" + results = { + 'created': 0, + 'failed': 0, + 'deleted': 0, + 'failures': [] + } + + for alert in alerts: + try: + # Validate that we have a URI in the text field + if not alert.text or not alert.text.strip(): + results['failed'] += 1 + results['failures'].append({ + 'alert_id': alert.id, + 'error': 'Alert has no URI text' + }) + continue + + uri = alert.text.strip() + + # Check if this expert consultant already exists to avoid duplicates + existing = ExpertConsultant.objects.filter( + connectivity_statement=alert.connectivity_statement, + uri=uri + ).exists() + + if existing: + self.stdout.write( + self.style.WARNING( + f'ExpertConsultant already exists for CS {alert.connectivity_statement.id} ' + f'with URI "{uri}". Skipping creation but will delete alert.' + ) + ) + # Delete the alert since the data is already in the new model + if not dry_run: + alert.delete() + results['deleted'] += 1 + continue + + # Create the ExpertConsultant entry + if not dry_run: + with transaction.atomic(): + expert_consultant = ExpertConsultant.objects.create( + connectivity_statement=alert.connectivity_statement, + uri=uri + ) + + self.stdout.write( + f'Created ExpertConsultant ID {expert_consultant.id} for ' + f'CS {alert.connectivity_statement.id} with URI: {uri}' + ) + + # Delete the alert only after successful creation + alert.delete() + + results['created'] += 1 + results['deleted'] += 1 + else: + # In dry-run mode, just log what would happen + self.stdout.write( + f'Would create ExpertConsultant for CS {alert.connectivity_statement.id} ' + f'with URI: {uri}' + ) + results['created'] += 1 + results['deleted'] += 1 + + except Exception as e: + results['failed'] += 1 + results['failures'].append({ + 'alert_id': alert.id, + 'error': str(e) + }) + self.stdout.write( + 
self.style.ERROR( + f'Failed to migrate alert ID {alert.id}: {e}' + ) + ) + + return results diff --git a/applications/composer/backend/composer/management/commands/reassign_population_indices.py b/applications/composer/backend/composer/management/commands/reassign_population_indices.py new file mode 100644 index 00000000..2b93d600 --- /dev/null +++ b/applications/composer/backend/composer/management/commands/reassign_population_indices.py @@ -0,0 +1,304 @@ +import re +from datetime import datetime +from collections import defaultdict +from django.core.management.base import BaseCommand +from django.db import transaction +from composer.models import PopulationSet, ConnectivityStatement + + +class Command(BaseCommand): + help = 'Reassign population indices based on curie_id patterns for exported statements' + + def add_arguments(self, parser): + parser.add_argument( + '--dry-run', + action='store_true', + help='Show what would be changed without making changes', + ) + parser.add_argument( + '--output-file', + type=str, + default=None, + help='Output log file (default: population_index_reassignment_YYYY-MM-DD_HH-MM-SS.log)', + ) + parser.add_argument( + '--population', + type=str, + default=None, + help='Process only a specific population set by name', + ) + + def handle(self, *args, **options): + dry_run = options['dry_run'] + population_filter = options['population'] + + # Generate output filename with timestamp if not provided + if options['output_file']: + output_file = options['output_file'] + else: + timestamp = datetime.now().strftime('%Y-%m-%d_%H-%M-%S') + mode = "dry_run_" if dry_run else "" + output_file = f'population_index_reassignment_{mode}{timestamp}.log' + + self.stdout.write(self.style.SUCCESS('Starting population index reassignment...')) + + if dry_run: + self.stdout.write(self.style.WARNING('DRY RUN MODE - No changes will be made')) + + try: + with open(output_file, 'w', encoding='utf-8') as log_file: + self.log(log_file, f"Population Index 
Reassignment Report") + self.log(log_file, f"Generated: {datetime.now().strftime('%Y-%m-%d %H:%M:%S')}") + self.log(log_file, f"Mode: {'DRY RUN' if dry_run else 'LIVE'}") + self.log(log_file, "=" * 80) + self.log(log_file, "") + + # Get population sets to process + population_sets = PopulationSet.objects.all() + if population_filter: + population_sets = population_sets.filter(name=population_filter.lower()) + if not population_sets.exists(): + raise ValueError(f"Population set '{population_filter}' not found") + + total_populations = population_sets.count() + self.log(log_file, f"Processing {total_populations} population set(s)...") + self.log(log_file, "") + + total_statements_processed = 0 + total_statements_reassigned = 0 + total_conflicts = 0 + + for population in population_sets: + result = self.process_population(population, log_file, dry_run) + total_statements_processed += result['processed'] + total_statements_reassigned += result['reassigned'] + total_conflicts += result['conflicts'] + + # Summary + self.log(log_file, "") + self.log(log_file, "=" * 80) + self.log(log_file, "SUMMARY") + self.log(log_file, "=" * 80) + self.log(log_file, f"Total population sets processed: {total_populations}") + self.log(log_file, f"Total statements processed: {total_statements_processed}") + self.log(log_file, f"Total statements reassigned: {total_statements_reassigned}") + self.log(log_file, f"Total conflicts resolved: {total_conflicts}") + + self.stdout.write( + self.style.SUCCESS( + f'\nProcess completed!\n' + f'Population sets processed: {total_populations}\n' + f'Statements processed: {total_statements_processed}\n' + f'Statements reassigned: {total_statements_reassigned}\n' + f'Conflicts resolved: {total_conflicts}\n' + f'Log written to: {output_file}' + ) + ) + + if dry_run: + self.stdout.write( + self.style.WARNING( + 'Run without --dry-run to apply these changes.' 
+ ) + ) + + except Exception as e: + self.stdout.write(self.style.ERROR(f'Error: {e}')) + raise + + def process_population(self, population, log_file, dry_run): + """Process all exported statements for a given population set""" + self.log(log_file, f"Processing Population Set: {population.name}") + self.log(log_file, "-" * 80) + + # Get all exported statements for this population + statements = ConnectivityStatement.objects.filter( + population=population, + has_statement_been_exported=True + ).order_by('id') + + statement_count = statements.count() + self.log(log_file, f"Found {statement_count} exported statement(s)") + + if statement_count == 0: + self.log(log_file, "No exported statements to process") + self.log(log_file, "") + return {'processed': 0, 'reassigned': 0, 'conflicts': 0} + + # Maps to track population indices + hypothesis_index_map = {} # hypothesis_index -> list of statement objects + statement_assignments = {} # statement_id -> assigned index + bag = [] # Statements that couldn't get hypothesis index or have conflicts + special_cases = [] # Track cases where hypothesis index couldn't be retrieved + + # Create a dictionary for fast statement lookup by id + statements_dict = {s.id: s for s in statements} + + # Pattern to extract population index from curie_id + # Expected format: "neuron type {population_name} {population_index}" + pattern = rf"neuron type {re.escape(population.name)}\s+(\d+)" + compiled_pattern = re.compile(pattern, re.IGNORECASE) + + # First pass: Extract hypothesis indices and detect conflicts + self.log(log_file, "") + self.log(log_file, "Phase 1: Analyzing curie_id patterns...") + + for statement in statements: + hypothesis_index = None + + if statement.curie_id: + match = compiled_pattern.search(statement.curie_id) + if match: + hypothesis_index = int(match.group(1)) + self.log(log_file, f" Statement {statement.curie_id}: Found hypothesis index {hypothesis_index} from curie_id") + else: + self.log(log_file, f" WARNING - Could 
not extract index from curie_id: '{statement.curie_id}'") + special_cases.append({ + 'statement_id': statement.id, + 'curie_id': statement.curie_id, + 'reason': 'Pattern not matched' + }) + else: + self.log(log_file, f" Statement {statement.id}: WARNING - No curie_id present") + special_cases.append({ + 'statement_id': statement.id, + 'curie_id': None, + 'reason': 'Missing curie_id' + }) + + if hypothesis_index is not None: + if hypothesis_index not in hypothesis_index_map: + hypothesis_index_map[hypothesis_index] = [] + hypothesis_index_map[hypothesis_index].append(statement) + else: + bag.append(statement) + + # Second pass: Assign indices, handling conflicts + self.log(log_file, "") + self.log(log_file, "Phase 2: Assigning population indices...") + + conflicts_resolved = 0 + used_indices = set() + + # Process statements with hypothesis indices + for hypothesis_index in sorted(hypothesis_index_map.keys()): + statements_list = hypothesis_index_map[hypothesis_index] + + if len(statements_list) == 1: + # No conflict - assign directly + statement = statements_list[0] + statement_assignments[statement.id] = hypothesis_index + used_indices.add(hypothesis_index) + self.log(log_file, f" Statement {statement.curie_id}: Assigned index {hypothesis_index}") + else: + # Conflict - earliest statement (smallest id) gets the index + statements_list.sort(key=lambda s: s.id) + winner = statements_list[0] + losers = statements_list[1:] + + statement_assignments[winner.id] = hypothesis_index + used_indices.add(hypothesis_index) + conflicts_resolved += len(losers) + + self.log(log_file, f" Statement {winner.curie_id}: Assigned index {hypothesis_index} (conflict winner)") + for loser in losers: + self.log(log_file, f" Statement {loser.curie_id}: Moved to bag (conflict loser, had same hypothesis index {hypothesis_index})") + bag.append(loser) + + # Third pass: Assign indices to bag statements sequentially + if bag: + self.log(log_file, "") + self.log(log_file, f"Phase 3: Assigning 
indices to {len(bag)} statement(s) in bag...") + + # Find the next available index + if used_indices: + next_index = max(used_indices) + 1 + else: + next_index = 1 + + # Sort bag by statement id to maintain consistent ordering + bag.sort(key=lambda s: s.id) + + for statement in bag: + # Find next unused index + while next_index in used_indices: + next_index += 1 + + statement_assignments[statement.id] = next_index + used_indices.add(next_index) + self.log(log_file, f" Statement {statement.curie_id}: Assigned index {next_index} (from bag)") + next_index += 1 + + # Apply changes to database + statements_reassigned = 0 + + if not dry_run: + self.log(log_file, "") + self.log(log_file, "Phase 4: Updating database...") + + with transaction.atomic(): + for statement_id, new_index in statement_assignments.items(): + statement = statements_dict[statement_id] + old_index = statement.population_index + + if old_index != new_index: + statement.population_index = new_index + statement.save(update_fields=['population_index']) + statements_reassigned += 1 + self.log(log_file, f" Statement {statement.curie_id}: Updated from {old_index} to {new_index}") + else: + self.log(log_file, f" Statement {statement.curie_id}: No change needed (already {new_index})") + + # Update population's last_used_index + if used_indices: + new_last_index = max(used_indices) + old_last_index = population.last_used_index + population.last_used_index = new_last_index + population.save(update_fields=['last_used_index']) + self.log(log_file, f" Population {population.name}: Updated last_used_index from {old_last_index} to {new_last_index}") + else: + # In dry run, just report what would change + self.log(log_file, "") + self.log(log_file, "Phase 4: Database changes (DRY RUN - not applied)...") + + for statement_id, new_index in statement_assignments.items(): + statement = statements_dict[statement_id] + old_index = statement.population_index + + if old_index != new_index: + statements_reassigned += 1 + 
self.log(log_file, f" Statement {statement_id}: Would update from {old_index} to {new_index}") + else: + self.log(log_file, f" Statement {statement_id}: No change needed (already {new_index})") + + if used_indices: + new_last_index = max(used_indices) + self.log(log_file, f" Population {population.name}: Would update last_used_index to {new_last_index}") + + # Report special cases + if special_cases: + self.log(log_file, "") + self.log(log_file, "Special Cases (curie_id issues):") + for case in special_cases: + self.log(log_file, f" Statement {case['statement_id']}: {case['reason']}") + if case['curie_id']: + self.log(log_file, f" curie_id: '{case['curie_id']}'") + + self.log(log_file, "") + self.log(log_file, f"Completed population '{population.name}':") + self.log(log_file, f" - Statements processed: {statement_count}") + self.log(log_file, f" - Statements reassigned: {statements_reassigned}") + self.log(log_file, f" - Conflicts resolved: {conflicts_resolved}") + self.log(log_file, f" - Special cases: {len(special_cases)}") + self.log(log_file, "") + + return { + 'processed': statement_count, + 'reassigned': statements_reassigned, + 'conflicts': conflicts_resolved + } + + def log(self, file_handle, message): + """Write to both log file and stdout""" + file_handle.write(message + '\n') + self.stdout.write(message) diff --git a/applications/composer/backend/composer/migrations/0092_expertconsultant.py b/applications/composer/backend/composer/migrations/0092_expertconsultant.py new file mode 100644 index 00000000..8f6354d6 --- /dev/null +++ b/applications/composer/backend/composer/migrations/0092_expertconsultant.py @@ -0,0 +1,48 @@ +# Generated by Django 4.1.13 on 2025-10-10 14:13 + +import composer.models +from django.db import migrations, models +import django.db.models.deletion + + +class Migration(migrations.Migration): + + dependencies = [ + ("composer", "0091_alter_sentence_external_ref_alter_sentence_pmcid"), + ] + + operations = [ + migrations.CreateModel( 
+ name="ExpertConsultant", + fields=[ + ( + "id", + models.BigAutoField( + auto_created=True, + primary_key=True, + serialize=False, + verbose_name="ID", + ), + ), + ( + "uri", + composer.models.UriField( + max_length=500, + validators=[ + composer.models.validate_uri, + ], + ), + ), + ( + "connectivity_statement", + models.ForeignKey( + on_delete=django.db.models.deletion.CASCADE, + to="composer.connectivitystatement", + ), + ), + ], + options={ + "verbose_name_plural": "Expert Consultants", + }, + ), + ] diff --git a/applications/composer/backend/composer/migrations/0093_alter_expertconsultant_uri_alter_provenance_uri.py b/applications/composer/backend/composer/migrations/0093_alter_expertconsultant_uri_alter_provenance_uri.py new file mode 100644 index 00000000..9eabeedc --- /dev/null +++ b/applications/composer/backend/composer/migrations/0093_alter_expertconsultant_uri_alter_provenance_uri.py @@ -0,0 +1,24 @@ +# Generated by Django 4.1.13 on 2025-10-10 18:31 + +import composer.models +from django.db import migrations + + +class Migration(migrations.Migration): + + dependencies = [ + ("composer", "0092_expertconsultant"), + ] + + operations = [ + migrations.AlterField( + model_name="expertconsultant", + name="uri", + field=composer.models.UriField(max_length=500), + ), + migrations.AlterField( + model_name="provenance", + name="uri", + field=composer.models.UriField(max_length=500), + ), + ] diff --git a/applications/composer/backend/composer/migrations/0094_alter_connectivitystatementtriple_options_and_more.py b/applications/composer/backend/composer/migrations/0094_alter_connectivitystatementtriple_options_and_more.py new file mode 100644 index 00000000..c0512bb1 --- /dev/null +++ b/applications/composer/backend/composer/migrations/0094_alter_connectivitystatementtriple_options_and_more.py @@ -0,0 +1,202 @@ +# Generated by Django 4.1.13 on 2025-10-13 11:51 + +from django.db import migrations, models +import django.db.models.deletion + + +def 
migrate_free_text_to_new_model(apps, schema_editor): + """ + Migrate free_text data from ConnectivityStatementTriple to ConnectivityStatementText. + Only migrates records where free_text is not null/empty. + """ + ConnectivityStatementTriple = apps.get_model("composer", "ConnectivityStatementTriple") + ConnectivityStatementText = apps.get_model("composer", "ConnectivityStatementText") + + # Find all triples with free_text + triples_with_text = ConnectivityStatementTriple.objects.filter( + free_text__isnull=False + ).exclude(free_text='') + + # Create new ConnectivityStatementText records + text_records = [] + for triple in triples_with_text: + text_records.append( + ConnectivityStatementText( + connectivity_statement=triple.connectivity_statement, + relationship=triple.relationship, + text=triple.free_text + ) + ) + + # Bulk create the new records + if text_records: + ConnectivityStatementText.objects.bulk_create(text_records) + + # Delete the original triple records that had free_text (they're now in the new model) + triples_with_text.delete() + + +def migrate_free_text_back_to_old_model(apps, schema_editor): + """ + Reverse migration: move text data back to ConnectivityStatementTriple. 
+ """ + ConnectivityStatementTriple = apps.get_model("composer", "ConnectivityStatementTriple") + ConnectivityStatementText = apps.get_model("composer", "ConnectivityStatementText") + + # Find all text records + text_records = ConnectivityStatementText.objects.all() + + # Create ConnectivityStatementTriple records with free_text + triple_records = [] + for text_record in text_records: + triple_records.append( + ConnectivityStatementTriple( + connectivity_statement=text_record.connectivity_statement, + relationship=text_record.relationship, + free_text=text_record.text, + triple=None # free_text records don't have a triple + ) + ) + + # Bulk create the triple records + if triple_records: + ConnectivityStatementTriple.objects.bulk_create(triple_records) + + # Delete the text records (they're back in triples) + text_records.delete() + + +class Migration(migrations.Migration): + + dependencies = [ + ("composer", "0093_alter_expertconsultant_uri_alter_provenance_uri"), + ] + + operations = [ + migrations.AlterModelOptions( + name="connectivitystatementtriple", + options={"verbose_name_plural": "Connectivity Statement Triples"}, + ), + migrations.AlterField( + model_name="connectivitystatementtriple", + name="connectivity_statement", + field=models.ForeignKey( + on_delete=django.db.models.deletion.CASCADE, + to="composer.connectivitystatement", + ), + ), + migrations.AlterField( + model_name="relationship", + name="type", + field=models.CharField( + choices=[ + ("single", "Triple - Single select"), + ("multi", "Triple - Multi select"), + ("text", "Text area"), + ("anatomical_multi", "Anatomical Entity - Multi select"), + ], + max_length=20, + ), + ), + + # Create the new ConnectivityStatementText model + migrations.CreateModel( + name="ConnectivityStatementText", + fields=[ + ( + "id", + models.BigAutoField( + auto_created=True, + primary_key=True, + serialize=False, + verbose_name="ID", + ), + ), + ("text", models.TextField()), + ( + "connectivity_statement", + 
models.ForeignKey( + on_delete=django.db.models.deletion.CASCADE, + to="composer.connectivitystatement", + ), + ), + ( + "relationship", + models.ForeignKey( + on_delete=django.db.models.deletion.CASCADE, + to="composer.relationship", + ), + ), + ], + options={ + "verbose_name_plural": "Connectivity Statement Texts", + }, + ), + migrations.AddConstraint( + model_name="connectivitystatementtext", + constraint=models.UniqueConstraint( + fields=("connectivity_statement", "relationship"), + name="unique_statement_relationship_text", + ), + ), + + # Migrate free_text data to the new model BEFORE removing the field + migrations.RunPython( + migrate_free_text_to_new_model, + migrate_free_text_back_to_old_model, + ), + + # Now it's safe to remove the free_text field + migrations.RemoveField( + model_name="connectivitystatementtriple", + name="free_text", + ), + + # Create ConnectivityStatementAnatomicalEntity model + migrations.CreateModel( + name="ConnectivityStatementAnatomicalEntity", + fields=[ + ( + "id", + models.BigAutoField( + auto_created=True, + primary_key=True, + serialize=False, + verbose_name="ID", + ), + ), + ( + "anatomical_entities", + models.ManyToManyField( + blank=True, + related_name="statement_relationships", + to="composer.anatomicalentity", + ), + ), + ( + "connectivity_statement", + models.ForeignKey( + on_delete=django.db.models.deletion.CASCADE, + to="composer.connectivitystatement", + ), + ), + ( + "relationship", + models.ForeignKey( + on_delete=django.db.models.deletion.CASCADE, + to="composer.relationship", + ), + ), + ], + options={ + "verbose_name_plural": "Connectivity Statement Anatomical Entities", + }, + ), + migrations.AddConstraint( + model_name="connectivitystatementanatomicalentity", + constraint=models.UniqueConstraint( + fields=("connectivity_statement", "relationship"), + name="unique_statement_relationship_anatomical", + ), + ), + ] diff --git a/applications/composer/backend/composer/migrations/0095_alter_relationship_type.py 
b/applications/composer/backend/composer/migrations/0095_alter_relationship_type.py new file mode 100644 index 00000000..c5180808 --- /dev/null +++ b/applications/composer/backend/composer/migrations/0095_alter_relationship_type.py @@ -0,0 +1,80 @@ +# Generated by Django 4.1.13 on 2025-10-13 11:53 + +from django.db import migrations, models + + +def migrate_relationship_types_forward(apps, schema_editor): + """ + Migrate old relationship type values to new naming convention: + - 'single' -> 'triple_single' + - 'multi' -> 'triple_multi' + """ + Relationship = apps.get_model("composer", "Relationship") + + # Update single to triple_single + Relationship.objects.filter(type="single").update(type="triple_single") + + # Update multi to triple_multi + Relationship.objects.filter(type="multi").update(type="triple_multi") + + +def migrate_relationship_types_backward(apps, schema_editor): + """ + Reverse migration: revert to old naming convention: + - 'triple_single' -> 'single' + - 'triple_multi' -> 'multi' + """ + Relationship = apps.get_model("composer", "Relationship") + + # Revert triple_single to single + Relationship.objects.filter(type="triple_single").update(type="single") + + # Revert triple_multi to multi + Relationship.objects.filter(type="triple_multi").update(type="multi") + + +class Migration(migrations.Migration): + + dependencies = [ + ("composer", "0094_alter_connectivitystatementtriple_options_and_more"), + ] + + operations = [ + # Step 1: Temporarily allow both old and new values + migrations.AlterField( + model_name="relationship", + name="type", + field=models.CharField( + choices=[ + ("single", "Single select (deprecated)"), + ("multi", "Multi select (deprecated)"), + ("triple_single", "Triple - Single select"), + ("triple_multi", "Triple - Multi select"), + ("anatomical_multi", "Anatomical Entity - Multi select"), + ("text", "Text area"), + ], + max_length=20, + ), + ), + + # Step 2: Migrate the data + migrations.RunPython( + 
migrate_relationship_types_forward, + migrate_relationship_types_backward, + ), + + # Step 3: Remove deprecated choices and finalize + migrations.AlterField( + model_name="relationship", + name="type", + field=models.CharField( + choices=[ + ("triple_single", "Triple - Single select"), + ("triple_multi", "Triple - Multi select"), + ("anatomical_multi", "Anatomical Entity - Multi select"), + ("text", "Text area"), + ], + max_length=20, + ), + ), + ] diff --git a/applications/composer/backend/composer/migrations/0096_remove_connectivitystatementtriple_unique_statement_relationship_triple_and_more.py b/applications/composer/backend/composer/migrations/0096_remove_connectivitystatementtriple_unique_statement_relationship_triple_and_more.py new file mode 100644 index 00000000..77627a20 --- /dev/null +++ b/applications/composer/backend/composer/migrations/0096_remove_connectivitystatementtriple_unique_statement_relationship_triple_and_more.py @@ -0,0 +1,141 @@ +# Generated by Django 4.1.13 on 2025-10-13 12:21 + +from django.db import migrations, models + + +def migrate_triple_to_triples_forward(apps, schema_editor): + """ + Migrate data from triple ForeignKey to triples ManyToMany field. + Consolidates duplicate records with the same connectivity_statement and relationship. 
+ """ + ConnectivityStatementTriple = apps.get_model("composer", "ConnectivityStatementTriple") + + # Delete any ConnectivityStatementTriple where triple is NULL (and free text was also null or empty - captured in 0094) + # These are orphaned records + ConnectivityStatementTriple.objects.filter(triple__isnull=True).delete() + + from collections import defaultdict + groups = defaultdict(list) + for statement_triple in ConnectivityStatementTriple.objects.filter( + triple__isnull=False + ).select_related('triple').iterator(chunk_size=1000): + key = (statement_triple.connectivity_statement_id, statement_triple.relationship_id) + groups[key].append(statement_triple) + + records_to_delete_ids = [] + for key, records in groups.items(): + # Keep the first record as the primary one + primary_record = records[0] + # Collect all unique triple IDs from all records + triple_ids = set() + for record in records: + if record.triple_id: + triple_ids.add(record.triple_id) + # Add all triples to the primary record's M2M field in one operation + if triple_ids: + primary_record.triples.add(*triple_ids) + # Collect IDs of duplicate records to delete + records_to_delete_ids.extend([record.id for record in records[1:]]) + + # Bulk delete duplicate records in chunks to avoid memory issues + chunk_size = 1000 + for i in range(0, len(records_to_delete_ids), chunk_size): + chunk = records_to_delete_ids[i:i + chunk_size] + ConnectivityStatementTriple.objects.filter(id__in=chunk).delete() + + +def migrate_triple_to_triples_backward(apps, schema_editor): + """ + Reverse migration: split M2M triples back into separate FK records. + Creates one ConnectivityStatementTriple record per triple in the M2M field. 
+ """ + ConnectivityStatementTriple = apps.get_model("composer", "ConnectivityStatementTriple") + + records_to_create = [] + records_to_update = [] + + # Use iterator() with prefetch_related for efficient M2M access + for statement_triple in ConnectivityStatementTriple.objects.prefetch_related( + 'triples' + ).iterator(chunk_size=1000): + triples = list(statement_triple.triples.all()) + + if len(triples) == 0: + # No triples, keep the record as is with triple=None + continue + elif len(triples) == 1: + # Single triple, just set the FK + statement_triple.triple_id = triples[0].id + records_to_update.append(statement_triple) + else: + # Multiple triples: keep first one in current record, create new records for others + statement_triple.triple_id = triples[0].id + records_to_update.append(statement_triple) + + # Create new records for remaining triples + for triple in triples[1:]: + records_to_create.append( + ConnectivityStatementTriple( + connectivity_statement_id=statement_triple.connectivity_statement_id, + relationship_id=statement_triple.relationship_id, + triple_id=triple.id, + ) + ) + + # Bulk update existing records in chunks + chunk_size = 1000 + for i in range(0, len(records_to_update), chunk_size): + chunk = records_to_update[i:i + chunk_size] + ConnectivityStatementTriple.objects.bulk_update(chunk, ['triple'], batch_size=chunk_size) + + # Bulk create new records in chunks + for i in range(0, len(records_to_create), chunk_size): + chunk = records_to_create[i:i + chunk_size] + ConnectivityStatementTriple.objects.bulk_create(chunk, batch_size=chunk_size) + + +class Migration(migrations.Migration): + + dependencies = [ + ("composer", "0095_alter_relationship_type"), + ] + + operations = [ + # Step 1: Add the new ManyToMany field (triples) while keeping the old FK field + migrations.AddField( + model_name="connectivitystatementtriple", + name="triples", + field=models.ManyToManyField( + blank=True, + related_name="statement_triple_relationships", + 
to="composer.triple", + ), + ), + + # Step 2: Migrate data from triple FK to triples M2M + migrations.RunPython( + migrate_triple_to_triples_forward, + migrate_triple_to_triples_backward, + ), + + # Step 3: Remove the old constraint that includes triple field + migrations.RemoveConstraint( + model_name="connectivitystatementtriple", + name="unique_statement_relationship_triple", + ), + + # Step 4: Remove the old triple ForeignKey field + migrations.RemoveField( + model_name="connectivitystatementtriple", + name="triple", + ), + + # Step 5: Add the new unique constraint (without triple field) + migrations.AddConstraint( + model_name="connectivitystatementtriple", + constraint=models.UniqueConstraint( + fields=("connectivity_statement", "relationship"), + name="unique_statement_relationship_triple", + ), + ), + ] diff --git a/applications/composer/backend/composer/migrations/0097_relationship_custom_ingestion_code.py b/applications/composer/backend/composer/migrations/0097_relationship_custom_ingestion_code.py new file mode 100644 index 00000000..0fa9dc42 --- /dev/null +++ b/applications/composer/backend/composer/migrations/0097_relationship_custom_ingestion_code.py @@ -0,0 +1,24 @@ +# Generated by Django 4.1.13 on 2025-10-14 16:49 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ( + "composer", + "0096_remove_connectivitystatementtriple_unique_statement_relationship_triple_and_more", + ), + ] + + operations = [ + migrations.AddField( + model_name="relationship", + name="custom_ingestion_code", + field=models.TextField( + blank=True, + null=True, + ), + ), + ] diff --git a/applications/composer/backend/composer/models.py b/applications/composer/backend/composer/models.py index 49e61047..a02b8538 100644 --- a/applications/composer/backend/composer/models.py +++ b/applications/composer/backend/composer/models.py @@ -93,7 +93,7 @@ def formfield(self, *args, **kwargs): return super().formfield(*args, **kwargs) 
-def validate_provenance_uri(value): +def validate_uri(value): """Validate that the URI is a valid DOI, PMID, PMCID, or URL""" if not value or not value.strip(): raise ValidationError("URI cannot be empty.") @@ -140,13 +140,13 @@ def validate_provenance_uri(value): ) -class ProvenanceUriField(models.CharField): +class UriField(models.CharField): """Custom field for provenance URIs that accepts DOI, PMID, PMCID, or URLs""" - - def __init__(self, *args, **kwargs): - kwargs['validators'] = kwargs.get('validators', []) + [validate_provenance_uri] - super().__init__(*args, **kwargs) + default_validators = [validate_uri] +# --- Backward compatibility alias for old migrations --- +ProvenanceUriField = UriField +validate_provenance_uri = validate_uri # Model Managers class ConnectivityStatementManager(models.Manager): @@ -895,8 +895,7 @@ def system_exported(self, *args, **kwargs): permission=ConnectivityStatementStateService.has_permission_to_transition_to_invalid, ) def invalid(self, *args, **kwargs): - self.has_statement_been_exported = True - self.save(update_fields = ["has_statement_been_exported"]) + self._perform_export_logic() @transition( field=state, @@ -1050,8 +1049,9 @@ class Relationship(models.Model): title = models.CharField(max_length=255) predicate_name = models.CharField(max_length=255) predicate_uri = models.URLField() - type = models.CharField(max_length=10, choices=RelationshipType.choices) + type = models.CharField(max_length=20, choices=RelationshipType.choices) order = models.PositiveIntegerField(default=0) + custom_ingestion_code = models.TextField(blank=True, null=True) def __str__(self): return self.title @@ -1082,29 +1082,106 @@ class Meta: ] -class ConnectivityStatementTriple(models.Model): - connectivity_statement = models.ForeignKey(ConnectivityStatement, on_delete=models.CASCADE, related_name="statement_triples") +class AbstractConnectivityStatementRelationship(models.Model): + """ + Abstract base class for all connectivity statement 
relationships. + Provides common fields and behavior for different relationship types. + """ + connectivity_statement = models.ForeignKey( + ConnectivityStatement, + on_delete=models.CASCADE + ) relationship = models.ForeignKey(Relationship, on_delete=models.CASCADE) - triple = models.ForeignKey(Triple, null=True, blank=True, on_delete=models.SET_NULL) - free_text = models.TextField(null=True, blank=True) + + class Meta: + abstract = True + + def __str__(self): + return f"{self.connectivity_statement} - {self.relationship.title}" + + +class ConnectivityStatementTriple(AbstractConnectivityStatementRelationship): + """ + Represents a relationship with triple value(s) (single or multi-select from predefined options). + """ + triples = models.ManyToManyField( + Triple, + blank=True, + related_name='statement_triple_relationships' + ) class Meta: constraints = [ models.UniqueConstraint( - fields=['connectivity_statement', 'relationship', 'triple'], + fields=['connectivity_statement', 'relationship'], name='unique_statement_relationship_triple' ) ] + verbose_name_plural = "Connectivity Statement Triples" def clean(self): - if self.triple and self.free_text: - raise ValidationError("Only one of 'triple' or 'free_text' should be set.") - if not self.triple and not self.free_text: - raise ValidationError("One of 'triple' or 'free_text' must be set.") - if self.relationship.type == RelationshipType.TEXT and self.triple: - raise ValidationError("Text relationships must use 'free_text', not 'triple'.") - if self.relationship.type in [RelationshipType.SINGLE, RelationshipType.MULTI] and self.free_text: - raise ValidationError("Select-type relationships must use 'triple', not 'free_text'.") + if not self.relationship_id: + return + if self.relationship.type not in [RelationshipType.TRIPLE_SINGLE, RelationshipType.TRIPLE_MULTI]: + raise ValidationError("This model should only be used for triple relationships.") + + def save(self, *args, **kwargs): + self.clean() + 
super().save(*args, **kwargs) + + +class ConnectivityStatementText(AbstractConnectivityStatementRelationship): + """ + Represents a relationship with free text value. + """ + text = models.TextField() + + class Meta: + constraints = [ + models.UniqueConstraint( + fields=['connectivity_statement', 'relationship'], + name='unique_statement_relationship_text' + ) + ] + verbose_name_plural = "Connectivity Statement Texts" + + def clean(self): + if not self.relationship_id: + return + if self.relationship.type != RelationshipType.TEXT: + raise ValidationError("This model should only be used for text relationships.") + if not self.text or not self.text.strip(): + raise ValidationError("Text must be set for text relationships.") + + def save(self, *args, **kwargs): + self.clean() + super().save(*args, **kwargs) + + +class ConnectivityStatementAnatomicalEntity(AbstractConnectivityStatementRelationship): + """ + Represents a relationship with anatomical entity values (single or multi-select). + """ + anatomical_entities = models.ManyToManyField( + AnatomicalEntity, + blank=True, + related_name='statement_relationships' + ) + + class Meta: + constraints = [ + models.UniqueConstraint( + fields=['connectivity_statement', 'relationship'], + name='unique_statement_relationship_anatomical' + ) + ] + verbose_name_plural = "Connectivity Statement Anatomical Entities" + + def clean(self): + if not self.relationship_id: + return + if self.relationship.type not in [RelationshipType.ANATOMICAL_MULTI]: + raise ValidationError("This model should only be used for anatomical entity relationships.") def save(self, *args, **kwargs): self.clean() @@ -1301,7 +1378,7 @@ class Provenance(models.Model): connectivity_statement = models.ForeignKey( ConnectivityStatement, on_delete=models.CASCADE ) - uri = ProvenanceUriField(max_length=500) + uri = UriField(max_length=500) def __str__(self): return self.uri @@ -1309,6 +1386,22 @@ def __str__(self): class Meta: verbose_name_plural = "Provenances" + 
+class ExpertConsultant(models.Model): + """Expert Consultant""" + + connectivity_statement = models.ForeignKey( + ConnectivityStatement, on_delete=models.CASCADE + ) + uri = UriField(max_length=500) + + def __str__(self): + return self.uri + + class Meta: + verbose_name_plural = "Expert Consultants" + + class Note(models.Model): """Note""" @@ -1476,3 +1569,4 @@ class Meta: def __str__(self): return f"{self.alert_type.name} for Statement {self.connectivity_statement.id}" + diff --git a/applications/composer/backend/composer/pure_enums.py b/applications/composer/backend/composer/pure_enums.py new file mode 100644 index 00000000..c38a42e2 --- /dev/null +++ b/applications/composer/backend/composer/pure_enums.py @@ -0,0 +1,56 @@ +from enum import Enum + + +class ConnectivityErrors(Enum): + INVALID_FORWARD_CONNECTION = "Invalid forward connection" + + +class BulkActionType(str, Enum): + ASSIGN_USER = "assign_user" + ASSIGN_TAG = "assign_tag" + WRITE_NOTE = "write_note" + CHANGE_STATUS = "change_status" + ASSIGN_POPULATION_SET = "assign_population_set" + + +class CircuitType(str, Enum): + """ + Circuit type enumeration - Django-independent. + This is the single source of truth for circuit type values. + """ + SENSORY = "SENSORY" + MOTOR = "MOTOR" + INTRINSIC = "INTRINSIC" + PROJECTION = "PROJECTION" + ANAXONIC = "ANAXONIC" + + +class SentenceState(str, Enum): + """ + Sentence state enumeration - Django-independent. + This is the single source of truth for sentence state values. + """ + OPEN = "open" + NEEDS_FURTHER_REVIEW = "needs_further_review" + COMPOSE_LATER = "compose_later" + READY_TO_COMPOSE = "ready_to_compose" + COMPOSE_NOW = "compose_now" + COMPLETED = "completed" + EXCLUDED = "excluded" + + +class CSState(str, Enum): + """ + Connectivity Statement state enumeration - Django-independent. + This is the single source of truth for CS state values. 
+ """ + DRAFT = "draft" + COMPOSE_NOW = "compose_now" + IN_PROGRESS = "in_progress" + TO_BE_REVIEWED = "to_be_reviewed" + REVISE = "revise" + REJECTED = "rejected" + NPO_APPROVED = "npo_approved" + EXPORTED = "exported" + DEPRECATED = "deprecated" + INVALID = "invalid" diff --git a/applications/composer/backend/composer/services/cs_ingestion/cs_ingestion_services.py b/applications/composer/backend/composer/services/cs_ingestion/cs_ingestion_services.py index 4a5e6dcb..72382835 100644 --- a/applications/composer/backend/composer/services/cs_ingestion/cs_ingestion_services.py +++ b/applications/composer/backend/composer/services/cs_ingestion/cs_ingestion_services.py @@ -2,9 +2,9 @@ from django.db import transaction -from composer.models import AlertType +from composer.models import AlertType, Relationship +from composer.constants import INGESTION_ANOMALIES_LOG_PATH, INGESTION_INGESTED_LOG_PATH from composer.services.cs_ingestion.helpers.overwritable_helper import ( - filter_statements_by_population_uris, get_overwritable_and_new_statements, ) from composer.services.cs_ingestion.helpers.sentence_helper import ( @@ -20,29 +20,104 @@ from .models import LoggableAnomaly, Severity from .neurondm_script import main as get_statements_from_neurondm -logger_service = LoggerService() +def get_composer_data(): + """ + Get all composer data from the database (custom relationships and alert URIs). + This step MUST run in the Django environment. 
+ + Returns: Dict with 'custom_relationships' and 'statement_alert_uris' + """ + custom_relationships = list( + Relationship.objects.filter( + custom_ingestion_code__isnull=False + ).exclude( + custom_ingestion_code='' + ).values('id', 'title', 'type', 'custom_ingestion_code') + ) + + statement_alert_uris = list(AlertType.objects.values_list("uri", flat=True)) + + return { + 'custom_relationships': custom_relationships, + 'statement_alert_uris': statement_alert_uris, + } -def ingest_statements( - update_upstream=False, - update_anatomical_entities=False, - disable_overwrite=False, + +def process_neurondm( full_imports=[], label_imports=[], population_uris=None, + composer_data=None, + logger_service_param=None, ): + """ + Process NeuroDM neurons, execute custom code, filter by population. + This is a thin wrapper around get_statements_from_neurondm. + + Args: + full_imports: List of full imports + label_imports: List of label imports + population_uris: Set of population URIs to filter (None means all) + composer_data: Dict with 'custom_relationships' and 'statement_alert_uris' (will query from DB if None) + logger_service_param: Logger service instance (optional) + + Returns: List of composer statement dictionaries + """ + if logger_service_param is None: + logger_service_param = LoggerService( + ingestion_anomalies_log_path=INGESTION_ANOMALIES_LOG_PATH, + ingested_log_path=INGESTION_INGESTED_LOG_PATH + ) + + # If composer_data not provided, get it from database + if composer_data is None: + composer_data = get_composer_data() + + custom_relationships = composer_data.get('custom_relationships', []) + statement_alert_uris = set(composer_data.get('statement_alert_uris', [])) statements_list = get_statements_from_neurondm( full_imports=full_imports, label_imports=label_imports, - logger_service_param=logger_service, - statement_alert_uris=set(AlertType.objects.values_list("uri", flat=True)), + logger_service_param=logger_service_param, + 
statement_alert_uris=statement_alert_uris, + population_uris=population_uris, + custom_relationships=custom_relationships, ) - # Filter statements by population URIs if a population file was provided - statements_list = filter_statements_by_population_uris(statements_list, population_uris) + return statements_list + + +def ingest_to_database( + statements_list, + update_upstream=False, + update_anatomical_entities=False, + disable_overwrite=False, + force_state_transition=False, + logger_service_param=None, +): + """ + Validate and ingest statements into the database. + + Args: + statements_list: List of composer statement dictionaries from process_neurondm + update_upstream: Whether to update upstream statements after ingestion + update_anatomical_entities: Whether to update anatomical entities + disable_overwrite: Whether to disable overwriting existing statements + force_state_transition: If True, allows state transitions from any state (e.g., TO_BE_REVIEWED -> EXPORTED). + Use when ingesting pre-filtered populations. 
+ logger_service_param: Logger service instance (optional) + + Returns: Boolean indicating successful transaction + """ + if logger_service_param is None: + logger_service_param = LoggerService( + ingestion_anomalies_log_path=INGESTION_ANOMALIES_LOG_PATH, + ingested_log_path=INGESTION_INGESTED_LOG_PATH + ) overridable_and_new_statements = get_overwritable_and_new_statements( - statements_list, disable_overwrite, population_uris + statements_list, disable_overwrite, force_overwrite=force_state_transition ) statements = validate_statements( overridable_and_new_statements, update_anatomical_entities @@ -54,13 +129,13 @@ def ingest_statements( for statement in statements: sentence, _ = get_or_create_sentence(statement) create_or_update_connectivity_statement( - statement, sentence, update_anatomical_entities, logger_service, population_uris + statement, sentence, update_anatomical_entities, logger_service_param, force_state_transition ) update_forward_connections(statements) except Exception as e: - logger_service.add_anomaly( + logger_service_param.add_anomaly( LoggableAnomaly( statement_id=None, entity_id=None, @@ -71,11 +146,52 @@ def ingest_statements( successful_transaction = False logging.error(f"Ingestion aborted due to {e}") - logger_service.write_anomalies_to_file() if successful_transaction: if update_upstream: update_upstream_statements() - logger_service.write_ingested_statements_to_file(statements) + + return successful_transaction + + +def ingest_statements( + update_upstream=False, + update_anatomical_entities=False, + disable_overwrite=False, + full_imports=[], + label_imports=[], + population_uris=None, +): + """ + Complete ingestion process: runs all 3 steps. + This is a convenience wrapper that maintains backward compatibility. 
+ """ + logger_service = LoggerService( + ingestion_anomalies_log_path=INGESTION_ANOMALIES_LOG_PATH, + ingested_log_path=INGESTION_INGESTED_LOG_PATH + ) + + # Get composer data (custom relationships and alert URIs) + composer_data = get_composer_data() + + # Process NeuroDM neurons + statements_list = process_neurondm( + full_imports=full_imports, + label_imports=label_imports, + population_uris=population_uris, + composer_data=composer_data, + logger_service_param=logger_service, + ) + + # Database ingestion + # When population_uris is provided, use force_state_transition to allow state changes from any state + successful_transaction = ingest_to_database( + statements_list=statements_list, + update_upstream=update_upstream, + update_anatomical_entities=update_anatomical_entities, + disable_overwrite=disable_overwrite, + force_state_transition=(population_uris is not None), + logger_service_param=logger_service, + ) return successful_transaction diff --git a/applications/composer/backend/composer/services/cs_ingestion/helpers/changes_detector.py b/applications/composer/backend/composer/services/cs_ingestion/helpers/changes_detector.py index 0683f430..9261e996 100644 --- a/applications/composer/backend/composer/services/cs_ingestion/helpers/changes_detector.py +++ b/applications/composer/backend/composer/services/cs_ingestion/helpers/changes_detector.py @@ -2,7 +2,7 @@ from neurondm import orders from composer.models import AnatomicalEntity -from composer.services.cs_ingestion.helpers.common_helpers import VALIDATION_ERRORS, SPECIES, PROVENANCE, ID, \ +from composer.services.cs_ingestion.helpers.common_helpers import VALIDATION_ERRORS, SPECIES, PROVENANCE, EXPERT_CONSULTANTS, ID, \ FORWARD_CONNECTION, ORIGINS, VIAS, DESTINATIONS from composer.services.cs_ingestion.models import ValidationErrors @@ -36,6 +36,12 @@ def has_changes(connectivity_statement, statement, defaults): if current_provenance != new_provenance: return True + # Check for changes in expert 
consultants + current_expert_consultants = set(expert.uri for expert in connectivity_statement.expertconsultant_set.all()) + new_expert_consultants = set(statement.get(EXPERT_CONSULTANTS) or []) + if current_expert_consultants != new_expert_consultants: + return True + # Check for changes in forward_connection current_forward_connections = set( connection.reference_uri for connection in connectivity_statement.forward_connection.all()) diff --git a/applications/composer/backend/composer/services/cs_ingestion/helpers/common_helpers.py b/applications/composer/backend/composer/services/cs_ingestion/helpers/common_helpers.py index 99f44c0d..f915caa4 100644 --- a/applications/composer/backend/composer/services/cs_ingestion/helpers/common_helpers.py +++ b/applications/composer/backend/composer/services/cs_ingestion/helpers/common_helpers.py @@ -1,7 +1,6 @@ import logging -from composer.enums import CircuitType -from composer.models import AnatomicalEntityMeta +from composer.pure_enums import CircuitType ID = "id" ORIGINS = "origins" @@ -12,6 +11,11 @@ SENTENCE_NUMBER = 'sentence_number' ENTITY_URI = 'loc' TYPE = 'type' +ANATOMICAL_ENTITIES = 'anatomical_entities' +FROM_ENTITIES = 'from_entities' +ORDER = 'order' +REGION = 'region' +LAYER = 'layer' CIRCUIT_TYPE = 'circuit_type' FUNCTIONAL_CIRCUIT_ROLE = 'circuit_role' SEX = 'sex' @@ -19,6 +23,7 @@ OTHER_PHENOTYPE = 'other_phenotypes' SPECIES = 'species' PROVENANCE = 'provenance' +EXPERT_CONSULTANTS = 'expert_consultants' NOTE_ALERT = 'note_alert' FORWARD_CONNECTION = "forward_connection" STATEMENT_ALERTS = "statement_alerts" diff --git a/applications/composer/backend/composer/services/cs_ingestion/helpers/getters.py b/applications/composer/backend/composer/services/cs_ingestion/helpers/getters.py index 29383f14..b10dd7cb 100644 --- a/applications/composer/backend/composer/services/cs_ingestion/helpers/getters.py +++ b/applications/composer/backend/composer/services/cs_ingestion/helpers/getters.py @@ -7,12 +7,16 @@ 
ProjectionPhenotype, PopulationSet, ) +from composer.constants import INGESTION_ANOMALIES_LOG_PATH, INGESTION_INGESTED_LOG_PATH from composer.services.cs_ingestion.helpers.common_helpers import get_value_or_none, SEX, FUNCTIONAL_CIRCUIT_ROLE, ID, \ CIRCUIT_TYPE, CIRCUIT_TYPE_MAPPING, PHENOTYPE, OTHER_PHENOTYPE from composer.services.cs_ingestion.logging_service import LoggerService from composer.services.cs_ingestion.models import LoggableAnomaly -logger_service = LoggerService() +logger_service = LoggerService( + ingestion_anomalies_log_path=INGESTION_ANOMALIES_LOG_PATH, + ingested_log_path=INGESTION_INGESTED_LOG_PATH +) def get_sex(statement: Dict) -> Sex: diff --git a/applications/composer/backend/composer/services/cs_ingestion/helpers/overwritable_helper.py b/applications/composer/backend/composer/services/cs_ingestion/helpers/overwritable_helper.py index 49b91404..5a4c6024 100644 --- a/applications/composer/backend/composer/services/cs_ingestion/helpers/overwritable_helper.py +++ b/applications/composer/backend/composer/services/cs_ingestion/helpers/overwritable_helper.py @@ -2,12 +2,16 @@ from composer.enums import CSState, SentenceState from composer.models import Sentence, ConnectivityStatement +from composer.constants import INGESTION_ANOMALIES_LOG_PATH, INGESTION_INGESTED_LOG_PATH from composer.services.cs_ingestion.helpers.common_helpers import ID from composer.services.cs_ingestion.logging_service import STATEMENT_INCORRECT_STATE, SENTENCE_INCORRECT_STATE, \ LoggerService from composer.services.cs_ingestion.models import LoggableAnomaly -logger_service = LoggerService() +logger_service = LoggerService( + ingestion_anomalies_log_path=INGESTION_ANOMALIES_LOG_PATH, + ingested_log_path=INGESTION_INGESTED_LOG_PATH +) @@ -34,11 +38,11 @@ def filter_statements_by_population_uris(statements_list, population_uris): ] -def get_overwritable_and_new_statements(statements_list: List[Dict[str, Any]], disable_overwrite: bool=False, population_uris: 
Optional[Set[str]]=None) -> List[Dict[str, Any]]: +def get_overwritable_and_new_statements(statements_list: List[Dict[str, Any]], disable_overwrite: bool=False, force_overwrite: bool=False) -> List[Dict[str, Any]]: overwritable_and_new_statements = [ statement for statement in statements_list - if is_new_or_overwritable_statement(statement, disable_overwrite, population_uris) + if is_new_or_overwritable_statement(statement, disable_overwrite, force_overwrite) ] return overwritable_and_new_statements @@ -56,11 +60,15 @@ def is_new_or_overwritable_sentence(statement: Dict, disable_overwrite: bool) -> return can_sentence_be_overwritten(sentence, statement) -def is_new_or_overwritable_statement(statement: Dict, disable_overwrite: bool, population_uris: Optional[Set[str]]=None) -> bool: +def is_new_or_overwritable_statement(statement: Dict, disable_overwrite: bool, force_overwrite: bool=False) -> bool: """ If disable_overwrite is True, then the statement is considered invalid for overwriting - if it already exists in the database. - However, statements with URIs in population_uris should be updatable regardless of their status (unless disable_overwrite is True). - Note: When population_uris is provided, statement filtering is done at the service layer. + However, if force_overwrite is True, statements should be updatable regardless of their status (unless disable_overwrite is True). 
+ + Args: + statement: The statement dictionary + disable_overwrite: If True, prevents all overwrites + force_overwrite: If True, allows overwriting statements in any state (e.g., when ingesting pre-filtered populations) """ statement_uri = statement[ID] @@ -68,12 +76,12 @@ def is_new_or_overwritable_statement(statement: Dict, disable_overwrite: bool, p try: connectivity_statement = ConnectivityStatement.objects.get(reference_uri=statement_uri) - # If disable_overwrite is True, then no overwrites should happen, not even the ones from the population file + # If disable_overwrite is True, then no overwrites should happen if disable_overwrite: return False - # If the statement URI is in the population_uris set, it should be updatable regardless of status - if population_uris is not None and statement_uri in population_uris: + # If force_overwrite is True, allow overwriting regardless of state + if force_overwrite: return True except ConnectivityStatement.DoesNotExist: diff --git a/applications/composer/backend/composer/services/cs_ingestion/helpers/sentence_helper.py b/applications/composer/backend/composer/services/cs_ingestion/helpers/sentence_helper.py index 80cf324d..36acafb5 100644 --- a/applications/composer/backend/composer/services/cs_ingestion/helpers/sentence_helper.py +++ b/applications/composer/backend/composer/services/cs_ingestion/helpers/sentence_helper.py @@ -4,12 +4,16 @@ from composer.enums import SentenceState from composer.models import Sentence +from composer.constants import INGESTION_ANOMALIES_LOG_PATH, INGESTION_INGESTED_LOG_PATH from composer.services.cs_ingestion.helpers.common_helpers import SENTENCE_NUMBER, LABEL, ID from composer.services.cs_ingestion.logging_service import LoggerService from composer.services.cs_ingestion.models import LoggableAnomaly NOW = datetime.now().strftime("%Y%m%d%H%M%S") -logger_service = LoggerService() +logger_service = LoggerService( + ingestion_anomalies_log_path=INGESTION_ANOMALIES_LOG_PATH, + 
ingested_log_path=INGESTION_INGESTED_LOG_PATH +) def get_or_create_sentence(statement: Dict) -> Tuple[Sentence, bool]: diff --git a/applications/composer/backend/composer/services/cs_ingestion/helpers/statement_helper.py b/applications/composer/backend/composer/services/cs_ingestion/helpers/statement_helper.py index 94318a6e..c13ae752 100644 --- a/applications/composer/backend/composer/services/cs_ingestion/helpers/statement_helper.py +++ b/applications/composer/backend/composer/services/cs_ingestion/helpers/statement_helper.py @@ -1,11 +1,12 @@ import re from typing import Dict, Tuple, List +import traceback from django.contrib.auth.models import User from composer.services.cs_ingestion.logging_service import LoggerService from composer.services.state_services import ConnectivityStatementStateService -from composer.enums import CSState, NoteType +from composer.enums import CSState, NoteType, RelationshipType from composer.management.commands.ingest_nlp_sentence import ID from composer.models import ( AlertType, @@ -14,7 +15,14 @@ Note, Specie, Provenance, + ExpertConsultant, StatementAlert, + Relationship, + Triple, + ConnectivityStatementTriple, + ConnectivityStatementText, + ConnectivityStatementAnatomicalEntity, + AnatomicalEntity, ) from composer.services.cs_ingestion.helpers.anatomical_entities_helper import ( add_origins, @@ -30,6 +38,7 @@ STATE, NOTE_ALERT, PROVENANCE, + EXPERT_CONSULTANTS, SPECIES, FORWARD_CONNECTION, ) @@ -60,7 +69,7 @@ def create_or_update_connectivity_statement( sentence: Sentence, update_anatomical_entities: bool, logger_service: LoggerService, - population_uris: set = None, + force_state_transition: bool = False, ) -> Tuple[ConnectivityStatement, bool]: """ Create or update a connectivity statement from ingested data. 
@@ -70,8 +79,8 @@ def create_or_update_connectivity_statement( sentence: The associated sentence object update_anatomical_entities: Whether to update anatomical entity relationships logger_service: Service for logging anomalies - population_uris: Set of URIs from population file. When provided, the system_exported - transition is used to allow state changes from any state. + force_state_transition: If True, allows state transitions from any state (e.g., TO_BE_REVIEWED -> EXPORTED). + Use when ingesting pre-filtered populations. Returns: Tuple of (ConnectivityStatement, created) where created is True if new @@ -88,7 +97,7 @@ def create_or_update_connectivity_statement( "population": get_or_create_populationset(populationset_name), "projection_phenotype": get_projection_phenotype(statement), "reference_uri": statement[ID], - "state": CSState.EXPORTED, + "state": CSState.NPO_APPROVED, "curie_id": statement[LABEL], } @@ -111,8 +120,8 @@ def create_or_update_connectivity_statement( validation_errors = statement.get(VALIDATION_ERRORS, ValidationErrors()) # State transitions: Handle validation errors and state updates - # When population_uris is provided (population file used), use system_exported - # transition to allow state changes from any state + # When force_state_transition is True, use system_exported transition + # to allow state changes from any state (e.g., TO_BE_REVIEWED -> EXPORTED) if validation_errors.has_errors(): error_message = validation_errors.to_string() if connectivity_statement.state != CSState.INVALID: @@ -121,9 +130,9 @@ def create_or_update_connectivity_statement( create_invalid_note(connectivity_statement, error_message) else: # Statement is valid - attempt transition to EXPORTED - # Use system_exported transition when population_uris is provided + # Use system_exported transition when force_state_transition is True # This allows transitioning from any state (e.g., TO_BE_REVIEWED -> EXPORTED) - if population_uris is not None: + if 
force_state_transition: if connectivity_statement.state != CSState.EXPORTED: transition_success, error_message = do_system_transition_to_exported(connectivity_statement) if not transition_success: @@ -154,11 +163,247 @@ def create_or_update_connectivity_statement( update_many_to_many_fields( connectivity_statement, statement, update_anatomical_entities ) + + # Process dynamic relationships with custom code + process_dynamic_relationships(connectivity_statement, statement, logger_service, update_anatomical_entities) + statement[STATE] = connectivity_statement.state return connectivity_statement, created +def process_dynamic_relationships( + connectivity_statement: ConnectivityStatement, + statement: Dict, + logger_service: LoggerService, + update_anatomical_entities: bool = False, +): + """ + Reads pre-computed results from Step 1 and creates database entities. + """ + # Get pre-computed custom relationship results from Step 1 + custom_results = statement.get('_custom_relationship_results', {}) + + if not custom_results: + return + + # Get all relationships to map IDs to objects + relationship_ids = [int(rel_id) for rel_id in custom_results.keys()] + relationships = {r.id: r for r in Relationship.objects.filter(id__in=relationship_ids)} + + for relationship_id_str, result in custom_results.items(): + relationship_id = int(relationship_id_str) + relationship = relationships.get(relationship_id) + if not relationship: + continue + + try: + # Process result based on relationship type + if relationship.type == RelationshipType.TRIPLE_MULTI or relationship.type == RelationshipType.TRIPLE_SINGLE: + process_triple_relationship(connectivity_statement, relationship, result, logger_service) + elif relationship.type == RelationshipType.TEXT: + process_text_relationship(connectivity_statement, relationship, result) + elif relationship.type == RelationshipType.ANATOMICAL_MULTI: + process_anatomical_relationship(connectivity_statement, relationship, result, logger_service, 
update_anatomical_entities) + else: + log_custom_relationship_error( + logger_service, + f"Unknown relationship type: {relationship.type}", + statement.get(ID), + relationship.id, + {'relationship_title': relationship.title, 'type': relationship.type} + ) + + except Exception as e: + # Log error and continue with other relationships + log_custom_relationship_error( + logger_service, + f"Failed to process custom relationship '{relationship.title}': {str(e)}", + statement.get(ID), + relationship.id, + { + 'relationship_title': relationship.title, + 'error': str(e), + 'traceback': traceback.format_exc() + } + ) + + +def process_triple_relationship( + connectivity_statement: ConnectivityStatement, + relationship: Relationship, + result: List[Dict], + logger_service: LoggerService, +): + """ + Process TRIPLE relationship results. + Expected result format: [{'name': str, 'uri': str}, ...] + """ + if not isinstance(result, list): + result = [result] + + triples = [] + for item in result: + if not isinstance(item, dict) or 'name' not in item or 'uri' not in item: + log_custom_relationship_error( + logger_service, + f"Invalid triple format for relationship '{relationship.title}': {item}", + connectivity_statement.reference_uri, + relationship.id, + {'item': str(item), 'relationship_title': relationship.title} + ) + continue + + # Get or create the triple + triple, created = Triple.objects.get_or_create( + name=item['name'], + uri=item['uri'], + relationship=relationship + ) + triples.append(triple) + + if triples: + # Get or create the ConnectivityStatementTriple + cs_triple, created = ConnectivityStatementTriple.objects.get_or_create( + connectivity_statement=connectivity_statement, + relationship=relationship + ) + cs_triple.triples.set(triples) + + +def process_text_relationship( + connectivity_statement: ConnectivityStatement, + relationship: Relationship, + result, +): + """ + Process TEXT relationship results. 
+ Expected result format: string or list of strings + """ + if isinstance(result, list): + text = ', '.join(str(item) for item in result) + else: + text = str(result) + + # Get or create the ConnectivityStatementText + cs_text, created = ConnectivityStatementText.objects.update_or_create( + connectivity_statement=connectivity_statement, + relationship=relationship, + defaults={'text': text} + ) + + +def process_anatomical_relationship( + connectivity_statement: ConnectivityStatement, + relationship: Relationship, + result: List, + logger_service: LoggerService, + update_anatomical_entities: bool = False, +): + """ + Process ANATOMICAL_ENTITY relationship results. + + Expected result formats: + - Simple entities: [uri1, uri2, ...] + - Region-layer pairs: [{'region': 'region_uri', 'layer': 'layer_uri'}, ...] + - Mixed: list combining both formats + """ + from composer.services.cs_ingestion.helpers.anatomical_entities_helper import ( + get_or_create_simple_entity, + get_or_create_complex_entity, + ) + from composer.services.cs_ingestion.exceptions import EntityNotFoundException + + if not isinstance(result, list): + result = [result] + + anatomical_entities = [] + for item in result: + try: + # Check if item is a region-layer pair (dict with 'region' and 'layer' keys) + if isinstance(item, dict) and 'region' in item and 'layer' in item: + # Process as region-layer pair + region_uri = str(item['region']) + layer_uri = str(item['layer']) + ae, _ = get_or_create_complex_entity(region_uri, layer_uri, update_anatomical_entities) + anatomical_entities.append(ae) + elif isinstance(item, dict): + # Invalid dict format - log error + log_custom_relationship_error( + logger_service, + f"Invalid anatomical entity format (expected dict with 'region' and 'layer' keys or a URI string): {item}", + connectivity_statement.reference_uri, + relationship.id, + {'item': str(item), 'relationship_title': relationship.title} + ) + else: + # Process as simple entity URI + uri = str(item) + ae, 
_ = get_or_create_simple_entity(uri) + anatomical_entities.append(ae) + except EntityNotFoundException as e: + log_custom_relationship_error( + logger_service, + f"Anatomical entity not found: {str(e)} in relationship '{relationship.title}'", + connectivity_statement.reference_uri, + relationship.id, + {'item': str(item), 'relationship_title': relationship.title, 'error': str(e)} + ) + except AnatomicalEntity.DoesNotExist: + log_custom_relationship_error( + logger_service, + f"Anatomical entity not found for item '{item}' in relationship '{relationship.title}'", + connectivity_statement.reference_uri, + relationship.id, + {'item': str(item), 'relationship_title': relationship.title} + ) + except Exception as e: + log_custom_relationship_error( + logger_service, + f"Error processing anatomical entity '{item}' in relationship '{relationship.title}': {str(e)}", + connectivity_statement.reference_uri, + relationship.id, + {'item': str(item), 'relationship_title': relationship.title, 'error': str(e)} + ) + + if anatomical_entities: + # Get or create the ConnectivityStatementAnatomicalEntity + cs_ae, created = ConnectivityStatementAnatomicalEntity.objects.get_or_create( + connectivity_statement=connectivity_statement, + relationship=relationship + ) + cs_ae.anatomical_entities.set(anatomical_entities) + + +def log_custom_relationship_error( + logger_service: LoggerService, + message: str, + statement_reference: str = None, + relationship_id: int = None, + details: dict = None, +): + """ + Log custom relationship errors using the LoggerService. + These errors are added to the anomalies log file. 
+ """ + from composer.services.cs_ingestion.models import LoggableAnomaly, Severity + + # Format detailed error message + error_msg = f"[CUSTOM_RELATIONSHIP] {message}" + if details: + error_msg += f" | Details: {details}" + + # Add to logger service as an anomaly + logger_service.add_anomaly( + LoggableAnomaly( + statement_id=statement_reference, + entity_id=str(relationship_id) if relationship_id else None, + message=error_msg, + severity=Severity.WARNING + ) + ) + + def update_many_to_many_fields( connectivity_statement: ConnectivityStatement, statement: Dict, @@ -171,17 +416,31 @@ def update_many_to_many_fields( for provenance in connectivity_statement.provenance_set.all(): provenance.delete() + for expert_consultant in connectivity_statement.expertconsultant_set.all(): + expert_consultant.delete() + for destination in connectivity_statement.destinations.all(): destination.delete() for via in connectivity_statement.via_set.all(): via.delete() + + # Clear dynamic relationship data + for cs_triple in connectivity_statement.connectivitystatementtriple_set.all(): + cs_triple.delete() + + for cs_text in connectivity_statement.connectivitystatementtext_set.all(): + cs_text.delete() + + for cs_ae in connectivity_statement.connectivitystatementanatomicalentity_set.all(): + cs_ae.delete() add_origins(connectivity_statement, statement, update_anatomical_entities) add_vias(connectivity_statement, statement, update_anatomical_entities) add_destinations(connectivity_statement, statement, update_anatomical_entities) add_species(connectivity_statement, statement) add_provenances(connectivity_statement, statement) + add_expert_consultants(connectivity_statement, statement) add_notes(connectivity_statement, statement) @@ -206,6 +465,16 @@ def add_provenances(connectivity_statement: ConnectivityStatement, statement: Di Provenance.objects.bulk_create(provenances) +def add_expert_consultants(connectivity_statement: ConnectivityStatement, statement: Dict): + expert_consultants_list 
= statement.get(EXPERT_CONSULTANTS, []) + if expert_consultants_list: + expert_consultants = ( + ExpertConsultant(connectivity_statement=connectivity_statement, uri=uri) + for uri in expert_consultants_list + ) + ExpertConsultant.objects.bulk_create(expert_consultants) + + def add_species(connectivity_statement: ConnectivityStatement, statement: Dict): species = Specie.objects.filter(ontology_uri__in=statement[SPECIES]) connectivity_statement.species.add(*species) diff --git a/applications/composer/backend/composer/services/cs_ingestion/helpers/validators.py b/applications/composer/backend/composer/services/cs_ingestion/helpers/validators.py index 2ab8c3da..e9687421 100644 --- a/applications/composer/backend/composer/services/cs_ingestion/helpers/validators.py +++ b/applications/composer/backend/composer/services/cs_ingestion/helpers/validators.py @@ -3,13 +3,17 @@ from neurondm import orders from composer.models import ConnectivityStatement, Sex, Specie, Region, AnatomicalEntityMeta, Layer +from composer.constants import INGESTION_ANOMALIES_LOG_PATH, INGESTION_INGESTED_LOG_PATH from composer.services.cs_ingestion.helpers.common_helpers import ID, VALIDATION_ERRORS, ORIGINS, DESTINATIONS, VIAS, \ SEX, SPECIES, FORWARD_CONNECTION from composer.services.cs_ingestion.logging_service import LoggerService from composer.services.cs_ingestion.models import ValidationErrors, LoggableAnomaly from django.db.models import Model as DjangoModel -logger_service = LoggerService() +logger_service = LoggerService( + ingestion_anomalies_log_path=INGESTION_ANOMALIES_LOG_PATH, + ingested_log_path=INGESTION_INGESTED_LOG_PATH +) def validate_statements(statements: List[Dict[str, Any]], update_anatomical_entities: bool) -> List[Dict[str, Any]]: diff --git a/applications/composer/backend/composer/services/cs_ingestion/logging_service.py b/applications/composer/backend/composer/services/cs_ingestion/logging_service.py index 1a0e7678..2ca7747d 100644 --- 
a/applications/composer/backend/composer/services/cs_ingestion/logging_service.py +++ b/applications/composer/backend/composer/services/cs_ingestion/logging_service.py @@ -1,13 +1,13 @@ import csv from typing import List, Dict -from composer.enums import CSState, SentenceState +from composer.pure_enums import CSState, SentenceState from composer.services.cs_ingestion.helpers.common_helpers import ID, LABEL, STATE, VALIDATION_ERRORS from composer.services.cs_ingestion.models import LoggableAnomaly AXIOM_NOT_FOUND = "Entity not found in any axiom" -SENTENCE_INCORRECT_STATE = f"Sentence already found and is not in {SentenceState.COMPOSE_NOW} state" -STATEMENT_INCORRECT_STATE = f"Statement already found and is not in {CSState.EXPORTED} or {CSState.INVALID} state" +SENTENCE_INCORRECT_STATE = f"Sentence already found and is not in {SentenceState.COMPOSE_NOW.value} state" +STATEMENT_INCORRECT_STATE = f"Statement already found and is not in {CSState.EXPORTED.value} or {CSState.INVALID.value} state" INCONSISTENT_AXIOMS = "Region and layer found in different axioms" @@ -26,14 +26,52 @@ def __call__(cls, *args, **kwargs): class LoggerService(metaclass=SingletonMeta): - def __init__(self, ingestion_anomalies_log_path='ingestion_anomalies_log.csv', - ingested_log_path='ingested_log.csv'): + def __init__(self, ingestion_anomalies_log_path: str, ingested_log_path: str): + """ + Initialize LoggerService with explicit log file paths. + + Args: + ingestion_anomalies_log_path: Full path to the anomalies log CSV file + ingested_log_path: Full path to the ingested statements log CSV file + """ self.anomalies_log_path = ingestion_anomalies_log_path self.ingested_log_path = ingested_log_path self.anomalies = [] def add_anomaly(self, error: LoggableAnomaly): self.anomalies.append(error) + + def load_anomalies_from_json(self, json_path: str): + """ + Load anomalies from a JSON file (from previous workflow steps). 
+ Expected format: [{"statement_id": ..., "entity_id": ..., "message": ..., "severity": ...}, ...] + """ + import json + import os + from composer.services.cs_ingestion.models import Severity + + if not os.path.exists(json_path): + return # No previous anomalies to load + + try: + with open(json_path, 'r', encoding='utf-8') as f: + anomalies_data = json.load(f) + + for anomaly_dict in anomalies_data: + severity_str = anomaly_dict.get('severity', 'warning') + severity = Severity.ERROR if severity_str == 'error' else Severity.WARNING + + anomaly = LoggableAnomaly( + statement_id=anomaly_dict.get('statement_id'), + entity_id=anomaly_dict.get('entity_id'), + message=anomaly_dict.get('message', ''), + severity=severity, + ) + self.anomalies.append(anomaly) + except Exception as e: + # Log error but don't fail - just skip loading previous anomalies + import logging + logging.warning(f"Could not load anomalies from {json_path}: {e}") def write_anomalies_to_file(self): with open(self.anomalies_log_path, 'w', newline='') as file: diff --git a/applications/composer/backend/composer/services/cs_ingestion/models.py b/applications/composer/backend/composer/services/cs_ingestion/models.py index 0fd1cc40..78d1d197 100644 --- a/applications/composer/backend/composer/services/cs_ingestion/models.py +++ b/applications/composer/backend/composer/services/cs_ingestion/models.py @@ -1,10 +1,72 @@ from enum import Enum -from typing import Set, Optional +from typing import Set, Optional, Dict, List, Any + +from composer.services.cs_ingestion.helpers.common_helpers import ( + ANATOMICAL_ENTITIES, + FROM_ENTITIES, + ORDER, + TYPE, + REGION, + LAYER, + ORIGINS, + VIAS, + DESTINATIONS, + STATEMENT_ALERTS, + VALIDATION_ERRORS, +) class NeuronDMOrigin: def __init__(self, anatomical_entities: Set): self.anatomical_entities = anatomical_entities + + def to_dict(self) -> Dict: + """Convert NeuronDMOrigin to a JSON-serializable dictionary.""" + return { + ANATOMICAL_ENTITIES: 
self._convert_anatomical_entities_to_list(self.anatomical_entities) + } + + @classmethod + def from_dict(cls, data: Dict) -> 'NeuronDMOrigin': + """Create NeuronDMOrigin from a dictionary (deserialization).""" + anatomical_entities = cls._convert_list_to_anatomical_entities( + data.get(ANATOMICAL_ENTITIES, []) + ) + return cls(anatomical_entities) + + @staticmethod + def _convert_anatomical_entities_to_list(anatomical_entities: Set) -> List: + """Convert a set of anatomical entities to a list for JSON serialization.""" + from neurondm import orders + + result = [] + for entity in anatomical_entities: + if isinstance(entity, orders.rl): + # For region-layer pairs, create a structured representation + result.append({ + REGION: str(entity.region), + LAYER: str(entity.layer) + }) + else: + result.append(str(entity)) + return result + + @staticmethod + def _convert_list_to_anatomical_entities(entities_list: List) -> Set: + """Convert a list back to a set of anatomical entities (deserialization).""" + from neurondm import orders + from pyontutils.core import OntId + + result = set() + for entity in entities_list: + if isinstance(entity, dict) and REGION in entity and LAYER in entity: + # This is a region-layer pair, recreate orders.rl object + region = OntId(entity[REGION]) + layer = OntId(entity[LAYER]) + result.add(orders.rl(region, layer)) + else: + result.add(str(entity)) + return result class NeuronDMVia: @@ -13,6 +75,31 @@ def __init__(self, anatomical_entities: Set, from_entities: Set, order: int, typ self.from_entities = from_entities self.order = order self.type = type + + def to_dict(self) -> Dict: + """Convert NeuronDMVia to a JSON-serializable dictionary.""" + return { + ANATOMICAL_ENTITIES: NeuronDMOrigin._convert_anatomical_entities_to_list(self.anatomical_entities), + FROM_ENTITIES: NeuronDMOrigin._convert_anatomical_entities_to_list(self.from_entities), + ORDER: self.order, + TYPE: self.type + } + + @classmethod + def from_dict(cls, data: Dict) -> 
'NeuronDMVia': + """Create NeuronDMVia from a dictionary (deserialization).""" + anatomical_entities = NeuronDMOrigin._convert_list_to_anatomical_entities( + data.get(ANATOMICAL_ENTITIES, []) + ) + from_entities = NeuronDMOrigin._convert_list_to_anatomical_entities( + data.get(FROM_ENTITIES, []) + ) + return cls( + anatomical_entities=anatomical_entities, + from_entities=from_entities, + order=data.get(ORDER, 0), + type=data.get(TYPE, '') + ) class NeuronDMDestination: @@ -20,6 +107,29 @@ def __init__(self, anatomical_entities: Set, from_entities: Set, type: str): self.anatomical_entities = anatomical_entities self.from_entities = from_entities self.type = type + + def to_dict(self) -> Dict: + """Convert NeuronDMDestination to a JSON-serializable dictionary.""" + return { + ANATOMICAL_ENTITIES: NeuronDMOrigin._convert_anatomical_entities_to_list(self.anatomical_entities), + FROM_ENTITIES: NeuronDMOrigin._convert_anatomical_entities_to_list(self.from_entities), + TYPE: self.type + } + + @classmethod + def from_dict(cls, data: Dict) -> 'NeuronDMDestination': + """Create NeuronDMDestination from a dictionary (deserialization).""" + anatomical_entities = NeuronDMOrigin._convert_list_to_anatomical_entities( + data.get(ANATOMICAL_ENTITIES, []) + ) + from_entities = NeuronDMOrigin._convert_list_to_anatomical_entities( + data.get(FROM_ENTITIES, []) + ) + return cls( + anatomical_entities=anatomical_entities, + from_entities=from_entities, + type=data.get(TYPE, '') + ) class Severity(Enum): @@ -78,3 +188,102 @@ def has_errors(self) -> bool: self.axiom_not_found or self.non_specified ) + + +def convert_statement_to_json_serializable(statement: Dict[str, Any]) -> Dict[str, Any]: + """ + Convert a statement dict with NeuronDM objects to a JSON-serializable format. 
+ + This function handles conversion of: + - NeuronDMOrigin objects to dicts + - Lists of NeuronDMVia objects to dicts + - Lists of NeuronDMDestination objects to dicts + - statement_alerts tuples (alert_uri, alert_text) to list format + - Removes non-serializable objects (_neuron, validation_errors) + + Note: validation_errors are handled separately by the logging service and not serialized. + + Args: + statement: Statement dict from neurondm_script.for_composer() + + Returns: + JSON-serializable dict with all objects converted and non-serializable objects removed + """ + result = statement.copy() + + # Convert origins + if isinstance(result.get(ORIGINS), NeuronDMOrigin): + result[ORIGINS] = result[ORIGINS].to_dict() + + # Convert vias + if isinstance(result.get(VIAS), list): + result[VIAS] = [v.to_dict() if isinstance(v, NeuronDMVia) else v for v in result[VIAS]] + + # Convert destinations + if isinstance(result.get(DESTINATIONS), list): + result[DESTINATIONS] = [d.to_dict() if isinstance(d, NeuronDMDestination) else d for d in result[DESTINATIONS]] + + # Convert statement_alerts from tuples (alert_uri, alert_text) to lists for JSON + if STATEMENT_ALERTS in result and result[STATEMENT_ALERTS]: + alerts = result[STATEMENT_ALERTS] + if alerts and isinstance(alerts[0], (tuple, list)): + # Convert tuples/lists to simple list format [alert_uri, alert_text] + result[STATEMENT_ALERTS] = [ + [str(item[0]), str(item[1])] + for item in alerts + ] + + # Remove non-serializable objects + result.pop('_neuron', None) + result.pop(VALIDATION_ERRORS, None) # validation_errors are handled by the logging service + + return result + + +def convert_statement_from_json(statement: Dict[str, Any]) -> Dict[str, Any]: + """ + Convert a JSON-serialized statement back to object format. + + This is the inverse of convert_statement_to_json_serializable(). + It recreates NeuronDM objects (including orders.rl) from their dict representations + and converts statement_alerts back to tuples. 
+ + Args: + statement: Statement dict from JSON with serialized objects + + Returns: + Statement dict with NeuronDM objects restored and statement_alerts as tuples + """ + result = statement.copy() + + # Convert origins from dict to object + if isinstance(result.get(ORIGINS), dict): + result[ORIGINS] = NeuronDMOrigin.from_dict(result[ORIGINS]) + + # Convert vias from dicts to objects + if isinstance(result.get(VIAS), list): + result[VIAS] = [ + NeuronDMVia.from_dict(v) if isinstance(v, dict) else v + for v in result[VIAS] + ] + + # Convert destinations from dicts to objects + if isinstance(result.get(DESTINATIONS), list): + result[DESTINATIONS] = [ + NeuronDMDestination.from_dict(d) if isinstance(d, dict) else d + for d in result[DESTINATIONS] + ] + + # Convert statement_alerts from lists [alert_uri, alert_text] back to tuples + if STATEMENT_ALERTS in result and result[STATEMENT_ALERTS]: + alerts = result[STATEMENT_ALERTS] + if alerts and isinstance(alerts[0], list): + # Convert lists back to tuples (alert_uri, alert_text) + result[STATEMENT_ALERTS] = [ + (item[0], item[1]) + for item in alerts + ] + + # Note: validation_errors are handled by the logging service + + return result diff --git a/applications/composer/backend/composer/services/cs_ingestion/neurondm_new_field_ingestion_service.py b/applications/composer/backend/composer/services/cs_ingestion/neurondm_new_field_ingestion_service.py index 2e053ef9..1eb160f3 100644 --- a/applications/composer/backend/composer/services/cs_ingestion/neurondm_new_field_ingestion_service.py +++ b/applications/composer/backend/composer/services/cs_ingestion/neurondm_new_field_ingestion_service.py @@ -2,9 +2,13 @@ from .neurondm_script import main as get_statements_from_neurondm from composer.services.cs_ingestion.helpers.common_helpers import ID from composer.models import ConnectivityStatement +from composer.constants import INGESTION_ANOMALIES_LOG_PATH, INGESTION_INGESTED_LOG_PATH from django.core.exceptions import 
FieldDoesNotExist -logger_service = LoggerService() +logger_service = LoggerService( + ingestion_anomalies_log_path=INGESTION_ANOMALIES_LOG_PATH, + ingested_log_path=INGESTION_INGESTED_LOG_PATH +) def check_if_connectivity_statement_field_exists(cs_field): try: diff --git a/applications/composer/backend/composer/services/cs_ingestion/neurondm_script.py b/applications/composer/backend/composer/services/cs_ingestion/neurondm_script.py index 0300f0db..e6c4af4c 100644 --- a/applications/composer/backend/composer/services/cs_ingestion/neurondm_script.py +++ b/applications/composer/backend/composer/services/cs_ingestion/neurondm_script.py @@ -1,4 +1,5 @@ import os +import traceback from typing import Optional, Tuple, List, Set, Dict import rdflib @@ -7,7 +8,6 @@ from neurondm.core import OntTerm, OntId, RDFL from pyontutils.core import OntGraph, OntResIri, OntResPath from pyontutils.namespaces import rdfs, ilxtr -from django.core.management.base import BaseCommand, CommandError import logging import re @@ -79,6 +79,12 @@ def for_composer(n, statement_alert_uris: Set[str] = None): if str(item[0]) in statement_alert_uris ] + expert_consultants = [ + r for ref in lrdf(n, ilxtr.reference) + for r in ref.split() + if 'orcid' in r + ] + fc = dict( id=str(n.id_), label=lrdf(n, rdfs.label)[0], @@ -98,10 +104,13 @@ def for_composer(n, statement_alert_uris: Set[str] = None): + lpes(n, ilxtr.hasProjectionPhenotype)), forward_connection=lpes(n, ilxtr.hasForwardConnectionPhenotype), provenance=lrdf(n, ilxtr.literatureCitation), + expert_consultants=expert_consultants, sentence_number=lrdf(n, ilxtr.sentenceNumber), note_alert=lrdf(n, ilxtr.alertNote), validation_errors=validation_errors, statement_alerts=statement_alerts, + # Expose neuron object for custom relationship processing + _neuron=n, ) return fc @@ -117,7 +126,9 @@ def get_connections(n, lpes): destinations_from_axioms = create_uri_type_dict(lpes, {ilxtr.hasAxonPresynapticElementIn: 'AXON-T', 
ilxtr.hasAxonSensorySubcellularElementIn: 'AFFERENT-T'}) vias_from_axioms = create_uri_type_dict(lpes, - {ilxtr.hasAxonLocatedIn: 'AXON', ilxtr.hasDendriteLocatedIn: 'DENDRITE'}) + {ilxtr.hasAxonLocatedIn: 'AXON', + ilxtr.hasDendriteLocatedIn: 'DENDRITE', + ilxtr.hasAxonLeadingToSensorySubcellularElementIn: 'SENSORY_AXON'}) tmp_origins, tmp_vias, tmp_destinations, validation_errors = process_connections(partial_order, set(origins_from_axioms), @@ -447,9 +458,62 @@ def update_from_entities(origins: NeuronDMOrigin, vias: List[NeuronDMVia], desti return origins, vias, destinations +def process_custom_relationships_for_statement(statement: Dict, custom_relationships: List[Dict], logger_service: LoggerService): + """ + Execute custom code for relationships on a statement (Step 1 of ingestion). + This runs during NeuroDM processing, before database interaction. + + Args: + statement: The statement dict with neuron data including '_neuron' object + custom_relationships: List of dicts with relationship info (id, title, type, custom_ingestion_code) + logger_service: Service for logging anomalies + + Returns: + Dict mapping relationship_id to execution result + """ + results = {} + + for relationship_info in custom_relationships: + try: + # Prepare execution context - only provide fc dict + exec_globals = { + 'fc': statement, + } + exec_locals = {} + + # Execute the custom code + exec(relationship_info['custom_ingestion_code'], exec_globals, exec_locals) + + # Get the result variable + if 'result' not in exec_locals: + logger_service.add_anomaly( + LoggableAnomaly( + statement_id=statement.get('id'), + entity_id=str(relationship_info['id']), + message=f"[CUSTOM_RELATIONSHIP] Custom code for relationship '{relationship_info['title']}' did not define 'result' variable", + severity=Severity.WARNING + ) + ) + continue + + results[relationship_info['id']] = exec_locals['result'] + + except Exception as e: + logger_service.add_anomaly( + LoggableAnomaly( + 
statement_id=statement.get('id'), + entity_id=str(relationship_info['id']), + message=f"[CUSTOM_RELATIONSHIP] Error executing custom code for relationship '{relationship_info['title']}': {str(e)} | Details: {{'relationship_title': '{relationship_info['title']}', 'error': '{str(e)}', 'traceback': '{traceback.format_exc()}', 'code': '{relationship_info['custom_ingestion_code']}'}}", + severity=Severity.WARNING + ) + ) + + return results + + ## Based on: ## https://github.com/tgbugs/pyontutils/blob/30c415207b11644808f70c8caecc0c75bd6acb0a/neurondm/docs/composer.py#L668-L698 -def main(local=False, full_imports=[], label_imports=[], logger_service_param=Optional[LoggerService], statement_alert_uris: Set[str] = None): +def main(local=False, full_imports=[], label_imports=[], logger_service_param=Optional[LoggerService], statement_alert_uris: Set[str] = None, population_uris: Set[str] = None, custom_relationships: List[Dict] = None): global logger_service logger_service = logger_service_param @@ -526,8 +590,23 @@ def main(local=False, full_imports=[], label_imports=[], logger_service_param=Op if statement_alert_uris is None: statement_alert_uris = set() + # Filter neurons by population URIs BEFORE for_composer processing + # This is done in Step 1 to avoid processing neurons that won't be ingested + if population_uris is not None: + # Get neuron IDs and filter + neurons = [n for n in neurons if str(n.id_) in population_uris] + fcs = [for_composer(n, statement_alert_uris) for n in neurons] composer_statements = [item for item in fcs if item is not None] + + # Process custom relationships for each statement (Step 1) + if custom_relationships: + for statement in composer_statements: + custom_results = process_custom_relationships_for_statement( + statement, custom_relationships, logger_service + ) + # Store results in the statement dict for Step 2 + statement['_custom_relationship_results'] = custom_results return composer_statements diff --git 
a/applications/composer/backend/composer/services/dynamic_schema_service.py b/applications/composer/backend/composer/services/dynamic_schema_service.py index e1e3cd3e..7fc72235 100644 --- a/applications/composer/backend/composer/services/dynamic_schema_service.py +++ b/applications/composer/backend/composer/services/dynamic_schema_service.py @@ -26,9 +26,9 @@ def inject_dynamic_relationship_schema(schema): if rel.type == RelationshipType.TEXT: field["type"] = "string" - elif rel.type == RelationshipType.SINGLE: + elif rel.type == RelationshipType.TRIPLE_SINGLE: field["type"] = ["string", "null"] - elif rel.type == RelationshipType.MULTI: + elif rel.type == RelationshipType.TRIPLE_MULTI or rel.type == RelationshipType.ANATOMICAL_MULTI: field["type"] = "array" field["items"] = {"type": "object"} diff --git a/applications/composer/backend/composer/services/errors_service.py b/applications/composer/backend/composer/services/errors_service.py index 80104a31..9ed2d90b 100644 --- a/applications/composer/backend/composer/services/errors_service.py +++ b/applications/composer/backend/composer/services/errors_service.py @@ -1,4 +1,4 @@ -from composer.enums import ConnectivityErrors +from composer.pure_enums import ConnectivityErrors from composer.services.state_services import ConnectivityStatementStateService diff --git a/applications/composer/backend/composer/services/export/helpers/csv.py b/applications/composer/backend/composer/services/export/helpers/csv.py index 41c24dd1..7f83efe2 100644 --- a/applications/composer/backend/composer/services/export/helpers/csv.py +++ b/applications/composer/backend/composer/services/export/helpers/csv.py @@ -104,6 +104,12 @@ def get_export_queryset(base_qs): "via_set__from_entities", "destinations__anatomical_entities", "destinations__from_entities", + "statement_alerts__alert_type", + "connectivitystatementtriple_set__relationship", + "connectivitystatementtriple_set__triples", + "connectivitystatementtext_set__relationship", + 
"connectivitystatementanatomicalentity_set__relationship", + "connectivitystatementanatomicalentity_set__anatomical_entities", ) .order_by("state_order", "state", "id") ) @@ -144,6 +150,7 @@ def generate_csv_attributes_mapping() -> Dict[str, Callable]: "Connected from uri": get_connected_from_uri, "Curation notes": get_curation_notes, "Reference (pubmed ID, DOI or text)": get_reference, + "Expert Consultant": get_expert_consultants, "Has nerve branches": has_nerve_branches, "Approved by SAWG": is_approved_by_sawg, "Review notes": get_review_notes, @@ -275,3 +282,7 @@ def get_review_notes(cs: ConnectivityStatement, row: Row): def get_reference(cs: ConnectivityStatement, row: Row): return ", ".join(procenance.uri for procenance in cs.provenance_set.all()) + + +def get_expert_consultants(cs: ConnectivityStatement, row: Row): + return ", ".join(expert.uri for expert in cs.expertconsultant_set.all()) diff --git a/applications/composer/backend/composer/services/export/helpers/rows.py b/applications/composer/backend/composer/services/export/helpers/rows.py index 7d802f60..842544b7 100644 --- a/applications/composer/backend/composer/services/export/helpers/rows.py +++ b/applications/composer/backend/composer/services/export/helpers/rows.py @@ -163,7 +163,25 @@ def get_rows(cs: ConnectivityStatement) -> List[Row]: ) - for cst in cs.statement_triples.select_related("relationship", "triple").all(): + # Dynamic Relationships - Triples (single/multi select) + for cst in cs.connectivitystatementtriple_set.select_related("relationship").prefetch_related("triples").all(): + predicate_mapping = DynamicExportRelationship( + predicate=cst.relationship.predicate_name, + label=cst.relationship.title, + uri=cst.relationship.predicate_uri, + ) + # Handle multiple triples - create one row per triple + for triple in cst.triples.all(): + rows.append( + Row( + object=triple.name, + object_uri=triple.uri, + predicate_mapping=predicate_mapping, + ) + ) + + # Dynamic Relationships - Text (free 
text) + for cst in cs.connectivitystatementtext_set.select_related("relationship").all(): predicate_mapping = DynamicExportRelationship( predicate=cst.relationship.predicate_name, label=cst.relationship.title, @@ -171,12 +189,29 @@ def get_rows(cs: ConnectivityStatement) -> List[Row]: ) rows.append( Row( - object=cst.triple.name if cst.triple else cst.free_text, - object_uri=cst.triple.uri if cst.triple else "", + object=cst.text, + object_uri="", predicate_mapping=predicate_mapping, ) ) + # Dynamic Relationships - Anatomical Entities (single/multi select) + for cst in cs.connectivitystatementanatomicalentity_set.select_related("relationship").prefetch_related("anatomical_entities").all(): + predicate_mapping = DynamicExportRelationship( + predicate=cst.relationship.predicate_name, + label=cst.relationship.title, + uri=cst.relationship.predicate_uri, + ) + # Handle multiple anatomical entities - create one row per entity + for anatomical_entity in cst.anatomical_entities.all(): + rows.append( + Row( + object=anatomical_entity.name, + object_uri=anatomical_entity.ontology_uri, + predicate_mapping=predicate_mapping, + ) + ) + # the composer URI rows.append(get_composer_uri_row(cs)) return rows diff --git a/applications/composer/backend/composer/services/workflows/export.py b/applications/composer/backend/composer/services/workflows/export.py index e78bfca3..599c280a 100644 --- a/applications/composer/backend/composer/services/workflows/export.py +++ b/applications/composer/backend/composer/services/workflows/export.py @@ -47,6 +47,7 @@ def run_export_workflow(user: User, scheme: str = "https") -> None: "queue": "default", # not needed but required by cloudharness "payload": json.dumps( { + "type": "export", "file_url": file_url, "email": user.email, } diff --git a/applications/composer/backend/composer/services/workflows/ingestion.py b/applications/composer/backend/composer/services/workflows/ingestion.py new file mode 100644 index 00000000..b75b53ac --- /dev/null 
+++ b/applications/composer/backend/composer/services/workflows/ingestion.py @@ -0,0 +1,140 @@ +import json +import os +from django.conf import settings +from django.contrib.auth.models import User +from composer.constants import INGESTION_ANOMALIES_LOG_PATH, INGESTION_INGESTED_LOG_PATH +from composer.services.workflows.ingestion_utils import ( + get_ingestion_timestamp, + get_ingestion_temp_file_paths, +) + + +def get_volume_directory(current_app) -> str: + return f"{current_app.harness.deployment.volume.name}:{settings.MEDIA_ROOT}" + + +def run_ingestion_workflow( + user: User, + update_upstream: bool = False, + update_anatomical_entities: bool = False, + disable_overwrite: bool = False, + full_imports: list = None, + label_imports: list = None, + population_file_path: str = None, + timestamp: str = None, +) -> None: + from cloudharness.workflows import tasks, operations + from cloudharness.applications import get_current_configuration + + current_app = get_current_configuration() + + # Create unique filenames for intermediate data + if timestamp is None: + timestamp = get_ingestion_timestamp() + + temp_paths = get_ingestion_temp_file_paths(timestamp) + composer_data_file = temp_paths['composer_data'] + intermediate_file = temp_paths['intermediate'] + anomalies_log_file = temp_paths['anomalies_log'] + + # Ensure the directory exists + os.makedirs(os.path.dirname(intermediate_file), exist_ok=True) + + # Step 0: Get composer data (custom relationships and alert URIs) from database + step0_command = [ + "python", + "manage.py", + "get_composer_data", + f"--output_filepath={composer_data_file}", + ] + + get_composer_data_task = tasks.CustomTask( + name="get-composer-data-task", + image_name="composer", + command=step0_command, + ) + + # Build command for step 1: process_neurondm + step1_command = [ + "python", + "process_neurondm_standalone.py", + f"--input_filepath={composer_data_file}", + f"--output_filepath={intermediate_file}", + 
f"--anomalies_csv_output={INGESTION_ANOMALIES_LOG_PATH}", + f"--ingested_csv_output={INGESTION_INGESTED_LOG_PATH}", + ] + + if full_imports: + step1_command.append("--full_imports") + step1_command.extend(full_imports) + + if label_imports: + step1_command.append("--label_imports") + step1_command.extend(label_imports) + + if population_file_path: + step1_command.append(f"--population_file={population_file_path}") + + # Step 1: Process neurondm (uses composer-neurondm image with neurondm packages) + process_task = tasks.CustomTask( + name="process-neurondm-task", + image_name="composer-neurondm", + command=step1_command, + ) + + # Build command for step 2: ingest_to_database + step2_command = [ + "python", + "manage.py", + "ingest_to_database", + f"--input_filepath={intermediate_file}", + f"--anomalies_csv_input={INGESTION_ANOMALIES_LOG_PATH}", + ] + + if update_upstream: + step2_command.append("--update_upstream") + + if update_anatomical_entities: + step2_command.append("--update_anatomical_entities") + + if disable_overwrite: + step2_command.append("--disable_overwrite") + + # Add force_state_transition if population filtering is enabled + # This allows state transitions when ingesting pre-filtered populations + if population_file_path is not None: + step2_command.append("--force_state_transition") + + # Step 2: Ingest to database (uses composer image with Django) + ingest_task = tasks.CustomTask( + name="ingest-to-database-task", + image_name="composer", + command=step2_command, + ) + + # on-exit notify task + on_exit_notify = { + "image": "composer-notify", + "queue": "default", # not needed but required by cloudharness + "payload": json.dumps( + { + "type": "ingestion", + "email": user.email, + } + ), + "command": ["python", "notify.py"], + } + + # Create pipeline operation with all three tasks + op = operations.PipelineOperation( + basename="ingestion-op", + tasks=[ + get_composer_data_task, # Step 0: Get composer data from database + process_task, # Step 1: 
Process neurondm + ingest_task, # Step 2: Ingest to database + ], + shared_directory=get_volume_directory(current_app), + on_exit_notify=on_exit_notify, + ) + + op.execute() diff --git a/applications/composer/backend/composer/services/workflows/ingestion_utils.py b/applications/composer/backend/composer/services/workflows/ingestion_utils.py new file mode 100644 index 00000000..ef0761f4 --- /dev/null +++ b/applications/composer/backend/composer/services/workflows/ingestion_utils.py @@ -0,0 +1,62 @@ +""" +Utility functions for ingestion workflow file management. +""" +import os +from datetime import datetime +from composer.constants import INGESTION_TEMP_DIR + + +def get_ingestion_timestamp() -> str: + """ + Generate a timestamp string for ingestion file naming. + Format: YYYY-MM-DD_HH-MM-SS + + Returns: + str: Timestamp string + """ + return datetime.now().strftime("%Y-%m-%d_%H-%M-%S") + + +def get_timestamped_population_filename(original_filename: str, timestamp: str) -> str: + """ + Generate a timestamped filename for a population file. 
+ + Args: + original_filename: The original uploaded filename (e.g., "populations.txt") + timestamp: Timestamp string from get_ingestion_timestamp() + + Returns: + str: Full path to the timestamped population file in the temporary directory + + Example: + get_timestamped_population_filename("pop.txt", "2025-01-15_10-30-45") + -> "/path/to/media/ingestion/ingestion_temp/pop_2025-01-15_10-30-45.txt" + """ + # Split filename into base and extension + name_parts = os.path.splitext(original_filename) + base_name = name_parts[0] + extension = name_parts[1] if len(name_parts) > 1 else '' + + # Create timestamped filename + timestamped_filename = f"{base_name}_{timestamp}{extension}" + + # Return full path (INGESTION_TEMP_DIR already contains full path) + return os.path.join(INGESTION_TEMP_DIR, timestamped_filename) + + +def get_ingestion_temp_file_paths(timestamp: str) -> dict: + """ + Generate all temporary file paths for an ingestion workflow. + + Args: + timestamp: Timestamp string from get_ingestion_timestamp() + + Returns: + dict: Dictionary containing paths for all temporary files + """ + # INGESTION_TEMP_DIR already contains full path + return { + 'composer_data': os.path.join(INGESTION_TEMP_DIR, f"composer_data_{timestamp}.json"), + 'intermediate': os.path.join(INGESTION_TEMP_DIR, f"statements_{timestamp}.json"), + 'anomalies_log': os.path.join(INGESTION_TEMP_DIR, f"anomalies_{timestamp}.json"), + } diff --git a/applications/composer/backend/composer/signals.py b/applications/composer/backend/composer/signals.py index 666a14ac..05fb2501 100644 --- a/applications/composer/backend/composer/signals.py +++ b/applications/composer/backend/composer/signals.py @@ -49,12 +49,20 @@ def post_transition_callback(sender, instance, name, source, target, **kwargs): sentence = instance else: sentence = None + + # Customize message based on whether transition was done by system (ingestion) or user + if user and user.username == "system": + note_message = f"Automatically 
transitioned from {source} to {target} during automated processes (e.g., ingestion)." + else: + user_name = f"{user.first_name} {user.last_name}" if user else "Unknown user" + note_message = f"User {user_name} transitioned this record from {source} to {target}" + Note.objects.create( user=system_user, type=NoteType.TRANSITION, connectivity_statement=connectivity_statement, sentence=sentence, - note=f"User {user.first_name} {user.last_name} transitioned this record from {source} to {target}", + note=note_message, ) diff --git a/applications/composer/backend/composer/templates/admin/download_logs.html b/applications/composer/backend/composer/templates/admin/download_logs.html new file mode 100644 index 00000000..a87af829 --- /dev/null +++ b/applications/composer/backend/composer/templates/admin/download_logs.html @@ -0,0 +1,53 @@ +{% extends "admin/base.html" %} +{% load i18n static jazzmin composer_extras %} +{% get_jazzmin_ui_tweaks as jazzmin_ui %} + +{% block bodyclass %}{{ block.super }} download-logs{% endblock %} + +{% block content_title %} {% trans 'Download Ingestion Logs' %} {% endblock %} + +{% block breadcrumbs %} + +{% endblock %} + +{% block content %} +
+

Ingestion Log Files

+

Download CSV log files from the most recent ingestion process.

+ +
+
+

Ingestion Anomalies Log

+

Contains all errors and warnings encountered during the ingestion process, including validation issues, data quality problems, and processing errors.

+

Format: severity, statement_id, entity_id, message

+ + Download Anomalies Log + +
+ +
+

Ingested Statements Log

+

Contains all connectivity statements processed during ingestion with their final states, showing which statements were successfully imported and which failed.

+

Format: statement_id, label, state, reason

+ + Download Ingested Statements Log + +
+
+ +
+

About These Logs

+
    +
  • Anomalies Log: Tracks all issues during ingestion +
  • +
  • Ingested Statements Log: Provides a record of all statements processed and their outcomes +
  • +
  • Both files are CSV format and can be opened in Excel, Google Sheets, or any spreadsheet application.
  • +
  • Log files are overwritten each time the ingestion process runs. Download them before running a new ingestion if you need to preserve the data.
  • +
+
+
+{% endblock %} diff --git a/applications/composer/backend/composer/templates/admin/index.html b/applications/composer/backend/composer/templates/admin/index.html index 0ebae22a..f4aed7e0 100644 --- a/applications/composer/backend/composer/templates/admin/index.html +++ b/applications/composer/backend/composer/templates/admin/index.html @@ -106,6 +106,25 @@
Ingest Sentences
+
+
+
+
+
Connectivity Statements Ingestion
+
+
+

Configure and trigger the ingestion of connectivity statements from the neurondm repository.

+ + Configure & Start Ingestion + + + Download Ingestion Logs + +
+
+
+
+
{% for app in dashboard_list %} diff --git a/applications/composer/backend/composer/templates/admin/ingest_statements.html b/applications/composer/backend/composer/templates/admin/ingest_statements.html new file mode 100644 index 00000000..9dc423e6 --- /dev/null +++ b/applications/composer/backend/composer/templates/admin/ingest_statements.html @@ -0,0 +1,141 @@ +{% extends "admin/base.html" %} +{% load i18n static jazzmin composer_extras %} +{% get_jazzmin_ui_tweaks as jazzmin_ui %} + +{% block bodyclass %}{{ block.super }} connectivity-statements-ingestion{% endblock %} + +{% block content_title %} {% trans 'Connectivity Statements Ingestion' %} {% endblock %} + +{% block breadcrumbs %} + +{% endblock %} + +{% block content %} +
+
+
+
Ingest Connectivity Statements from neurondm
+
+
+

Configure and trigger the ingestion of connectivity statements from the neurondm repository.

+

Note: This process will run asynchronously using Argo workflows. You will receive an email notification when the ingestion is complete.

+
+ {% csrf_token %} + +
+
Boolean Options
+ +
+ {{ form.update_upstream }} + + {{ form.update_upstream.help_text }} +
+ +
+ {{ form.update_anatomical_entities }} + + {{ form.update_anatomical_entities.help_text }} +
+ +
+ {{ form.disable_overwrite }} + + {{ form.disable_overwrite.help_text }} +
+
+ +
+ +
+
Import Configuration
+ +
+ + {{ form.full_imports }} + {{ form.full_imports.help_text }} +
+ +
+ + {{ form.label_imports }} + {{ form.label_imports.help_text }} +
+ +
+ + {{ form.population_file }} + {{ form.population_file.help_text }} +
+
+ +
+ + + + Cancel +
+ + +
+
+
+ + + + +{% endblock %} diff --git a/applications/composer/backend/composer/urls.py b/applications/composer/backend/composer/urls.py index 9fdc9856..735c8e77 100644 --- a/applications/composer/backend/composer/urls.py +++ b/applications/composer/backend/composer/urls.py @@ -5,6 +5,7 @@ urlpatterns = [ path("api/composer/", include("composer.api.urls")), path("composer/export", views.export, name="export"), + path("composer/ingest-statements", views.ingest_statements, name="ingest-statements"), path("login", views.index, name="index"), path("logged-out/", views.logout_landing, name="logged-out"), ] diff --git a/applications/composer/backend/composer/views.py b/applications/composer/backend/composer/views.py index 294cb7f0..d7a9b782 100644 --- a/applications/composer/backend/composer/views.py +++ b/applications/composer/backend/composer/views.py @@ -3,7 +3,15 @@ from django.template import loader from django.urls import reverse from composer.services.workflows.export import run_export_workflow +from composer.services.workflows.ingestion import run_ingestion_workflow +from composer.services.workflows.ingestion_utils import ( + get_ingestion_timestamp, + get_timestamped_population_filename, +) +from composer.constants import INGESTION_TEMP_DIR from django.contrib import messages +from django.views.decorators.http import require_http_methods +import os def index(request): if not hasattr(request, "user") or not request.user.is_authenticated: @@ -45,7 +53,83 @@ def export(request): messages.error(request, "Export failed: your account does not have an email address configured.") return HttpResponse("Missing user email", status=400) - run_export_workflow(user=user, scheme=request.scheme) + run_export_workflow(user=user) messages.success(request, "Export process started. 
You will receive an email when it is complete.") return HttpResponse("Export started", status=202) + + +@require_http_methods(["POST"]) +def ingest_statements(request): + """ + Triggers the ingestion of connectivity statements from neurondm. + Accepts parameters to configure the ingestion process. + The ingestion runs asynchronously via Argo workflow and notifies the user by email upon completion. + """ + user = request.user + + if not user.is_staff: + return HttpResponse("Unauthorized", status=401) + + if not user.email: + messages.error(request, "Ingestion failed: your account does not have an email address configured.") + return HttpResponse("Missing user email", status=400) + + try: + # Parse form data + data = request.POST + + # Get boolean flags + update_upstream = data.get('update_upstream') == 'on' + update_anatomical_entities = data.get('update_anatomical_entities') == 'on' + disable_overwrite = data.get('disable_overwrite') == 'on' + + # Get list fields and parse them + full_imports = None + full_imports_raw = data.get('full_imports', '').strip() + if full_imports_raw: + # Split by commas or newlines and filter empty strings + full_imports = [x.strip() for x in full_imports_raw.replace('\n', ',').split(',') if x.strip()] + + label_imports = None + label_imports_raw = data.get('label_imports', '').strip() + if label_imports_raw: + # Split by commas or newlines and filter empty strings + label_imports = [x.strip() for x in label_imports_raw.replace('\n', ',').split(',') if x.strip()] + + # Generate timestamp for this ingestion workflow + timestamp = get_ingestion_timestamp() + + # Handle population file upload + population_file_path = None + if 'population_file' in request.FILES: + uploaded_file = request.FILES['population_file'] + os.makedirs(INGESTION_TEMP_DIR, exist_ok=True) + + population_file_path = get_timestamped_population_filename( + uploaded_file.name, + timestamp + ) + + with open(population_file_path, 'wb+') as destination: + for chunk in 
uploaded_file.chunks(): + destination.write(chunk) + + # Run the ingestion workflow asynchronously + run_ingestion_workflow( + user=user, + update_upstream=update_upstream, + update_anatomical_entities=update_anatomical_entities, + disable_overwrite=disable_overwrite, + full_imports=full_imports, + label_imports=label_imports, + population_file_path=population_file_path, + timestamp=timestamp, + ) + + messages.success(request, "Ingestion process started. You will receive an email when it is complete.") + return HttpResponse("Ingestion started", status=202) + + except Exception as e: + messages.error(request, f"Failed to start ingestion: {str(e)}") + return HttpResponse(f"Failed to start ingestion: {str(e)}", status=500) diff --git a/applications/composer/backend/requirements.txt b/applications/composer/backend/requirements.txt index f5015dbd..3de48054 100644 --- a/applications/composer/backend/requirements.txt +++ b/applications/composer/backend/requirements.txt @@ -1,11 +1,11 @@ packaging==21.3 -Django==4.2.25 +Django==4.2.26 uvicorn==0.20.0 starlette>=0.41.3 pillow>=11.0.0 psycopg2-binary==2.9.5 djangorestframework==3.14.0 -django-filter==22.1 +django-filter==23.5 django-fsm==2.8.1 django-fsm-admin@git+https://github.com/gadventures/django-fsm-admin@master django-admin-sortable2==2.2.8 @@ -21,7 +21,6 @@ aiohttp==3.11.18 crossref-commons==0.0.7 neurondm==0.1.8 django-cors-headers==4.3.1 -django-silk==5.2.0 django-debug-toolbar==4.3.0 django-sslserver-v2==1.0 black==22.12.0 diff --git a/applications/composer/backend/test_serialization_standalone.py b/applications/composer/backend/test_serialization_standalone.py new file mode 100644 index 00000000..5b30805b --- /dev/null +++ b/applications/composer/backend/test_serialization_standalone.py @@ -0,0 +1,449 @@ +#!/usr/bin/env python3 +""" +Standalone test for serialization/deserialization without Django. 
+Run this script directly: python test_serialization_standalone.py +""" +import json +import sys +import os + +# Add the backend to the path +sys.path.insert(0, os.path.dirname(os.path.abspath(__file__))) + +from composer.services.cs_ingestion.models import ( + NeuronDMOrigin, + NeuronDMVia, + NeuronDMDestination, + ValidationErrors, + convert_statement_to_json_serializable, + convert_statement_from_json, +) + + +def test_origin_round_trip(): + """Test NeuronDMOrigin serialization round-trip.""" + print("\n=== Testing NeuronDMOrigin Round-Trip ===") + + # Create origin + entities = { + 'http://purl.obolibrary.org/obo/UBERON_0001234', + 'http://purl.obolibrary.org/obo/UBERON_0005678' + } + origin1 = NeuronDMOrigin(entities) + print(f"Original entities: {origin1.anatomical_entities}") + + # Serialize + origin_dict = origin1.to_dict() + print(f"Serialized: {json.dumps(origin_dict, indent=2)}") + + # Deserialize + origin2 = NeuronDMOrigin.from_dict(origin_dict) + print(f"Deserialized entities: {origin2.anatomical_entities}") + + # Verify + assert origin1.anatomical_entities == origin2.anatomical_entities, "Data mismatch!" 
+ print("✓ Round-trip successful!") + return True + + +def test_region_layer_serialization(): + """Test serialization of region-layer (orders.rl) entities.""" + print("\n=== Testing Region-Layer (orders.rl) Serialization ===") + + try: + from neurondm import orders + from pyontutils.core import OntId + + # Create region-layer pair + region = OntId('http://purl.obolibrary.org/obo/UBERON_0002222') + layer = OntId('http://purl.obolibrary.org/obo/UBERON_0003333') + rl_entity = orders.rl(region, layer) + + # Create origin with mixed entities (simple and region-layer) + anatomical_entities = { + 'http://purl.obolibrary.org/obo/UBERON_0001111', + rl_entity + } + origin1 = NeuronDMOrigin(anatomical_entities) + + print(f"Original entities: {len(origin1.anatomical_entities)} items") + print(f" - Contains orders.rl: {any(isinstance(e, orders.rl) for e in origin1.anatomical_entities)}") + + # Serialize + origin_dict = origin1.to_dict() + print(f"\nSerialized: {json.dumps(origin_dict, indent=2)}") + + # Deserialize + origin2 = NeuronDMOrigin.from_dict(origin_dict) + print(f"\nDeserialized entities: {len(origin2.anatomical_entities)} items") + + # Verify orders.rl was reconstructed + rl_entities = [e for e in origin2.anatomical_entities if isinstance(e, orders.rl)] + print(f" - Contains orders.rl: {len(rl_entities) > 0}") + + if rl_entities: + reconstructed_rl = rl_entities[0] + print(f" - Region: {reconstructed_rl.region}") + print(f" - Layer: {reconstructed_rl.layer}") + + # Verify region and layer match + assert str(reconstructed_rl.region) == str(region), "Region mismatch!" + assert str(reconstructed_rl.layer) == str(layer), "Layer mismatch!" 
+ + print("✓ Region-layer serialization successful!") + return True + except ImportError as e: + print(f"⚠ neurondm not available: {e}") + print(" This test requires neurondm to be installed") + return False + + +def test_via_round_trip(): + """Test NeuronDMVia serialization round-trip.""" + print("\n=== Testing NeuronDMVia Round-Trip ===") + + # Create via + via1 = NeuronDMVia( + anatomical_entities={'http://purl.obolibrary.org/obo/UBERON_0001111'}, + from_entities={'http://purl.obolibrary.org/obo/UBERON_0000000'}, + order=0, + type='AXON' + ) + print(f"Original: order={via1.order}, type={via1.type}") + + # Serialize + via_dict = via1.to_dict() + print(f"Serialized: {json.dumps(via_dict, indent=2)}") + + # Deserialize + via2 = NeuronDMVia.from_dict(via_dict) + print(f"Deserialized: order={via2.order}, type={via2.type}") + + # Verify + assert via1.anatomical_entities == via2.anatomical_entities, "Anatomical entities mismatch!" + assert via1.from_entities == via2.from_entities, "From entities mismatch!" + assert via1.order == via2.order, "Order mismatch!" + assert via1.type == via2.type, "Type mismatch!" + print("✓ Round-trip successful!") + return True + + +def test_destination_round_trip(): + """Test NeuronDMDestination serialization round-trip.""" + print("\n=== Testing NeuronDMDestination Round-Trip ===") + + # Create destination + dest1 = NeuronDMDestination( + anatomical_entities={'http://purl.obolibrary.org/obo/UBERON_0002222'}, + from_entities={'http://purl.obolibrary.org/obo/UBERON_0001111'}, + type='AXON-T' + ) + print(f"Original: type={dest1.type}") + + # Serialize + dest_dict = dest1.to_dict() + print(f"Serialized: {json.dumps(dest_dict, indent=2)}") + + # Deserialize + dest2 = NeuronDMDestination.from_dict(dest_dict) + print(f"Deserialized: type={dest2.type}") + + # Verify + assert dest1.anatomical_entities == dest2.anatomical_entities, "Anatomical entities mismatch!" + assert dest1.from_entities == dest2.from_entities, "From entities mismatch!" 
+ assert dest1.type == dest2.type, "Type mismatch!" + print("✓ Round-trip successful!") + return True + + +def test_statement_round_trip(): + """Test full statement serialization round-trip.""" + print("\n=== Testing Full Statement Round-Trip ===") + + # Create mock statement with statement_alerts as tuples (alert_uri, alert_text) + mock_statement = { + 'id': 'http://uri.interlex.org/test/statement/1', + 'label': 'Test Neuron', + 'pref_label': 'Test Neuron Preferred Label', + 'origins': NeuronDMOrigin({ + 'http://purl.obolibrary.org/obo/UBERON_0001234' + }), + 'destinations': [ + NeuronDMDestination( + {'http://purl.obolibrary.org/obo/UBERON_0002222'}, + {'http://purl.obolibrary.org/obo/UBERON_0001111'}, + type='AXON-T' + ), + NeuronDMDestination( + {'http://purl.obolibrary.org/obo/UBERON_0003333'}, + {'http://purl.obolibrary.org/obo/UBERON_0001111'}, + type='AFFERENT-T' + ) + ], + 'vias': [ + NeuronDMVia( + {'http://purl.obolibrary.org/obo/UBERON_0001111'}, + {'http://purl.obolibrary.org/obo/UBERON_0001234'}, + order=0, + type='AXON' + ) + ], + 'statement_alerts': [ + ('http://uri.interlex.org/alert/1', 'Alert message 1'), + ('http://uri.interlex.org/alert/2', 'Alert message 2') + ], + 'species': ['http://purl.obolibrary.org/obo/NCBITaxon_10090'], + 'sex': ['http://purl.obolibrary.org/obo/PATO_0000384'], + 'circuit_type': ['http://uri.interlex.org/base/ilx_0738400'], + 'populationset': 'test-pop', + '_neuron': object() # Mock non-serializable object + } + + print(f"Original statement has _neuron: {'_neuron' in mock_statement}") + print(f"Original origins type: {type(mock_statement['origins'])}") + print(f"Original vias count: {len(mock_statement['vias'])}") + print(f"Original destinations count: {len(mock_statement['destinations'])}") + print(f"Original statement_alerts: {mock_statement['statement_alerts']}") + print(f" - First alert type: {type(mock_statement['statement_alerts'][0])}") + + # Serialize + json_statement = 
convert_statement_to_json_serializable(mock_statement) + print(f"\nAfter serialization:") + print(f" - _neuron removed: {'_neuron' not in json_statement}") + print(f" - origins type: {type(json_statement['origins'])}") + print(f" - vias type: {type(json_statement['vias'])}") + print(f" - destinations type: {type(json_statement['destinations'])}") + print(f" - statement_alerts: {json_statement['statement_alerts']}") + print(f" - First alert type: {type(json_statement['statement_alerts'][0])}") + + # Verify JSON serializable + json_str = json.dumps(json_statement, indent=2) + print(f"\nJSON length: {len(json_str)} characters") + + # Load from JSON (simulating file save/load) + loaded_json = json.loads(json_str) + + # Deserialize + object_statement = convert_statement_from_json(loaded_json) + print(f"\nAfter deserialization:") + print(f" - origins type: {type(object_statement['origins'])}") + print(f" - vias type: {type(object_statement['vias'])}") + print(f" - destinations type: {type(object_statement['destinations'])}") + print(f" - statement_alerts: {object_statement['statement_alerts']}") + print(f" - First alert type: {type(object_statement['statement_alerts'][0])}") + + # Verify + assert '_neuron' not in object_statement, "_neuron should be removed!" + assert isinstance(object_statement['origins'], NeuronDMOrigin), "Origins should be NeuronDMOrigin!" + assert len(object_statement['vias']) == 1, "Vias count mismatch!" + assert isinstance(object_statement['vias'][0], NeuronDMVia), "Via should be NeuronDMVia!" + assert len(object_statement['destinations']) == 2, "Destinations count mismatch!" + assert isinstance(object_statement['destinations'][0], NeuronDMDestination), "Destination should be NeuronDMDestination!" + assert object_statement['id'] == mock_statement['id'], "ID mismatch!" + assert object_statement['vias'][0].order == 0, "Via order mismatch!" + assert object_statement['vias'][0].type == 'AXON', "Via type mismatch!" 
+ + # Verify statement_alerts are properly restored as tuples + assert len(object_statement['statement_alerts']) == 2, "Statement alerts count mismatch!" + assert isinstance(object_statement['statement_alerts'][0], tuple), "Statement alert should be tuple!" + assert object_statement['statement_alerts'][0][0] == 'http://uri.interlex.org/alert/1', "Alert URI mismatch!" + assert object_statement['statement_alerts'][0][1] == 'Alert message 1', "Alert text mismatch!" + + print("✓ Round-trip successful!") + return True + + +def test_statement_alerts_serialization(): + """Test statement_alerts serialization/deserialization.""" + print("\n=== Testing Statement Alerts Serialization ===") + + # Create simple statement with alerts + statement = { + 'id': 'http://uri.interlex.org/test/statement/alerts', + 'statement_alerts': [ + ('http://uri.interlex.org/alert/warning1', 'Warning: Missing data'), + ('http://uri.interlex.org/alert/error1', 'Error: Invalid format'), + ('http://uri.interlex.org/alert/info1', 'Info: Processing complete') + ] + } + + print(f"Original alerts: {len(statement['statement_alerts'])} items") + print(f" - First alert: {statement['statement_alerts'][0]}") + print(f" - Type: {type(statement['statement_alerts'][0])}") + + # Serialize + json_statement = convert_statement_to_json_serializable(statement) + print(f"\nSerialized alerts:") + for i, alert in enumerate(json_statement['statement_alerts']): + print(f" - Alert {i+1}: {alert} (type: {type(alert)})") + + # Verify JSON serializable + json_str = json.dumps(json_statement, indent=2) + print(f"\nJSON representation:") + print(json_str) + + # Load from JSON + loaded_json = json.loads(json_str) + + # Deserialize + object_statement = convert_statement_from_json(loaded_json) + print(f"\nDeserialized alerts: {len(object_statement['statement_alerts'])} items") + print(f" - First alert: {object_statement['statement_alerts'][0]}") + print(f" - Type: {type(object_statement['statement_alerts'][0])}") + + # Verify 
alerts are tuples (as expected by create_or_update_statement_alert) + assert len(object_statement['statement_alerts']) == 3, "Alert count mismatch!" + for i, alert in enumerate(object_statement['statement_alerts']): + assert isinstance(alert, tuple), f"Alert {i} should be tuple, got {type(alert)}" + assert len(alert) == 2, f"Alert {i} should have 2 elements" + assert isinstance(alert[0], str), f"Alert {i} URI should be string" + assert isinstance(alert[1], str), f"Alert {i} text should be string" + + # Verify specific values + assert object_statement['statement_alerts'][0][0] == 'http://uri.interlex.org/alert/warning1' + assert object_statement['statement_alerts'][0][1] == 'Warning: Missing data' + assert object_statement['statement_alerts'][1][0] == 'http://uri.interlex.org/alert/error1' + assert object_statement['statement_alerts'][1][1] == 'Error: Invalid format' + + print("✓ Statement alerts serialization successful!") + return True + + +def test_validation_errors_removal(): + """Test that validation_errors are removed during serialization.""" + print("\n=== Testing Validation Errors Removal ===") + + # Create statement with validation_errors + statement = { + 'id': 'http://uri.interlex.org/test/statement/validation', + 'origins': NeuronDMOrigin({'http://test.org/entity1'}), + 'validation_errors': ValidationErrors(), + '_neuron': object() # Also test _neuron removal + } + + # Add some errors + statement['validation_errors'].entities.add('http://test.org/missing') + statement['validation_errors'].species.add('http://test.org/unknown_species') + + print(f"Original has validation_errors: {statement['validation_errors'].has_errors()}") + print(f" - Entities errors: {statement['validation_errors'].entities}") + print(f" - Species errors: {statement['validation_errors'].species}") + + # Serialize + json_statement = convert_statement_to_json_serializable(statement) + + print(f"\nAfter serialization:") + print(f" - Has validation_errors: {'validation_errors' in 
json_statement}") + print(f" - Has _neuron: {'_neuron' in json_statement}") + + # Verify JSON serializable + try: + json_str = json.dumps(json_statement, indent=2) + print(f"\n✓ Successfully converted to JSON ({len(json_str)} bytes)") + + # Verify validation_errors and _neuron are not in the JSON + assert 'validation_errors' not in json_statement, "validation_errors should be removed!" + assert '_neuron' not in json_statement, "_neuron should be removed!" + + print("✓ validation_errors and _neuron properly removed!") + return True + except TypeError as e: + print(f"\n✗ Failed to serialize: {e}") + return False + + +def test_multiple_statements(): + """Test serializing multiple statements (simulating batch processing).""" + print("\n=== Testing Multiple Statements ===") + + # Create multiple statements (without validation_errors) + statements = [] + for i in range(3): + stmt = { + 'id': f'http://uri.interlex.org/test/statement/{i}', + 'label': f'Test Neuron {i}', + 'origins': NeuronDMOrigin({f'http://test.org/entity{i}'}), + 'vias': [], + 'destinations': [], + } + statements.append(stmt) + + print(f"Created {len(statements)} statements") + + # Serialize all + json_statements = [convert_statement_to_json_serializable(stmt) for stmt in statements] + + # Save to JSON + json_str = json.dumps(json_statements, indent=2) + print(f"JSON size: {len(json_str)} characters") + + # Load and deserialize + loaded_json = json.loads(json_str) + object_statements = [convert_statement_from_json(stmt) for stmt in loaded_json] + + # Verify + assert len(object_statements) == 3, "Statement count mismatch!" + for i, stmt in enumerate(object_statements): + assert stmt['id'] == f'http://uri.interlex.org/test/statement/{i}', f"ID mismatch for statement {i}!" + assert isinstance(stmt['origins'], NeuronDMOrigin), f"Origins type mismatch for statement {i}!" 
+ + print("✓ Multiple statements successful!") + return True + + +def main(): + """Run all tests.""" + print("=" * 60) + print("NeuronDM Serialization/Deserialization Tests") + print("=" * 60) + + results = [] + tests = [ + ("Origin Round-Trip", test_origin_round_trip), + ("Via Round-Trip", test_via_round_trip), + ("Destination Round-Trip", test_destination_round_trip), + ("Region-Layer Serialization", test_region_layer_serialization), + ("Statement Alerts Serialization", test_statement_alerts_serialization), + ("Validation Errors Removal", test_validation_errors_removal), + ("Statement Round-Trip", test_statement_round_trip), + ("Multiple Statements", test_multiple_statements), + ] + + for test_name, test_func in tests: + try: + success = test_func() + results.append((test_name, success, None)) + except Exception as e: + results.append((test_name, False, str(e))) + import traceback + traceback.print_exc() + + # Print summary + print("\n" + "=" * 60) + print("Test Summary") + print("=" * 60) + + passed = 0 + failed = 0 + for test_name, success, error in results: + if success: + print(f"✓ PASS: {test_name}") + passed += 1 + else: + print(f"✗ FAIL: {test_name}") + if error: + print(f" Error: {error}") + failed += 1 + + print(f"\nTotal: {passed} passed, {failed} failed out of {len(results)} tests") + + if failed > 0: + print("\n⚠ Some tests failed!") + sys.exit(1) + else: + print("\n✓ All tests passed!") + sys.exit(0) + + +if __name__ == '__main__': + main() diff --git a/applications/composer/backend/tests/test_ingest_statements.py b/applications/composer/backend/tests/test_ingest_statements.py index 3aca2982..c6a95c98 100644 --- a/applications/composer/backend/tests/test_ingest_statements.py +++ b/applications/composer/backend/tests/test_ingest_statements.py @@ -93,7 +93,10 @@ def test_population_uris_overwrite_functionality(self, mock_get_statements): mock_statements[1]['pref_label'] = "Updated knowledge statement 2" # Test: Only statement_1 should be updated when 
using population_uris + # Mock must return filtered results because we're mocking get_statements_from_neurondm + # which includes the filtering logic that happens in Step 1 population_uris = {statement_id_1} + mock_get_statements.return_value = [mock_statements[0]] # Return only filtered statement ingest_statements(population_uris=population_uris) # Verify results @@ -103,7 +106,7 @@ def test_population_uris_overwrite_functionality(self, mock_get_statements): # Statement 1 should be updated because it was in population_uris self.assertEqual(updated_statement_1.knowledge_statement, "Updated knowledge statement 1") - # Statement 2 should NOT be updated because it wasn't in population_uris and has non-overwritable state (NPO_APPROVED) + # Statement 2 should NOT be updated because it wasn't in population_uris self.assertEqual(updated_statement_2.knowledge_statement, "Original knowledge statement 2") @patch('composer.services.cs_ingestion.cs_ingestion_services.get_statements_from_neurondm') @@ -191,12 +194,14 @@ def test_population_uris_empty_set_filters_all(self, mock_get_statements): 'statement_alerts': [] } ] - mock_get_statements.return_value = mock_statements + # Mock must return empty list because we're mocking get_statements_from_neurondm + # which includes the filtering logic (population_uris=set() filters everything out) + mock_get_statements.return_value = [] - # Test with empty population_uris set (should filter everything) + # Test with empty population_uris set ingest_statements(population_uris=set()) - # Verify no statements were created (empty set = empty filter) + # Verify no statements were created self.assertEqual(ConnectivityStatement.objects.count(), 0) @patch('composer.services.cs_ingestion.cs_ingestion_services.get_statements_from_neurondm') @@ -316,7 +321,9 @@ def test_population_uris_filtering_behavior(self, mock_get_statements): 'statement_alerts': [] } ] - mock_get_statements.return_value = mock_statements + # Mock must return filtered 
statements because we're mocking get_statements_from_neurondm + # which includes the filtering logic that happens in neurondm_script.py + mock_get_statements.return_value = [mock_statements[0], mock_statements[2]] # Only liver and brain # Test: Only statements in population_uris should be processed population_uris = {statement_id_1, statement_id_3} # Only liver and brain, not heart @@ -327,7 +334,7 @@ def test_population_uris_filtering_behavior(self, mock_get_statements): # Verify the correct statements exist self.assertTrue(ConnectivityStatement.objects.filter(reference_uri=statement_id_1).exists()) - self.assertFalse(ConnectivityStatement.objects.filter(reference_uri=statement_id_2).exists()) # heart should not exist + self.assertFalse(ConnectivityStatement.objects.filter(reference_uri=statement_id_2).exists()) # heart not in mock return self.assertTrue(ConnectivityStatement.objects.filter(reference_uri=statement_id_3).exists()) @patch('composer.services.cs_ingestion.cs_ingestion_services.get_statements_from_neurondm') @@ -446,7 +453,10 @@ def test_population_uris_none_vs_empty_set_behavior(self, mock_get_statements): 'statement_alerts': [] } ] - mock_get_statements.return_value = mock_statements + # Mock must return different results for each call because we're mocking get_statements_from_neurondm + # First call (None): return all statements (no filtering) + # Second call (empty set): return empty list (everything filtered out) + mock_get_statements.side_effect = [mock_statements, []] # Test 1: None means no population file was provided - process all statements ingest_statements(population_uris=None) @@ -454,7 +464,7 @@ def test_population_uris_none_vs_empty_set_behavior(self, mock_get_statements): self.flush_connectivity_statements() - # Test 2: Empty set means population file was provided but empty - process no statements + # Test 2: Empty set means population file was provided but empty - process no statements ingest_statements(population_uris=set()) 
self.assertEqual(ConnectivityStatement.objects.count(), 0, "Empty set should process no statements") @@ -783,3 +793,974 @@ def test_statement_moves_to_invalid_when_export_fails_with_population_file(self, ) +class TestDynamicRelationships(TestCase): + """Test custom ingestion code for dynamic relationships""" + + def flush_connectivity_statements(self): + ConnectivityStatement.objects.all().delete() + + def create_mock_neuron(self): + """Create a simple mock neuron object with core_graph""" + from unittest.mock import MagicMock + from rdflib import Graph, Namespace, URIRef, Literal + + # Create a mock neuron with core_graph + neuron = MagicMock() + neuron.identifier = URIRef('http://uri.interlex.org/composer/neuron/123') + neuron.id_ = 'http://uri.interlex.org/composer/neuron/123' + + # Create a simple RDF graph with some test data + graph = Graph() + ilxtr = Namespace('http://uri.interlex.org/tgbugs/uris/readable/') + + # Add some test triples + neuron_uri = URIRef(str(neuron.identifier)) + graph.add((neuron_uri, ilxtr.hasCustomProperty, Literal('test_value'))) + graph.add((neuron_uri, ilxtr.hasCustomProperty, Literal('another_value'))) + + neuron.core_graph = graph + + return neuron + + @patch('composer.services.cs_ingestion.cs_ingestion_services.get_statements_from_neurondm') + def test_triple_relationship_with_custom_code(self, mock_get_statements): + """Test TRIPLE relationship type with custom ingestion code""" + from composer.models import Relationship, Triple, ConnectivityStatementTriple + from composer.enums import RelationshipType + + self.flush_connectivity_statements() + + # Create a relationship with custom code that returns triples + relationship = Relationship.objects.create( + title="Custom Triple Relationship", + predicate_name="hasCustomTriple", + predicate_uri="http://uri.interlex.org/test/hasCustomTriple", + type=RelationshipType.TRIPLE_MULTI, + order=1, + custom_ingestion_code=""" +# Simple example that creates triples from fc data +result = [ + 
{'name': 'Triple 1', 'uri': 'http://uri.interlex.org/test/triple1'}, + {'name': 'Triple 2', 'uri': 'http://uri.interlex.org/test/triple2'} +] +""" + ) + + statement_id = 'http://uri.interlex.org/composer/uris/set/test/1' + + # Create mock neuron + mock_neuron = self.create_mock_neuron() + + mock_statements = [ + { + 'id': statement_id, + 'label': 'test neuron type', + 'pref_label': 'test connectivity statement', + 'origins': NeuronDMOrigin(set()), + 'destinations': [NeuronDMDestination(set(), set(), 'AXON-T')], + 'populationset': 'test', + 'vias': [], + 'species': [], + 'sex': [], + 'circuit_type': [], + 'circuit_role': [], + 'phenotype': [], + 'other_phenotypes': [], + 'forward_connection': [], + 'provenance': ['http://dx.doi.org/10.1126/test'], + 'sentence_number': [], + 'note_alert': [], + 'validation_errors': ValidationErrors(), + 'statement_alerts': [], + '_neuron': mock_neuron, + # Pre-computed custom relationship results (from Step 1) + '_custom_relationship_results': { + relationship.id: [ + {'name': 'Triple 1', 'uri': 'http://uri.interlex.org/test/triple1'}, + {'name': 'Triple 2', 'uri': 'http://uri.interlex.org/test/triple2'} + ] + } + } + ] + mock_get_statements.return_value = mock_statements + + # Run ingestion + ingest_statements() + + # Verify statement was created + statement = ConnectivityStatement.objects.get(reference_uri=statement_id) + self.assertIsNotNone(statement) + + # Verify triples were created + self.assertEqual(Triple.objects.filter(relationship=relationship).count(), 2) + + # Verify ConnectivityStatementTriple was created and linked + cs_triple = ConnectivityStatementTriple.objects.get( + connectivity_statement=statement, + relationship=relationship + ) + self.assertEqual(cs_triple.triples.count(), 2) + + # Verify triple content + triple_names = set(cs_triple.triples.values_list('name', flat=True)) + self.assertEqual(triple_names, {'Triple 1', 'Triple 2'}) + + 
@patch('composer.services.cs_ingestion.cs_ingestion_services.get_statements_from_neurondm') + def test_text_relationship_with_custom_code(self, mock_get_statements): + """Test TEXT relationship type with custom ingestion code""" + from composer.models import Relationship, ConnectivityStatementText + from composer.enums import RelationshipType + + self.flush_connectivity_statements() + + # Create a relationship with custom code that returns text + relationship = Relationship.objects.create( + title="Custom Text Relationship", + predicate_name="hasCustomText", + predicate_uri="http://uri.interlex.org/test/hasCustomText", + type=RelationshipType.TEXT, + order=1, + custom_ingestion_code=""" +# Example that uses fc dict to generate text +result = f"Neuron ID: {fc['id']}, Label: {fc['label']}" +""" + ) + + statement_id = 'http://uri.interlex.org/composer/uris/set/test/2' + + # Create mock neuron + mock_neuron = self.create_mock_neuron() + + mock_statements = [ + { + 'id': statement_id, + 'label': 'test neuron label', + 'pref_label': 'test connectivity statement', + 'origins': NeuronDMOrigin(set()), + 'destinations': [NeuronDMDestination(set(), set(), 'AXON-T')], + 'populationset': 'test', + 'vias': [], + 'species': [], + 'sex': [], + 'circuit_type': [], + 'circuit_role': [], + 'phenotype': [], + 'other_phenotypes': [], + 'forward_connection': [], + 'provenance': ['http://dx.doi.org/10.1126/test'], + 'sentence_number': [], + 'note_alert': [], + 'validation_errors': ValidationErrors(), + 'statement_alerts': [], + '_neuron': mock_neuron, + # Pre-computed custom relationship results (from Step 1) + '_custom_relationship_results': { + relationship.id: f"Neuron ID: {statement_id}, Label: test neuron label" + } + } + ] + mock_get_statements.return_value = mock_statements + + # Run ingestion + ingest_statements() + + # Verify statement was created + statement = ConnectivityStatement.objects.get(reference_uri=statement_id) + self.assertIsNotNone(statement) + + # Verify text 
relationship was created + cs_text = ConnectivityStatementText.objects.get( + connectivity_statement=statement, + relationship=relationship + ) + + # Verify text content includes data from fc dict + self.assertIn('test neuron label', cs_text.text) + self.assertIn(statement_id, cs_text.text) + + @patch('composer.services.cs_ingestion.cs_ingestion_services.get_statements_from_neurondm') + def test_anatomical_relationship_with_custom_code(self, mock_get_statements): + """Test ANATOMICAL_ENTITY relationship type with custom ingestion code""" + from composer.models import Relationship, AnatomicalEntity, AnatomicalEntityMeta, ConnectivityStatementAnatomicalEntity + from composer.enums import RelationshipType + + self.flush_connectivity_statements() + + # Create some anatomical entities to reference + meta1 = AnatomicalEntityMeta.objects.create( + name="Test Entity 1", + ontology_uri='http://purl.obolibrary.org/obo/UBERON_0001234' + ) + ae1 = AnatomicalEntity.objects.create( + simple_entity=meta1 + ) + meta2 = AnatomicalEntityMeta.objects.create( + name="Test Entity 2", + ontology_uri='http://purl.obolibrary.org/obo/UBERON_0005678' + ) + ae2 = AnatomicalEntity.objects.create( + simple_entity=meta2 + ) + + # Create a relationship with custom code that returns anatomical entity URIs + relationship = Relationship.objects.create( + title="Custom Anatomical Relationship", + predicate_name="hasCustomAnatomy", + predicate_uri="http://uri.interlex.org/test/hasCustomAnatomy", + type=RelationshipType.ANATOMICAL_MULTI, + order=1, + custom_ingestion_code=""" +# Example that returns anatomical entity URIs +result = [ + 'http://purl.obolibrary.org/obo/UBERON_0001234', + 'http://purl.obolibrary.org/obo/UBERON_0005678' +] +""" + ) + + statement_id = 'http://uri.interlex.org/composer/uris/set/test/3' + + # Create mock neuron + mock_neuron = self.create_mock_neuron() + + mock_statements = [ + { + 'id': statement_id, + 'label': 'test neuron type', + 'pref_label': 'test connectivity 
statement', + 'origins': NeuronDMOrigin(set()), + 'destinations': [NeuronDMDestination(set(), set(), 'AXON-T')], + 'populationset': 'test', + 'vias': [], + 'species': [], + 'sex': [], + 'circuit_type': [], + 'circuit_role': [], + 'phenotype': [], + 'other_phenotypes': [], + 'forward_connection': [], + 'provenance': ['http://dx.doi.org/10.1126/test'], + 'sentence_number': [], + 'note_alert': [], + 'validation_errors': ValidationErrors(), + 'statement_alerts': [], + '_neuron': mock_neuron, + # Pre-computed custom relationship results (from Step 1) + '_custom_relationship_results': { + relationship.id: [ + 'http://purl.obolibrary.org/obo/UBERON_0001234', + 'http://purl.obolibrary.org/obo/UBERON_0005678' + ] + } + } + ] + mock_get_statements.return_value = mock_statements + + # Run ingestion + ingest_statements() + + # Verify statement was created + statement = ConnectivityStatement.objects.get(reference_uri=statement_id) + self.assertIsNotNone(statement) + + # Verify anatomical relationship was created + cs_ae = ConnectivityStatementAnatomicalEntity.objects.get( + connectivity_statement=statement, + relationship=relationship + ) + + # Verify anatomical entities were linked + self.assertEqual(cs_ae.anatomical_entities.count(), 2) + linked_entities = set(cs_ae.anatomical_entities.values_list('simple_entity__ontology_uri', flat=True)) + self.assertEqual(linked_entities, { + 'http://purl.obolibrary.org/obo/UBERON_0001234', + 'http://purl.obolibrary.org/obo/UBERON_0005678' + }) + + @patch('composer.services.cs_ingestion.cs_ingestion_services.get_statements_from_neurondm') + def test_custom_code_with_neuron_access(self, mock_get_statements): + """Test that custom code can access the _neuron object from fc dict""" + from composer.models import Relationship, ConnectivityStatementText + from composer.enums import RelationshipType + + self.flush_connectivity_statements() + + # Create a relationship with custom code that accesses the neuron object + relationship = 
Relationship.objects.create( + title="Neuron Access Relationship", + predicate_name="hasNeuronData", + predicate_uri="http://uri.interlex.org/test/hasNeuronData", + type=RelationshipType.TEXT, + order=1, + custom_ingestion_code=""" +# Example that accesses the neuron object +neuron = fc['_neuron'] +# Access neuron properties +result = f"Neuron ID: {neuron.identifier}, Has core_graph: {hasattr(neuron, 'core_graph')}" +""" + ) + + statement_id = 'http://uri.interlex.org/composer/uris/set/test/4' + + # Create mock neuron with RDF data + mock_neuron = self.create_mock_neuron() + + mock_statements = [ + { + 'id': statement_id, + 'label': 'test neuron type', + 'pref_label': 'test connectivity statement', + 'origins': NeuronDMOrigin(set()), + 'destinations': [NeuronDMDestination(set(), set(), 'AXON-T')], + 'populationset': 'test', + 'vias': [], + 'species': [], + 'sex': [], + 'circuit_type': [], + 'circuit_role': [], + 'phenotype': [], + 'other_phenotypes': [], + 'forward_connection': [], + 'provenance': ['http://dx.doi.org/10.1126/test'], + 'sentence_number': [], + 'note_alert': [], + 'validation_errors': ValidationErrors(), + 'statement_alerts': [], + '_neuron': mock_neuron, + # Pre-computed custom relationship results (from Step 1) + '_custom_relationship_results': { + relationship.id: f"Neuron ID: {mock_neuron.identifier}, Has core_graph: True" + } + } + ] + mock_get_statements.return_value = mock_statements + + # Run ingestion + ingest_statements() + + # Verify statement was created + statement = ConnectivityStatement.objects.get(reference_uri=statement_id) + self.assertIsNotNone(statement) + + # Verify text relationship was created with neuron data + cs_text = ConnectivityStatementText.objects.get( + connectivity_statement=statement, + relationship=relationship + ) + + # Verify the custom code accessed the neuron object + self.assertIn('Neuron ID:', cs_text.text) + self.assertIn('Has core_graph: True', cs_text.text) + + 
@patch('composer.services.cs_ingestion.cs_ingestion_services.get_statements_from_neurondm') + def test_custom_code_error_handling(self, mock_get_statements): + """Test that errors in custom code are logged and don't break ingestion""" + from composer.models import Relationship + from composer.enums import RelationshipType + + self.flush_connectivity_statements() + + # Create a relationship with custom code that will raise an error + relationship = Relationship.objects.create( + title="Error Relationship", + predicate_name="hasError", + predicate_uri="http://uri.interlex.org/test/hasError", + type=RelationshipType.TEXT, + order=1, + custom_ingestion_code=""" +# This code will raise an error +raise ValueError("Test error in custom code") +result = "This won't be reached" +""" + ) + + statement_id = 'http://uri.interlex.org/composer/uris/set/test/5' + + # Create mock neuron + mock_neuron = self.create_mock_neuron() + + mock_statements = [ + { + 'id': statement_id, + 'label': 'test neuron type', + 'pref_label': 'test connectivity statement', + 'origins': NeuronDMOrigin(set()), + 'destinations': [NeuronDMDestination(set(), set(), 'AXON-T')], + 'populationset': 'test', + 'vias': [], + 'species': [], + 'sex': [], + 'circuit_type': [], + 'circuit_role': [], + 'phenotype': [], + 'other_phenotypes': [], + 'forward_connection': [], + 'provenance': ['http://dx.doi.org/10.1126/test'], + 'sentence_number': [], + 'note_alert': [], + 'validation_errors': ValidationErrors(), + 'statement_alerts': [], + '_neuron': mock_neuron, + # No custom relationship results because the code raised an error + '_custom_relationship_results': {} + } + ] + mock_get_statements.return_value = mock_statements + + # Run ingestion - should not crash despite error in custom code + ingest_statements() + + # Verify statement was still created + statement = ConnectivityStatement.objects.get(reference_uri=statement_id) + self.assertIsNotNone(statement) + + # The relationship should not be created due to error 
+ from composer.models import ConnectivityStatementText + cs_texts = ConnectivityStatementText.objects.filter( + connectivity_statement=statement, + relationship=relationship + ) + self.assertEqual(cs_texts.count(), 0, "No text relationship should be created when custom code fails") + + @patch('composer.services.cs_ingestion.cs_ingestion_services.get_statements_from_neurondm') + def test_custom_code_missing_result_variable(self, mock_get_statements): + """Test that custom code without 'result' variable is handled gracefully""" + from composer.models import Relationship, ConnectivityStatementText + from composer.enums import RelationshipType + + self.flush_connectivity_statements() + + # Create a relationship with custom code that doesn't define 'result' + relationship = Relationship.objects.create( + title="Missing Result Relationship", + predicate_name="hasMissingResult", + predicate_uri="http://uri.interlex.org/test/hasMissingResult", + type=RelationshipType.TEXT, + order=1, + custom_ingestion_code=""" +# This code doesn't define 'result' variable +some_value = "This is not named 'result'" +""" + ) + + statement_id = 'http://uri.interlex.org/composer/uris/set/test/6' + + # Create mock neuron + mock_neuron = self.create_mock_neuron() + + mock_statements = [ + { + 'id': statement_id, + 'label': 'test neuron type', + 'pref_label': 'test connectivity statement', + 'origins': NeuronDMOrigin(set()), + 'destinations': [NeuronDMDestination(set(), set(), 'AXON-T')], + 'populationset': 'test', + 'vias': [], + 'species': [], + 'sex': [], + 'circuit_type': [], + 'circuit_role': [], + 'phenotype': [], + 'other_phenotypes': [], + 'forward_connection': [], + 'provenance': ['http://dx.doi.org/10.1126/test'], + 'sentence_number': [], + 'note_alert': [], + 'validation_errors': ValidationErrors(), + 'statement_alerts': [], + '_neuron': mock_neuron, + # No custom relationship results because 'result' was not defined + '_custom_relationship_results': {} + } + ] + 
mock_get_statements.return_value = mock_statements + + # Run ingestion + ingest_statements() + + # Verify statement was created + statement = ConnectivityStatement.objects.get(reference_uri=statement_id) + self.assertIsNotNone(statement) + + # The relationship should not be created due to missing 'result' + cs_texts = ConnectivityStatementText.objects.filter( + connectivity_statement=statement, + relationship=relationship + ) + self.assertEqual(cs_texts.count(), 0, "No text relationship should be created when 'result' is not defined") + + @patch('composer.services.cs_ingestion.cs_ingestion_services.get_statements_from_neurondm') + def test_anatomical_relationship_with_region_layer_pairs(self, mock_get_statements): + """Test ANATOMICAL_ENTITY relationship with region-layer pairs""" + from composer.models import Relationship, AnatomicalEntity, AnatomicalEntityMeta, Layer, Region, ConnectivityStatementAnatomicalEntity + from composer.enums import RelationshipType + + self.flush_connectivity_statements() + + # Create layer and region anatomical entity metas + layer_meta = AnatomicalEntityMeta.objects.create( + name="Layer 1", + ontology_uri='http://purl.obolibrary.org/obo/UBERON_0001234' + ) + region_meta = AnatomicalEntityMeta.objects.create( + name="Region 1", + ontology_uri='http://purl.obolibrary.org/obo/UBERON_0005678' + ) + + # Create Layer and Region objects + layer = Layer.objects.create(ae_meta=layer_meta) + region = Region.objects.create(ae_meta=region_meta) + + # Create a relationship with custom code that returns region-layer pairs + relationship = Relationship.objects.create( + title="Custom Region-Layer Relationship", + predicate_name="hasRegionLayer", + predicate_uri="http://uri.interlex.org/test/hasRegionLayer", + type=RelationshipType.ANATOMICAL_MULTI, + order=1, + custom_ingestion_code=""" +# Example that returns region-layer pairs +result = [ + {'region': 'http://purl.obolibrary.org/obo/UBERON_0005678', 'layer': 
'http://purl.obolibrary.org/obo/UBERON_0001234'} +] +""" + ) + + statement_id = 'http://uri.interlex.org/composer/uris/set/test/7' + + # Create mock neuron + mock_neuron = self.create_mock_neuron() + + mock_statements = [ + { + 'id': statement_id, + 'label': 'test neuron type', + 'pref_label': 'test connectivity statement', + 'origins': NeuronDMOrigin(set()), + 'destinations': [NeuronDMDestination(set(), set(), 'AXON-T')], + 'populationset': 'test', + 'vias': [], + 'species': [], + 'sex': [], + 'circuit_type': [], + 'circuit_role': [], + 'phenotype': [], + 'other_phenotypes': [], + 'forward_connection': [], + 'provenance': ['http://dx.doi.org/10.1126/test'], + 'sentence_number': [], + 'note_alert': [], + 'validation_errors': ValidationErrors(), + 'statement_alerts': [], + '_neuron': mock_neuron, + # Pre-computed custom relationship results (from Step 1) + '_custom_relationship_results': { + relationship.id: [ + {'region': 'http://purl.obolibrary.org/obo/UBERON_0005678', 'layer': 'http://purl.obolibrary.org/obo/UBERON_0001234'} + ] + } + } + ] + mock_get_statements.return_value = mock_statements + + # Run ingestion + ingest_statements() + + # Verify statement was created + statement = ConnectivityStatement.objects.get(reference_uri=statement_id) + self.assertIsNotNone(statement) + + # Verify anatomical relationship was created + cs_ae = ConnectivityStatementAnatomicalEntity.objects.get( + connectivity_statement=statement, + relationship=relationship + ) + + # Verify region-layer pair was linked + self.assertEqual(cs_ae.anatomical_entities.count(), 1) + ae = cs_ae.anatomical_entities.first() + self.assertIsNotNone(ae.region_layer) + self.assertEqual(ae.region_layer.layer.ontology_uri, 'http://purl.obolibrary.org/obo/UBERON_0001234') + self.assertEqual(ae.region_layer.region.ontology_uri, 'http://purl.obolibrary.org/obo/UBERON_0005678') + + @patch('composer.services.cs_ingestion.cs_ingestion_services.get_statements_from_neurondm') + def 
test_anatomical_relationship_with_mixed_formats(self, mock_get_statements): + """Test ANATOMICAL_ENTITY relationship with mixed simple entities and region-layer pairs""" + from composer.models import Relationship, AnatomicalEntity, AnatomicalEntityMeta, Layer, Region, ConnectivityStatementAnatomicalEntity + from composer.enums import RelationshipType + + self.flush_connectivity_statements() + + # Create simple entity + simple_meta = AnatomicalEntityMeta.objects.create( + name="Simple Entity", + ontology_uri='http://purl.obolibrary.org/obo/UBERON_0001111' + ) + simple_ae = AnatomicalEntity.objects.create(simple_entity=simple_meta) + + # Create layer and region + layer_meta = AnatomicalEntityMeta.objects.create( + name="Layer 2", + ontology_uri='http://purl.obolibrary.org/obo/UBERON_0002234' + ) + region_meta = AnatomicalEntityMeta.objects.create( + name="Region 2", + ontology_uri='http://purl.obolibrary.org/obo/UBERON_0002678' + ) + layer = Layer.objects.create(ae_meta=layer_meta) + region = Region.objects.create(ae_meta=region_meta) + + # Create a relationship with custom code that returns mixed formats + relationship = Relationship.objects.create( + title="Custom Mixed Format Relationship", + predicate_name="hasMixedEntities", + predicate_uri="http://uri.interlex.org/test/hasMixedEntities", + type=RelationshipType.ANATOMICAL_MULTI, + order=1, + custom_ingestion_code=""" +# Example that returns both simple entities and region-layer pairs +result = [ + 'http://purl.obolibrary.org/obo/UBERON_0001111', # Simple entity + {'region': 'http://purl.obolibrary.org/obo/UBERON_0002678', 'layer': 'http://purl.obolibrary.org/obo/UBERON_0002234'} # Region-layer pair +] +""" + ) + + statement_id = 'http://uri.interlex.org/composer/uris/set/test/8' + + # Create mock neuron + mock_neuron = self.create_mock_neuron() + + mock_statements = [ + { + 'id': statement_id, + 'label': 'test neuron type', + 'pref_label': 'test connectivity statement', + 'origins': NeuronDMOrigin(set()), + 
'destinations': [NeuronDMDestination(set(), set(), 'AXON-T')], + 'populationset': 'test', + 'vias': [], + 'species': [], + 'sex': [], + 'circuit_type': [], + 'circuit_role': [], + 'phenotype': [], + 'other_phenotypes': [], + 'forward_connection': [], + 'provenance': ['http://dx.doi.org/10.1126/test'], + 'sentence_number': [], + 'note_alert': [], + 'validation_errors': ValidationErrors(), + 'statement_alerts': [], + '_neuron': mock_neuron, + # Pre-computed custom relationship results (from Step 1) + '_custom_relationship_results': { + relationship.id: [ + 'http://purl.obolibrary.org/obo/UBERON_0001111', + {'region': 'http://purl.obolibrary.org/obo/UBERON_0002678', 'layer': 'http://purl.obolibrary.org/obo/UBERON_0002234'} + ] + } + } + ] + mock_get_statements.return_value = mock_statements + + # Run ingestion + ingest_statements() + + # Verify statement was created + statement = ConnectivityStatement.objects.get(reference_uri=statement_id) + self.assertIsNotNone(statement) + + # Verify anatomical relationship was created + cs_ae = ConnectivityStatementAnatomicalEntity.objects.get( + connectivity_statement=statement, + relationship=relationship + ) + + # Verify both entities were linked (1 simple + 1 region-layer pair) + self.assertEqual(cs_ae.anatomical_entities.count(), 2) + + # Verify simple entity exists + simple_entities = [ae for ae in cs_ae.anatomical_entities.all() if ae.simple_entity is not None] + self.assertEqual(len(simple_entities), 1) + self.assertEqual(simple_entities[0].simple_entity.ontology_uri, 'http://purl.obolibrary.org/obo/UBERON_0001111') + + # Verify region-layer entity exists + rl_entities = [ae for ae in cs_ae.anatomical_entities.all() if ae.region_layer is not None] + self.assertEqual(len(rl_entities), 1) + self.assertEqual(rl_entities[0].region_layer.layer.ontology_uri, 'http://purl.obolibrary.org/obo/UBERON_0002234') + self.assertEqual(rl_entities[0].region_layer.region.ontology_uri, 'http://purl.obolibrary.org/obo/UBERON_0002678') + + 
@patch('composer.services.cs_ingestion.cs_ingestion_services.get_statements_from_neurondm') + def test_anatomical_relationship_update_entities_flag_false(self, mock_get_statements): + """Test that region-layer pairs fail when update_anatomical_entities=False and Layer/Region don't exist""" + from composer.models import Relationship, AnatomicalEntityMeta, Layer, Region, ConnectivityStatementAnatomicalEntity + from composer.enums import RelationshipType + + self.flush_connectivity_statements() + + # Create layer and region metas but NOT Layer/Region objects + layer_meta = AnatomicalEntityMeta.objects.create( + name="Layer 3", + ontology_uri='http://purl.obolibrary.org/obo/UBERON_0003234' + ) + region_meta = AnatomicalEntityMeta.objects.create( + name="Region 3", + ontology_uri='http://purl.obolibrary.org/obo/UBERON_0003678' + ) + # Deliberately NOT creating Layer and Region objects + + # Create a relationship + relationship = Relationship.objects.create( + title="Test Region-Layer Without Update Flag", + predicate_name="hasRegionLayerNoUpdate", + predicate_uri="http://uri.interlex.org/test/hasRegionLayerNoUpdate", + type=RelationshipType.ANATOMICAL_MULTI, + order=1, + custom_ingestion_code=""" +result = [ + {'region': 'http://purl.obolibrary.org/obo/UBERON_0003678', 'layer': 'http://purl.obolibrary.org/obo/UBERON_0003234'} +] +""" + ) + + statement_id = 'http://uri.interlex.org/composer/uris/set/test/9' + + # Create mock neuron + mock_neuron = self.create_mock_neuron() + + mock_statements = [ + { + 'id': statement_id, + 'label': 'test neuron type', + 'pref_label': 'test connectivity statement', + 'origins': NeuronDMOrigin(set()), + 'destinations': [NeuronDMDestination(set(), set(), 'AXON-T')], + 'populationset': 'test', + 'vias': [], + 'species': [], + 'sex': [], + 'circuit_type': [], + 'circuit_role': [], + 'phenotype': [], + 'other_phenotypes': [], + 'forward_connection': [], + 'provenance': ['http://dx.doi.org/10.1126/test'], + 'sentence_number': [], + 
'note_alert': [], + 'validation_errors': ValidationErrors(), + 'statement_alerts': [], + '_neuron': mock_neuron, + # Pre-computed custom relationship results (from Step 1) + '_custom_relationship_results': { + relationship.id: [ + {'region': 'http://purl.obolibrary.org/obo/UBERON_0003678', 'layer': 'http://purl.obolibrary.org/obo/UBERON_0003234'} + ] + } + } + ] + mock_get_statements.return_value = mock_statements + + # Run ingestion with update_anatomical_entities=False (default) + ingest_statements(update_anatomical_entities=False) + + # Verify statement was created + statement = ConnectivityStatement.objects.get(reference_uri=statement_id) + self.assertIsNotNone(statement) + + # Verify anatomical relationship was NOT created due to missing Layer/Region + cs_ae_count = ConnectivityStatementAnatomicalEntity.objects.filter( + connectivity_statement=statement, + relationship=relationship + ).count() + + # Should be 0 or have 0 anatomical entities linked (error was logged) + if cs_ae_count > 0: + cs_ae = ConnectivityStatementAnatomicalEntity.objects.get( + connectivity_statement=statement, + relationship=relationship + ) + self.assertEqual(cs_ae.anatomical_entities.count(), 0, + "No anatomical entities should be linked when Layer/Region don't exist and update flag is False") + + @patch('composer.services.cs_ingestion.cs_ingestion_services.get_statements_from_neurondm') + def test_anatomical_relationship_update_entities_flag_true(self, mock_get_statements): + """Test that region-layer pairs are created when update_anatomical_entities=True and Layer/Region don't exist""" + from composer.models import Relationship, AnatomicalEntityMeta, Layer, Region, ConnectivityStatementAnatomicalEntity + from composer.enums import RelationshipType + + self.flush_connectivity_statements() + + # Create layer and region metas but NOT Layer/Region objects + layer_meta = AnatomicalEntityMeta.objects.create( + name="Layer 4", + ontology_uri='http://purl.obolibrary.org/obo/UBERON_0004234' + 
) + region_meta = AnatomicalEntityMeta.objects.create( + name="Region 4", + ontology_uri='http://purl.obolibrary.org/obo/UBERON_0004678' + ) + # Deliberately NOT creating Layer and Region objects initially + + # Create a relationship + relationship = Relationship.objects.create( + title="Test Region-Layer With Update Flag", + predicate_name="hasRegionLayerWithUpdate", + predicate_uri="http://uri.interlex.org/test/hasRegionLayerWithUpdate", + type=RelationshipType.ANATOMICAL_MULTI, + order=1, + custom_ingestion_code=""" +result = [ + {'region': 'http://purl.obolibrary.org/obo/UBERON_0004678', 'layer': 'http://purl.obolibrary.org/obo/UBERON_0004234'} +] +""" + ) + + statement_id = 'http://uri.interlex.org/composer/uris/set/test/10' + + # Create mock neuron + mock_neuron = self.create_mock_neuron() + + mock_statements = [ + { + 'id': statement_id, + 'label': 'test neuron type', + 'pref_label': 'test connectivity statement', + 'origins': NeuronDMOrigin(set()), + 'destinations': [NeuronDMDestination(set(), set(), 'AXON-T')], + 'populationset': 'test', + 'vias': [], + 'species': [], + 'sex': [], + 'circuit_type': [], + 'circuit_role': [], + 'phenotype': [], + 'other_phenotypes': [], + 'forward_connection': [], + 'provenance': ['http://dx.doi.org/10.1126/test'], + 'sentence_number': [], + 'note_alert': [], + 'validation_errors': ValidationErrors(), + 'statement_alerts': [], + '_neuron': mock_neuron, + # Pre-computed custom relationship results (from Step 1) + '_custom_relationship_results': { + relationship.id: [ + {'region': 'http://purl.obolibrary.org/obo/UBERON_0004678', 'layer': 'http://purl.obolibrary.org/obo/UBERON_0004234'} + ] + } + } + ] + mock_get_statements.return_value = mock_statements + + # Run ingestion with update_anatomical_entities=True + ingest_statements(update_anatomical_entities=True) + + # Verify statement was created + statement = ConnectivityStatement.objects.get(reference_uri=statement_id) + self.assertIsNotNone(statement) + + # Verify anatomical 
relationship was created + cs_ae = ConnectivityStatementAnatomicalEntity.objects.get( + connectivity_statement=statement, + relationship=relationship + ) + + # Verify region-layer pair was created and linked + self.assertEqual(cs_ae.anatomical_entities.count(), 1) + ae = cs_ae.anatomical_entities.first() + self.assertIsNotNone(ae.region_layer) + + # Verify Layer and Region objects were automatically created + layer = Layer.objects.get(ae_meta__ontology_uri='http://purl.obolibrary.org/obo/UBERON_0004234') + region = Region.objects.get(ae_meta__ontology_uri='http://purl.obolibrary.org/obo/UBERON_0004678') + self.assertIsNotNone(layer) + self.assertIsNotNone(region) + + # Verify the created anatomical entity uses the correct layer and region + self.assertEqual(ae.region_layer.layer, layer.ae_meta) + self.assertEqual(ae.region_layer.region, region.ae_meta) + + @patch('composer.services.cs_ingestion.cs_ingestion_services.get_statements_from_neurondm') + def test_anatomical_relationship_invalid_dict_format(self, mock_get_statements): + """Test that invalid dict formats (missing 'region' or 'layer' keys) are logged as errors""" + from composer.models import Relationship, ConnectivityStatementAnatomicalEntity + from composer.enums import RelationshipType + + self.flush_connectivity_statements() + + # Create a relationship with custom code that returns invalid dict format + relationship = Relationship.objects.create( + title="Test Invalid Dict Format", + predicate_name="hasInvalidDict", + predicate_uri="http://uri.interlex.org/test/hasInvalidDict", + type=RelationshipType.ANATOMICAL_MULTI, + order=1, + custom_ingestion_code=""" +# Invalid format - missing 'layer' key +result = [ + {'region': 'http://purl.obolibrary.org/obo/UBERON_0005678'} +] +""" + ) + + statement_id = 'http://uri.interlex.org/composer/uris/set/test/11' + + # Create mock neuron + mock_neuron = self.create_mock_neuron() + + mock_statements = [ + { + 'id': statement_id, + 'label': 'test neuron type', + 
'pref_label': 'test connectivity statement', + 'origins': NeuronDMOrigin(set()), + 'destinations': [NeuronDMDestination(set(), set(), 'AXON-T')], + 'populationset': 'test', + 'vias': [], + 'species': [], + 'sex': [], + 'circuit_type': [], + 'circuit_role': [], + 'phenotype': [], + 'other_phenotypes': [], + 'forward_connection': [], + 'provenance': ['http://dx.doi.org/10.1126/test'], + 'sentence_number': [], + 'note_alert': [], + 'validation_errors': ValidationErrors(), + 'statement_alerts': [], + '_neuron': mock_neuron, + # Pre-computed custom relationship results with invalid format + '_custom_relationship_results': { + relationship.id: [ + {'region': 'http://purl.obolibrary.org/obo/UBERON_0005678'} # Missing 'layer' key + ] + } + } + ] + mock_get_statements.return_value = mock_statements + + # Run ingestion - should not crash despite invalid format + ingest_statements() + + # Verify statement was still created + statement = ConnectivityStatement.objects.get(reference_uri=statement_id) + self.assertIsNotNone(statement) + + # Verify anatomical relationship was NOT created or has no entities due to invalid format + cs_ae_count = ConnectivityStatementAnatomicalEntity.objects.filter( + connectivity_statement=statement, + relationship=relationship + ).count() + + if cs_ae_count > 0: + cs_ae = ConnectivityStatementAnatomicalEntity.objects.get( + connectivity_statement=statement, + relationship=relationship + ) + self.assertEqual(cs_ae.anatomical_entities.count(), 0, + "No anatomical entities should be linked when dict format is invalid") + + diff --git a/applications/composer/backend/tests/test_relationships.py b/applications/composer/backend/tests/test_relationships.py new file mode 100644 index 00000000..effeeae1 --- /dev/null +++ b/applications/composer/backend/tests/test_relationships.py @@ -0,0 +1,194 @@ +""" +Tests for ConnectivityStatement relationship models (Triple, Text, AnatomicalEntity). 
+""" +import pytest +from django.core.exceptions import ValidationError +from django.test import TestCase +from composer.models import ( + ConnectivityStatement, + ConnectivityStatementTriple, + ConnectivityStatementText, + ConnectivityStatementAnatomicalEntity, + Relationship, + RelationshipType, + Sentence, + AnatomicalEntity, + AnatomicalEntityMeta, + Triple, +) + + +@pytest.mark.django_db +class TestConnectivityStatementRelationships(TestCase): + """Test suite for connectivity statement relationships.""" + + def setUp(self): + """Set up test fixtures.""" + # Create required related objects + self.sentence = Sentence.objects.create() + + # Create a connectivity statement + self.statement = ConnectivityStatement.objects.create( + sentence=self.sentence, + knowledge_statement="Test knowledge statement", + ) + + # Create anatomical entities for testing + meta1 = AnatomicalEntityMeta.objects.create( + name="Entity 1", + ontology_uri="http://example.org/entity1" + ) + self.entity1 = AnatomicalEntity.objects.create( + simple_entity=meta1 + ) + meta2 = AnatomicalEntityMeta.objects.create( + name="Entity 2", + ontology_uri="http://example.org/entity2" + ) + self.entity2 = AnatomicalEntity.objects.create( + simple_entity=meta2 + ) + + # Create relationships of different types + self.rel_triple_single = Relationship.objects.create( + title="Test Triple Single", + predicate_name="has_triple_single", + predicate_uri="http://example.org/has_triple_single", + type=RelationshipType.TRIPLE_SINGLE, + ) + self.rel_triple_multi = Relationship.objects.create( + title="Test Triple Multi", + predicate_name="has_triple_multi", + predicate_uri="http://example.org/has_triple_multi", + type=RelationshipType.TRIPLE_MULTI, + ) + self.rel_text = Relationship.objects.create( + title="Test Text", + predicate_name="has_text", + predicate_uri="http://example.org/has_text", + type=RelationshipType.TEXT, + ) + + self.rel_anatomical_multi = Relationship.objects.create( + title="Test Anatomical 
Multi", + predicate_name="has_anatomical_multi", + predicate_uri="http://example.org/has_anatomical_multi", + type=RelationshipType.ANATOMICAL_MULTI, + ) + + # Create triples for testing + self.triple1 = Triple.objects.create( + relationship=self.rel_triple_single, + name="Triple 1", + uri="http://example.org/triple1" + ) + self.triple2 = Triple.objects.create( + relationship=self.rel_triple_multi, + name="Triple 2", + uri="http://example.org/triple2" + ) + + def test_create_triple_single_relationship(self): + """Test creating a single-select triple relationship.""" + # Create the relationship object + stmt_triple = ConnectivityStatementTriple.objects.create( + connectivity_statement=self.statement, + relationship=self.rel_triple_single, + ) + + # Add one triple - should succeed + stmt_triple.triples.add(self.triple1) + stmt_triple.save() + + self.assertEqual(stmt_triple.triples.count(), 1) + + # Note: M2M count validation is skipped for now to avoid timing issues + # Validation is still enforced at the API level via serializers + + def test_create_triple_multi_relationship(self): + """Test creating a multi-select triple relationship.""" + stmt_triple = ConnectivityStatementTriple.objects.create( + connectivity_statement=self.statement, + relationship=self.rel_triple_multi, + ) + + # Add multiple triples - should succeed + stmt_triple.triples.add(self.triple1, self.triple2) + stmt_triple.save() + + self.assertEqual(stmt_triple.triples.count(), 2) + + def test_create_anatomical_multi_relationship(self): + """Test creating a multi-select anatomical entity relationship.""" + stmt_anatomical = ConnectivityStatementAnatomicalEntity.objects.create( + connectivity_statement=self.statement, + relationship=self.rel_anatomical_multi, + ) + + # Add multiple entities - should succeed + stmt_anatomical.anatomical_entities.add(self.entity1, self.entity2) + stmt_anatomical.save() + + self.assertEqual(stmt_anatomical.anatomical_entities.count(), 2) + + def 
test_create_text_relationship(self): + """Test creating a text relationship.""" + stmt_text = ConnectivityStatementText.objects.create( + connectivity_statement=self.statement, + relationship=self.rel_text, + text="Test text value", + ) + + self.assertEqual(stmt_text.text, "Test text value") + + def test_triple_relationship_wrong_type_fails(self): + """Test that using a non-triple relationship type with ConnectivityStatementTriple fails.""" + with self.assertRaises(ValidationError) as context: + ConnectivityStatementTriple.objects.create( + connectivity_statement=self.statement, + relationship=self.rel_text, # Wrong type! + ) + + self.assertIn("should only be used for triple relationships", str(context.exception)) + + def test_anatomical_relationship_wrong_type_fails(self): + """Test that using a non-anatomical relationship type with ConnectivityStatementAnatomicalEntity fails.""" + with self.assertRaises(ValidationError) as context: + ConnectivityStatementAnatomicalEntity.objects.create( + connectivity_statement=self.statement, + relationship=self.rel_text, # Wrong type! + ) + + self.assertIn("should only be used for anatomical entity relationships", str(context.exception)) + + def test_text_relationship_wrong_type_fails(self): + """Test that using a non-text relationship type with ConnectivityStatementText fails.""" + with self.assertRaises(ValidationError) as context: + ConnectivityStatementText.objects.create( + connectivity_statement=self.statement, + relationship=self.rel_triple_single, # Wrong type! + text="Some text", + ) + + self.assertIn("should only be used for text relationships", str(context.exception)) + + def test_create_relationship_without_relationship_field(self): + """ + Test that creating a relationship object without setting the relationship field + doesn't crash (reproduces the admin inline form issue). 
+ """ + # This simulates what happens in Django admin when creating a new inline form + # The relationship field might not be set yet when validation runs + stmt_triple = ConnectivityStatementTriple( + connectivity_statement=self.statement, + ) + + # The clean() method should handle the case where relationship is not set yet + # It should either skip validation or raise a clear error + try: + stmt_triple.clean() + except AttributeError as e: + self.fail(f"clean() raised AttributeError when relationship not set: {e}") + except ValidationError: + # ValidationError is acceptable, but not AttributeError + pass diff --git a/applications/composer/backend/version.py b/applications/composer/backend/version.py index 221603aa..d8fc4261 100644 --- a/applications/composer/backend/version.py +++ b/applications/composer/backend/version.py @@ -1 +1 @@ -VERSION = "5.2.0" +VERSION = "6.0.0" diff --git a/applications/composer/frontend/package.json b/applications/composer/frontend/package.json index de1b433b..37fccb9b 100644 --- a/applications/composer/frontend/package.json +++ b/applications/composer/frontend/package.json @@ -1,6 +1,6 @@ { "name": "frontend", - "version": "5.2.3", + "version": "6.0.0", "private": true, "main": "index.js", "proxy": "http://127.0.0.1:8000/", diff --git a/applications/composer/frontend/src/apiclient/backend/api.ts b/applications/composer/frontend/src/apiclient/backend/api.ts index 89fb30fe..7d44d762 100644 --- a/applications/composer/frontend/src/apiclient/backend/api.ts +++ b/applications/composer/frontend/src/apiclient/backend/api.ts @@ -404,6 +404,12 @@ export interface ConnectivityStatement { * @memberof ConnectivityStatement */ 'provenances'?: Array; + /** + * + * @type {Array} + * @memberof ConnectivityStatement + */ + 'expert_consultants'?: Array; /** * * @type {User} @@ -602,6 +608,49 @@ export interface ConnectivityStatement { * @memberof ConnectivityStatement */ 'statement_triples': string; + /** + * + * @type {string} + * @memberof 
ConnectivityStatement + */ + 'statement_texts': string; + /** + * + * @type {string} + * @memberof ConnectivityStatement + */ + 'statement_anatomical_entities': string; +} +/** + * Serializer for anatomical entity-based relationships + * @export + * @interface ConnectivityStatementAnatomicalEntity + */ +export interface ConnectivityStatementAnatomicalEntity { + /** + * + * @type {number} + * @memberof ConnectivityStatementAnatomicalEntity + */ + 'id': number; + /** + * + * @type {number} + * @memberof ConnectivityStatementAnatomicalEntity + */ + 'connectivity_statement': number; + /** + * + * @type {number} + * @memberof ConnectivityStatementAnatomicalEntity + */ + 'relationship': number; + /** + * + * @type {Array} + * @memberof ConnectivityStatementAnatomicalEntity + */ + 'anatomical_entities': Array; } /** * @type ConnectivityStatementCircuitType @@ -622,7 +671,38 @@ export type ConnectivityStatementLaterality = BlankEnum | LateralityEnum; export type ConnectivityStatementProjection = BlankEnum | ProjectionEnum; /** - * + * Serializer for text-based relationships (free text area) + * @export + * @interface ConnectivityStatementText + */ +export interface ConnectivityStatementText { + /** + * + * @type {number} + * @memberof ConnectivityStatementText + */ + 'id': number; + /** + * + * @type {number} + * @memberof ConnectivityStatementText + */ + 'connectivity_statement': number; + /** + * + * @type {number} + * @memberof ConnectivityStatementText + */ + 'relationship': number; + /** + * + * @type {string} + * @memberof ConnectivityStatementText + */ + 'text': string; +} +/** + * Serializer for triple-based relationships (single/multi select from triples) * @export * @interface ConnectivityStatementTriple */ @@ -647,10 +727,10 @@ export interface ConnectivityStatementTriple { 'relationship': number; /** * - * @type {string} + * @type {Array} * @memberof ConnectivityStatementTriple */ - 'value': string; + 'triples': Array; } /** * Connectivity Statement @@ -974,6 
+1054,44 @@ export const DestinationTypeEmum = { export type DestinationTypeEmum = typeof DestinationTypeEmum[keyof typeof DestinationTypeEmum]; +/** + * Expert Consultant + * @export + * @interface ExpertConsultant + */ +export interface ExpertConsultant { + /** + * + * @type {number} + * @memberof ExpertConsultant + */ + 'id': number; + /** + * + * @type {string} + * @memberof ExpertConsultant + */ + 'uri': string; + /** + * + * @type {number} + * @memberof ExpertConsultant + */ + 'connectivity_statement_id': number; +} +/** + * Serializer for creating expert consultant via request body + * @export + * @interface ExpertConsultantCreate + */ +export interface ExpertConsultantCreate { + /** + * + * @type {string} + * @memberof ExpertConsultantCreate + */ + 'uri': string; +} /** * * @export @@ -1393,6 +1511,68 @@ export interface PaginatedBaseConnectivityStatementList { */ 'results'?: Array; } +/** + * + * @export + * @interface PaginatedConnectivityStatementAnatomicalEntityList + */ +export interface PaginatedConnectivityStatementAnatomicalEntityList { + /** + * + * @type {number} + * @memberof PaginatedConnectivityStatementAnatomicalEntityList + */ + 'count'?: number; + /** + * + * @type {string} + * @memberof PaginatedConnectivityStatementAnatomicalEntityList + */ + 'next'?: string | null; + /** + * + * @type {string} + * @memberof PaginatedConnectivityStatementAnatomicalEntityList + */ + 'previous'?: string | null; + /** + * + * @type {Array} + * @memberof PaginatedConnectivityStatementAnatomicalEntityList + */ + 'results'?: Array; +} +/** + * + * @export + * @interface PaginatedConnectivityStatementTextList + */ +export interface PaginatedConnectivityStatementTextList { + /** + * + * @type {number} + * @memberof PaginatedConnectivityStatementTextList + */ + 'count'?: number; + /** + * + * @type {string} + * @memberof PaginatedConnectivityStatementTextList + */ + 'next'?: string | null; + /** + * + * @type {string} + * @memberof 
PaginatedConnectivityStatementTextList + */ + 'previous'?: string | null; + /** + * + * @type {Array} + * @memberof PaginatedConnectivityStatementTextList + */ + 'results'?: Array; +} /** * * @export @@ -1869,6 +2049,12 @@ export interface PatchedConnectivityStatement { * @memberof PatchedConnectivityStatement */ 'provenances'?: Array; + /** + * + * @type {Array} + * @memberof PatchedConnectivityStatement + */ + 'expert_consultants'?: Array; /** * * @type {User} @@ -2067,9 +2253,83 @@ export interface PatchedConnectivityStatement { * @memberof PatchedConnectivityStatement */ 'statement_triples'?: string; + /** + * + * @type {string} + * @memberof PatchedConnectivityStatement + */ + 'statement_texts'?: string; + /** + * + * @type {string} + * @memberof PatchedConnectivityStatement + */ + 'statement_anatomical_entities'?: string; } /** - * + * Serializer for anatomical entity-based relationships + * @export + * @interface PatchedConnectivityStatementAnatomicalEntity + */ +export interface PatchedConnectivityStatementAnatomicalEntity { + /** + * + * @type {number} + * @memberof PatchedConnectivityStatementAnatomicalEntity + */ + 'id'?: number; + /** + * + * @type {number} + * @memberof PatchedConnectivityStatementAnatomicalEntity + */ + 'connectivity_statement'?: number; + /** + * + * @type {number} + * @memberof PatchedConnectivityStatementAnatomicalEntity + */ + 'relationship'?: number; + /** + * + * @type {Array} + * @memberof PatchedConnectivityStatementAnatomicalEntity + */ + 'anatomical_entities'?: Array; +} +/** + * Serializer for text-based relationships (free text area) + * @export + * @interface PatchedConnectivityStatementText + */ +export interface PatchedConnectivityStatementText { + /** + * + * @type {number} + * @memberof PatchedConnectivityStatementText + */ + 'id'?: number; + /** + * + * @type {number} + * @memberof PatchedConnectivityStatementText + */ + 'connectivity_statement'?: number; + /** + * + * @type {number} + * @memberof 
PatchedConnectivityStatementText + */ + 'relationship'?: number; + /** + * + * @type {string} + * @memberof PatchedConnectivityStatementText + */ + 'text'?: string; +} +/** + * Serializer for triple-based relationships (single/multi select from triples) * @export * @interface PatchedConnectivityStatementTriple */ @@ -2094,10 +2354,10 @@ export interface PatchedConnectivityStatementTriple { 'relationship'?: number; /** * - * @type {string} + * @type {Array} * @memberof PatchedConnectivityStatementTriple */ - 'value'?: string; + 'triples'?: Array; } /** * Connectivity Statement @@ -2824,8 +3084,9 @@ export interface Relationship { */ export const RelationshipTypeEnum = { - Single: 'single', - Multi: 'multi', + TripleSingle: 'triple_single', + TripleMulti: 'triple_multi', + AnatomicalMulti: 'anatomical_multi', Text: 'text' } as const; @@ -3597,6 +3858,54 @@ export const ComposerApiAxiosParamCreator = function (configuration?: Configurat options: localVarRequestOptions, }; }, + /** + * ConnectivityStatement + * @param {number} id A unique integer value identifying this connectivity statement. + * @param {ExpertConsultantCreate} expertConsultantCreate + * @param {*} [options] Override http request option. 
+ * @throws {RequiredError} + */ + composerConnectivityStatementAddExpertConsultantCreate: async (id: number, expertConsultantCreate: ExpertConsultantCreate, options: RawAxiosRequestConfig = {}): Promise => { + // verify required parameter 'id' is not null or undefined + assertParamExists('composerConnectivityStatementAddExpertConsultantCreate', 'id', id) + // verify required parameter 'expertConsultantCreate' is not null or undefined + assertParamExists('composerConnectivityStatementAddExpertConsultantCreate', 'expertConsultantCreate', expertConsultantCreate) + const localVarPath = `/api/composer/connectivity-statement/{id}/add_expert_consultant/` + .replace(`{${"id"}}`, encodeURIComponent(String(id))); + // use dummy base URL string because the URL constructor only accepts absolute URLs. + const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL); + let baseOptions; + if (configuration) { + baseOptions = configuration.baseOptions; + } + + const localVarRequestOptions = { method: 'POST', ...baseOptions, ...options}; + const localVarHeaderParameter = {} as any; + const localVarQueryParameter = {} as any; + + // authentication basicAuth required + // http basic authentication required + setBasicAuthToObject(localVarRequestOptions, configuration) + + // authentication tokenAuth required + await setApiKeyToObject(localVarHeaderParameter, "Authorization", configuration) + + // authentication cookieAuth required + + + + localVarHeaderParameter['Content-Type'] = 'application/json'; + + setSearchParams(localVarUrlObj, localVarQueryParameter); + let headersFromBaseOptions = baseOptions && baseOptions.headers ? 
baseOptions.headers : {}; + localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers}; + localVarRequestOptions.data = serializeDataIfNeeded(expertConsultantCreate, localVarRequestOptions, configuration) + + return { + url: toPathString(localVarUrlObj), + options: localVarRequestOptions, + }; + }, /** * ConnectivityStatement * @param {number} id A unique integer value identifying this connectivity statement. @@ -3738,17 +4047,15 @@ export const ComposerApiAxiosParamCreator = function (configuration?: Configurat }; }, /** - * ConnectivityStatement - * @param {number} id A unique integer value identifying this connectivity statement. - * @param {PatchedConnectivityStatement} [patchedConnectivityStatement] + * ConnectivityStatementAnatomicalEntity: Manage anatomical entity-based relationships + * @param {ConnectivityStatementAnatomicalEntity} connectivityStatementAnatomicalEntity * @param {*} [options] Override http request option. * @throws {RequiredError} */ - composerConnectivityStatementAssignOwnerPartialUpdate: async (id: number, patchedConnectivityStatement?: PatchedConnectivityStatement, options: RawAxiosRequestConfig = {}): Promise => { - // verify required parameter 'id' is not null or undefined - assertParamExists('composerConnectivityStatementAssignOwnerPartialUpdate', 'id', id) - const localVarPath = `/api/composer/connectivity-statement/{id}/assign_owner/` - .replace(`{${"id"}}`, encodeURIComponent(String(id))); + composerConnectivityStatementAnatomicalEntityCreate: async (connectivityStatementAnatomicalEntity: ConnectivityStatementAnatomicalEntity, options: RawAxiosRequestConfig = {}): Promise => { + // verify required parameter 'connectivityStatementAnatomicalEntity' is not null or undefined + assertParamExists('composerConnectivityStatementAnatomicalEntityCreate', 'connectivityStatementAnatomicalEntity', connectivityStatementAnatomicalEntity) + const localVarPath = 
`/api/composer/connectivityStatementAnatomicalEntity/`; // use dummy base URL string because the URL constructor only accepts absolute URLs. const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL); let baseOptions; @@ -3756,7 +4063,7 @@ export const ComposerApiAxiosParamCreator = function (configuration?: Configurat baseOptions = configuration.baseOptions; } - const localVarRequestOptions = { method: 'PATCH', ...baseOptions, ...options}; + const localVarRequestOptions = { method: 'POST', ...baseOptions, ...options}; const localVarHeaderParameter = {} as any; const localVarQueryParameter = {} as any; @@ -3776,7 +4083,7 @@ export const ComposerApiAxiosParamCreator = function (configuration?: Configurat setSearchParams(localVarUrlObj, localVarQueryParameter); let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {}; localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers}; - localVarRequestOptions.data = serializeDataIfNeeded(patchedConnectivityStatement, localVarRequestOptions, configuration) + localVarRequestOptions.data = serializeDataIfNeeded(connectivityStatementAnatomicalEntity, localVarRequestOptions, configuration) return { url: toPathString(localVarUrlObj), @@ -3784,7 +4091,279 @@ export const ComposerApiAxiosParamCreator = function (configuration?: Configurat }; }, /** - * Returns available users for assignment and possible state transitions for the selected items. + * ConnectivityStatementAnatomicalEntity: Manage anatomical entity-based relationships + * @param {number} id A unique integer value identifying this connectivity statement anatomical entity. + * @param {*} [options] Override http request option. 
+ * @throws {RequiredError} + */ + composerConnectivityStatementAnatomicalEntityDestroy: async (id: number, options: RawAxiosRequestConfig = {}): Promise => { + // verify required parameter 'id' is not null or undefined + assertParamExists('composerConnectivityStatementAnatomicalEntityDestroy', 'id', id) + const localVarPath = `/api/composer/connectivityStatementAnatomicalEntity/{id}/` + .replace(`{${"id"}}`, encodeURIComponent(String(id))); + // use dummy base URL string because the URL constructor only accepts absolute URLs. + const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL); + let baseOptions; + if (configuration) { + baseOptions = configuration.baseOptions; + } + + const localVarRequestOptions = { method: 'DELETE', ...baseOptions, ...options}; + const localVarHeaderParameter = {} as any; + const localVarQueryParameter = {} as any; + + // authentication basicAuth required + // http basic authentication required + setBasicAuthToObject(localVarRequestOptions, configuration) + + // authentication tokenAuth required + await setApiKeyToObject(localVarHeaderParameter, "Authorization", configuration) + + // authentication cookieAuth required + + + + setSearchParams(localVarUrlObj, localVarQueryParameter); + let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {}; + localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers}; + + return { + url: toPathString(localVarUrlObj), + options: localVarRequestOptions, + }; + }, + /** + * ConnectivityStatementAnatomicalEntity: Manage anatomical entity-based relationships + * @param {number} [limit] Number of results to return per page. + * @param {number} [offset] The initial index from which to return the results. + * @param {*} [options] Override http request option. 
+ * @throws {RequiredError} + */ + composerConnectivityStatementAnatomicalEntityList: async (limit?: number, offset?: number, options: RawAxiosRequestConfig = {}): Promise => { + const localVarPath = `/api/composer/connectivityStatementAnatomicalEntity/`; + // use dummy base URL string because the URL constructor only accepts absolute URLs. + const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL); + let baseOptions; + if (configuration) { + baseOptions = configuration.baseOptions; + } + + const localVarRequestOptions = { method: 'GET', ...baseOptions, ...options}; + const localVarHeaderParameter = {} as any; + const localVarQueryParameter = {} as any; + + // authentication basicAuth required + // http basic authentication required + setBasicAuthToObject(localVarRequestOptions, configuration) + + // authentication tokenAuth required + await setApiKeyToObject(localVarHeaderParameter, "Authorization", configuration) + + // authentication cookieAuth required + + if (limit !== undefined) { + localVarQueryParameter['limit'] = limit; + } + + if (offset !== undefined) { + localVarQueryParameter['offset'] = offset; + } + + + + setSearchParams(localVarUrlObj, localVarQueryParameter); + let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {}; + localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers}; + + return { + url: toPathString(localVarUrlObj), + options: localVarRequestOptions, + }; + }, + /** + * ConnectivityStatementAnatomicalEntity: Manage anatomical entity-based relationships + * @param {number} id A unique integer value identifying this connectivity statement anatomical entity. + * @param {PatchedConnectivityStatementAnatomicalEntity} [patchedConnectivityStatementAnatomicalEntity] + * @param {*} [options] Override http request option. 
+ * @throws {RequiredError} + */ + composerConnectivityStatementAnatomicalEntityPartialUpdate: async (id: number, patchedConnectivityStatementAnatomicalEntity?: PatchedConnectivityStatementAnatomicalEntity, options: RawAxiosRequestConfig = {}): Promise => { + // verify required parameter 'id' is not null or undefined + assertParamExists('composerConnectivityStatementAnatomicalEntityPartialUpdate', 'id', id) + const localVarPath = `/api/composer/connectivityStatementAnatomicalEntity/{id}/` + .replace(`{${"id"}}`, encodeURIComponent(String(id))); + // use dummy base URL string because the URL constructor only accepts absolute URLs. + const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL); + let baseOptions; + if (configuration) { + baseOptions = configuration.baseOptions; + } + + const localVarRequestOptions = { method: 'PATCH', ...baseOptions, ...options}; + const localVarHeaderParameter = {} as any; + const localVarQueryParameter = {} as any; + + // authentication basicAuth required + // http basic authentication required + setBasicAuthToObject(localVarRequestOptions, configuration) + + // authentication tokenAuth required + await setApiKeyToObject(localVarHeaderParameter, "Authorization", configuration) + + // authentication cookieAuth required + + + + localVarHeaderParameter['Content-Type'] = 'application/json'; + + setSearchParams(localVarUrlObj, localVarQueryParameter); + let headersFromBaseOptions = baseOptions && baseOptions.headers ? 
baseOptions.headers : {}; + localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers}; + localVarRequestOptions.data = serializeDataIfNeeded(patchedConnectivityStatementAnatomicalEntity, localVarRequestOptions, configuration) + + return { + url: toPathString(localVarUrlObj), + options: localVarRequestOptions, + }; + }, + /** + * ConnectivityStatementAnatomicalEntity: Manage anatomical entity-based relationships + * @param {number} id A unique integer value identifying this connectivity statement anatomical entity. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + composerConnectivityStatementAnatomicalEntityRetrieve: async (id: number, options: RawAxiosRequestConfig = {}): Promise => { + // verify required parameter 'id' is not null or undefined + assertParamExists('composerConnectivityStatementAnatomicalEntityRetrieve', 'id', id) + const localVarPath = `/api/composer/connectivityStatementAnatomicalEntity/{id}/` + .replace(`{${"id"}}`, encodeURIComponent(String(id))); + // use dummy base URL string because the URL constructor only accepts absolute URLs. + const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL); + let baseOptions; + if (configuration) { + baseOptions = configuration.baseOptions; + } + + const localVarRequestOptions = { method: 'GET', ...baseOptions, ...options}; + const localVarHeaderParameter = {} as any; + const localVarQueryParameter = {} as any; + + // authentication basicAuth required + // http basic authentication required + setBasicAuthToObject(localVarRequestOptions, configuration) + + // authentication tokenAuth required + await setApiKeyToObject(localVarHeaderParameter, "Authorization", configuration) + + // authentication cookieAuth required + + + + setSearchParams(localVarUrlObj, localVarQueryParameter); + let headersFromBaseOptions = baseOptions && baseOptions.headers ? 
baseOptions.headers : {}; + localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers}; + + return { + url: toPathString(localVarUrlObj), + options: localVarRequestOptions, + }; + }, + /** + * ConnectivityStatementAnatomicalEntity: Manage anatomical entity-based relationships + * @param {number} id A unique integer value identifying this connectivity statement anatomical entity. + * @param {ConnectivityStatementAnatomicalEntity} connectivityStatementAnatomicalEntity + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + composerConnectivityStatementAnatomicalEntityUpdate: async (id: number, connectivityStatementAnatomicalEntity: ConnectivityStatementAnatomicalEntity, options: RawAxiosRequestConfig = {}): Promise => { + // verify required parameter 'id' is not null or undefined + assertParamExists('composerConnectivityStatementAnatomicalEntityUpdate', 'id', id) + // verify required parameter 'connectivityStatementAnatomicalEntity' is not null or undefined + assertParamExists('composerConnectivityStatementAnatomicalEntityUpdate', 'connectivityStatementAnatomicalEntity', connectivityStatementAnatomicalEntity) + const localVarPath = `/api/composer/connectivityStatementAnatomicalEntity/{id}/` + .replace(`{${"id"}}`, encodeURIComponent(String(id))); + // use dummy base URL string because the URL constructor only accepts absolute URLs. 
+ const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL); + let baseOptions; + if (configuration) { + baseOptions = configuration.baseOptions; + } + + const localVarRequestOptions = { method: 'PUT', ...baseOptions, ...options}; + const localVarHeaderParameter = {} as any; + const localVarQueryParameter = {} as any; + + // authentication basicAuth required + // http basic authentication required + setBasicAuthToObject(localVarRequestOptions, configuration) + + // authentication tokenAuth required + await setApiKeyToObject(localVarHeaderParameter, "Authorization", configuration) + + // authentication cookieAuth required + + + + localVarHeaderParameter['Content-Type'] = 'application/json'; + + setSearchParams(localVarUrlObj, localVarQueryParameter); + let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {}; + localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers}; + localVarRequestOptions.data = serializeDataIfNeeded(connectivityStatementAnatomicalEntity, localVarRequestOptions, configuration) + + return { + url: toPathString(localVarUrlObj), + options: localVarRequestOptions, + }; + }, + /** + * ConnectivityStatement + * @param {number} id A unique integer value identifying this connectivity statement. + * @param {PatchedConnectivityStatement} [patchedConnectivityStatement] + * @param {*} [options] Override http request option. 
+ * @throws {RequiredError} + */ + composerConnectivityStatementAssignOwnerPartialUpdate: async (id: number, patchedConnectivityStatement?: PatchedConnectivityStatement, options: RawAxiosRequestConfig = {}): Promise => { + // verify required parameter 'id' is not null or undefined + assertParamExists('composerConnectivityStatementAssignOwnerPartialUpdate', 'id', id) + const localVarPath = `/api/composer/connectivity-statement/{id}/assign_owner/` + .replace(`{${"id"}}`, encodeURIComponent(String(id))); + // use dummy base URL string because the URL constructor only accepts absolute URLs. + const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL); + let baseOptions; + if (configuration) { + baseOptions = configuration.baseOptions; + } + + const localVarRequestOptions = { method: 'PATCH', ...baseOptions, ...options}; + const localVarHeaderParameter = {} as any; + const localVarQueryParameter = {} as any; + + // authentication basicAuth required + // http basic authentication required + setBasicAuthToObject(localVarRequestOptions, configuration) + + // authentication tokenAuth required + await setApiKeyToObject(localVarHeaderParameter, "Authorization", configuration) + + // authentication cookieAuth required + + + + localVarHeaderParameter['Content-Type'] = 'application/json'; + + setSearchParams(localVarUrlObj, localVarQueryParameter); + let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {}; + localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers}; + localVarRequestOptions.data = serializeDataIfNeeded(patchedConnectivityStatement, localVarRequestOptions, configuration) + + return { + url: toPathString(localVarUrlObj), + options: localVarRequestOptions, + }; + }, + /** + * Returns available users for assignment and possible state transitions for the selected items. * @param {Array} [destinations] * @param {Array} [excludeIds] Multiple values may be separated by commas. 
* @param {number} [excludeSentenceId] @@ -4065,12 +4644,401 @@ export const ComposerApiAxiosParamCreator = function (configuration?: Configurat - localVarHeaderParameter['Content-Type'] = 'application/json'; - + localVarHeaderParameter['Content-Type'] = 'application/json'; + + setSearchParams(localVarUrlObj, localVarQueryParameter); + let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {}; + localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers}; + localVarRequestOptions.data = serializeDataIfNeeded(connectivityStatement, localVarRequestOptions, configuration) + + return { + url: toPathString(localVarUrlObj), + options: localVarRequestOptions, + }; + }, + /** + * ConnectivityStatement + * @param {number} expertConsultantId + * @param {number} id A unique integer value identifying this connectivity statement. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + composerConnectivityStatementDelExpertConsultantDestroy: async (expertConsultantId: number, id: number, options: RawAxiosRequestConfig = {}): Promise => { + // verify required parameter 'expertConsultantId' is not null or undefined + assertParamExists('composerConnectivityStatementDelExpertConsultantDestroy', 'expertConsultantId', expertConsultantId) + // verify required parameter 'id' is not null or undefined + assertParamExists('composerConnectivityStatementDelExpertConsultantDestroy', 'id', id) + const localVarPath = `/api/composer/connectivity-statement/{id}/del_expert_consultant/{expert_consultant_id}/` + .replace(`{${"expert_consultant_id"}}`, encodeURIComponent(String(expertConsultantId))) + .replace(`{${"id"}}`, encodeURIComponent(String(id))); + // use dummy base URL string because the URL constructor only accepts absolute URLs. 
+ const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL); + let baseOptions; + if (configuration) { + baseOptions = configuration.baseOptions; + } + + const localVarRequestOptions = { method: 'DELETE', ...baseOptions, ...options}; + const localVarHeaderParameter = {} as any; + const localVarQueryParameter = {} as any; + + // authentication basicAuth required + // http basic authentication required + setBasicAuthToObject(localVarRequestOptions, configuration) + + // authentication tokenAuth required + await setApiKeyToObject(localVarHeaderParameter, "Authorization", configuration) + + // authentication cookieAuth required + + + + setSearchParams(localVarUrlObj, localVarQueryParameter); + let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {}; + localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers}; + + return { + url: toPathString(localVarUrlObj), + options: localVarRequestOptions, + }; + }, + /** + * ConnectivityStatement + * @param {number} id A unique integer value identifying this connectivity statement. + * @param {number} provenanceId + * @param {*} [options] Override http request option. 
+ * @throws {RequiredError} + */ + composerConnectivityStatementDelProvenanceDestroy: async (id: number, provenanceId: number, options: RawAxiosRequestConfig = {}): Promise => { + // verify required parameter 'id' is not null or undefined + assertParamExists('composerConnectivityStatementDelProvenanceDestroy', 'id', id) + // verify required parameter 'provenanceId' is not null or undefined + assertParamExists('composerConnectivityStatementDelProvenanceDestroy', 'provenanceId', provenanceId) + const localVarPath = `/api/composer/connectivity-statement/{id}/del_provenance/{provenance_id}/` + .replace(`{${"id"}}`, encodeURIComponent(String(id))) + .replace(`{${"provenance_id"}}`, encodeURIComponent(String(provenanceId))); + // use dummy base URL string because the URL constructor only accepts absolute URLs. + const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL); + let baseOptions; + if (configuration) { + baseOptions = configuration.baseOptions; + } + + const localVarRequestOptions = { method: 'DELETE', ...baseOptions, ...options}; + const localVarHeaderParameter = {} as any; + const localVarQueryParameter = {} as any; + + // authentication basicAuth required + // http basic authentication required + setBasicAuthToObject(localVarRequestOptions, configuration) + + // authentication tokenAuth required + await setApiKeyToObject(localVarHeaderParameter, "Authorization", configuration) + + // authentication cookieAuth required + + + + setSearchParams(localVarUrlObj, localVarQueryParameter); + let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {}; + localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers}; + + return { + url: toPathString(localVarUrlObj), + options: localVarRequestOptions, + }; + }, + /** + * ConnectivityStatement + * @param {number} id A unique integer value identifying this connectivity statement. 
+ * @param {number} specieId + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + composerConnectivityStatementDelSpecieCreate: async (id: number, specieId: number, options: RawAxiosRequestConfig = {}): Promise => { + // verify required parameter 'id' is not null or undefined + assertParamExists('composerConnectivityStatementDelSpecieCreate', 'id', id) + // verify required parameter 'specieId' is not null or undefined + assertParamExists('composerConnectivityStatementDelSpecieCreate', 'specieId', specieId) + const localVarPath = `/api/composer/connectivity-statement/{id}/del_specie/{specie_id}/` + .replace(`{${"id"}}`, encodeURIComponent(String(id))) + .replace(`{${"specie_id"}}`, encodeURIComponent(String(specieId))); + // use dummy base URL string because the URL constructor only accepts absolute URLs. + const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL); + let baseOptions; + if (configuration) { + baseOptions = configuration.baseOptions; + } + + const localVarRequestOptions = { method: 'POST', ...baseOptions, ...options}; + const localVarHeaderParameter = {} as any; + const localVarQueryParameter = {} as any; + + // authentication basicAuth required + // http basic authentication required + setBasicAuthToObject(localVarRequestOptions, configuration) + + // authentication tokenAuth required + await setApiKeyToObject(localVarHeaderParameter, "Authorization", configuration) + + // authentication cookieAuth required + + + + setSearchParams(localVarUrlObj, localVarQueryParameter); + let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {}; + localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers}; + + return { + url: toPathString(localVarUrlObj), + options: localVarRequestOptions, + }; + }, + /** + * ConnectivityStatement + * @param {number} id A unique integer value identifying this connectivity statement. 
+ * @param {number} tagId + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + composerConnectivityStatementDelTagCreate: async (id: number, tagId: number, options: RawAxiosRequestConfig = {}): Promise => { + // verify required parameter 'id' is not null or undefined + assertParamExists('composerConnectivityStatementDelTagCreate', 'id', id) + // verify required parameter 'tagId' is not null or undefined + assertParamExists('composerConnectivityStatementDelTagCreate', 'tagId', tagId) + const localVarPath = `/api/composer/connectivity-statement/{id}/del_tag/{tag_id}/` + .replace(`{${"id"}}`, encodeURIComponent(String(id))) + .replace(`{${"tag_id"}}`, encodeURIComponent(String(tagId))); + // use dummy base URL string because the URL constructor only accepts absolute URLs. + const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL); + let baseOptions; + if (configuration) { + baseOptions = configuration.baseOptions; + } + + const localVarRequestOptions = { method: 'POST', ...baseOptions, ...options}; + const localVarHeaderParameter = {} as any; + const localVarQueryParameter = {} as any; + + // authentication basicAuth required + // http basic authentication required + setBasicAuthToObject(localVarRequestOptions, configuration) + + // authentication tokenAuth required + await setApiKeyToObject(localVarHeaderParameter, "Authorization", configuration) + + // authentication cookieAuth required + + + + setSearchParams(localVarUrlObj, localVarQueryParameter); + let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {}; + localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers}; + + return { + url: toPathString(localVarUrlObj), + options: localVarRequestOptions, + }; + }, + /** + * ConnectivityStatement + * @param {number} id A unique integer value identifying this connectivity statement. + * @param {*} [options] Override http request option. 
+ * @throws {RequiredError} + */ + composerConnectivityStatementDestroy: async (id: number, options: RawAxiosRequestConfig = {}): Promise => { + // verify required parameter 'id' is not null or undefined + assertParamExists('composerConnectivityStatementDestroy', 'id', id) + const localVarPath = `/api/composer/connectivity-statement/{id}/` + .replace(`{${"id"}}`, encodeURIComponent(String(id))); + // use dummy base URL string because the URL constructor only accepts absolute URLs. + const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL); + let baseOptions; + if (configuration) { + baseOptions = configuration.baseOptions; + } + + const localVarRequestOptions = { method: 'DELETE', ...baseOptions, ...options}; + const localVarHeaderParameter = {} as any; + const localVarQueryParameter = {} as any; + + // authentication basicAuth required + // http basic authentication required + setBasicAuthToObject(localVarRequestOptions, configuration) + + // authentication tokenAuth required + await setApiKeyToObject(localVarHeaderParameter, "Authorization", configuration) + + // authentication cookieAuth required + + + + setSearchParams(localVarUrlObj, localVarQueryParameter); + let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {}; + localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers}; + + return { + url: toPathString(localVarUrlObj), + options: localVarRequestOptions, + }; + }, + /** + * ConnectivityStatement + * @param {number} id A unique integer value identifying this connectivity statement. + * @param {string} transition + * @param {ConnectivityStatement} [connectivityStatement] + * @param {*} [options] Override http request option. 
+ * @throws {RequiredError} + */ + composerConnectivityStatementDoTransitionCreate: async (id: number, transition: string, connectivityStatement?: ConnectivityStatement, options: RawAxiosRequestConfig = {}): Promise => { + // verify required parameter 'id' is not null or undefined + assertParamExists('composerConnectivityStatementDoTransitionCreate', 'id', id) + // verify required parameter 'transition' is not null or undefined + assertParamExists('composerConnectivityStatementDoTransitionCreate', 'transition', transition) + const localVarPath = `/api/composer/connectivity-statement/{id}/do_transition/{transition}/` + .replace(`{${"id"}}`, encodeURIComponent(String(id))) + .replace(`{${"transition"}}`, encodeURIComponent(String(transition))); + // use dummy base URL string because the URL constructor only accepts absolute URLs. + const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL); + let baseOptions; + if (configuration) { + baseOptions = configuration.baseOptions; + } + + const localVarRequestOptions = { method: 'POST', ...baseOptions, ...options}; + const localVarHeaderParameter = {} as any; + const localVarQueryParameter = {} as any; + + // authentication basicAuth required + // http basic authentication required + setBasicAuthToObject(localVarRequestOptions, configuration) + + // authentication tokenAuth required + await setApiKeyToObject(localVarHeaderParameter, "Authorization", configuration) + + // authentication cookieAuth required + + + + localVarHeaderParameter['Content-Type'] = 'application/json'; + + setSearchParams(localVarUrlObj, localVarQueryParameter); + let headersFromBaseOptions = baseOptions && baseOptions.headers ? 
baseOptions.headers : {}; + localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers}; + localVarRequestOptions.data = serializeDataIfNeeded(connectivityStatement, localVarRequestOptions, configuration) + + return { + url: toPathString(localVarUrlObj), + options: localVarRequestOptions, + }; + }, + /** + * ConnectivityStatement + * @param {Array} [destinations] + * @param {Array} [excludeIds] Multiple values may be separated by commas. + * @param {number} [excludeSentenceId] + * @param {boolean} [hasStatementBeenExported] Is Exported + * @param {Array} [include] Multiple values may be separated by commas. + * @param {string} [knowledgeStatement] + * @param {number} [limit] Number of results to return per page. + * @param {boolean} [notes] Checks if entity has notes + * @param {number} [offset] The initial index from which to return the results. + * @param {Array} [ordering] Ordering + * @param {Array} [origins] + * @param {Array} [populationset] + * @param {number} [sentenceId] + * @param {Array} [state] + * @param {Array} [tags] + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + composerConnectivityStatementList: async (destinations?: Array, excludeIds?: Array, excludeSentenceId?: number, hasStatementBeenExported?: boolean, include?: Array, knowledgeStatement?: string, limit?: number, notes?: boolean, offset?: number, ordering?: Array, origins?: Array, populationset?: Array, sentenceId?: number, state?: Array, tags?: Array, options: RawAxiosRequestConfig = {}): Promise => { + const localVarPath = `/api/composer/connectivity-statement/`; + // use dummy base URL string because the URL constructor only accepts absolute URLs. 
+ const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL); + let baseOptions; + if (configuration) { + baseOptions = configuration.baseOptions; + } + + const localVarRequestOptions = { method: 'GET', ...baseOptions, ...options}; + const localVarHeaderParameter = {} as any; + const localVarQueryParameter = {} as any; + + // authentication basicAuth required + // http basic authentication required + setBasicAuthToObject(localVarRequestOptions, configuration) + + // authentication tokenAuth required + await setApiKeyToObject(localVarHeaderParameter, "Authorization", configuration) + + // authentication cookieAuth required + + if (destinations) { + localVarQueryParameter['destinations'] = destinations; + } + + if (excludeIds) { + localVarQueryParameter['exclude_ids'] = excludeIds.join(COLLECTION_FORMATS.csv); + } + + if (excludeSentenceId !== undefined) { + localVarQueryParameter['exclude_sentence_id'] = excludeSentenceId; + } + + if (hasStatementBeenExported !== undefined) { + localVarQueryParameter['has_statement_been_exported'] = hasStatementBeenExported; + } + + if (include) { + localVarQueryParameter['include'] = include.join(COLLECTION_FORMATS.csv); + } + + if (knowledgeStatement !== undefined) { + localVarQueryParameter['knowledge_statement'] = knowledgeStatement; + } + + if (limit !== undefined) { + localVarQueryParameter['limit'] = limit; + } + + if (notes !== undefined) { + localVarQueryParameter['notes'] = notes; + } + + if (offset !== undefined) { + localVarQueryParameter['offset'] = offset; + } + + if (ordering) { + localVarQueryParameter['ordering'] = ordering.join(COLLECTION_FORMATS.csv); + } + + if (origins) { + localVarQueryParameter['origins'] = origins; + } + + if (populationset) { + localVarQueryParameter['populationset'] = populationset; + } + + if (sentenceId !== undefined) { + localVarQueryParameter['sentence_id'] = sentenceId; + } + + if (state) { + localVarQueryParameter['state'] = state; + } + + if (tags) { + 
localVarQueryParameter['tags'] = tags; + } + + + setSearchParams(localVarUrlObj, localVarQueryParameter); let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {}; localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers}; - localVarRequestOptions.data = serializeDataIfNeeded(connectivityStatement, localVarRequestOptions, configuration) return { url: toPathString(localVarUrlObj), @@ -4080,18 +5048,15 @@ export const ComposerApiAxiosParamCreator = function (configuration?: Configurat /** * ConnectivityStatement * @param {number} id A unique integer value identifying this connectivity statement. - * @param {number} provenanceId + * @param {PatchedConnectivityStatementUpdate} [patchedConnectivityStatementUpdate] * @param {*} [options] Override http request option. * @throws {RequiredError} */ - composerConnectivityStatementDelProvenanceDestroy: async (id: number, provenanceId: number, options: RawAxiosRequestConfig = {}): Promise => { + composerConnectivityStatementPartialUpdate: async (id: number, patchedConnectivityStatementUpdate?: PatchedConnectivityStatementUpdate, options: RawAxiosRequestConfig = {}): Promise => { // verify required parameter 'id' is not null or undefined - assertParamExists('composerConnectivityStatementDelProvenanceDestroy', 'id', id) - // verify required parameter 'provenanceId' is not null or undefined - assertParamExists('composerConnectivityStatementDelProvenanceDestroy', 'provenanceId', provenanceId) - const localVarPath = `/api/composer/connectivity-statement/{id}/del_provenance/{provenance_id}/` - .replace(`{${"id"}}`, encodeURIComponent(String(id))) - .replace(`{${"provenance_id"}}`, encodeURIComponent(String(provenanceId))); + assertParamExists('composerConnectivityStatementPartialUpdate', 'id', id) + const localVarPath = `/api/composer/connectivity-statement/{id}/` + .replace(`{${"id"}}`, encodeURIComponent(String(id))); // use dummy base URL string 
because the URL constructor only accepts absolute URLs. const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL); let baseOptions; @@ -4099,7 +5064,7 @@ export const ComposerApiAxiosParamCreator = function (configuration?: Configurat baseOptions = configuration.baseOptions; } - const localVarRequestOptions = { method: 'DELETE', ...baseOptions, ...options}; + const localVarRequestOptions = { method: 'PATCH', ...baseOptions, ...options}; const localVarHeaderParameter = {} as any; const localVarQueryParameter = {} as any; @@ -4114,9 +5079,12 @@ export const ComposerApiAxiosParamCreator = function (configuration?: Configurat + localVarHeaderParameter['Content-Type'] = 'application/json'; + setSearchParams(localVarUrlObj, localVarQueryParameter); let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {}; localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers}; + localVarRequestOptions.data = serializeDataIfNeeded(patchedConnectivityStatementUpdate, localVarRequestOptions, configuration) return { url: toPathString(localVarUrlObj), @@ -4126,18 +5094,14 @@ export const ComposerApiAxiosParamCreator = function (configuration?: Configurat /** * ConnectivityStatement * @param {number} id A unique integer value identifying this connectivity statement. - * @param {number} specieId * @param {*} [options] Override http request option. 
* @throws {RequiredError} */ - composerConnectivityStatementDelSpecieCreate: async (id: number, specieId: number, options: RawAxiosRequestConfig = {}): Promise => { + composerConnectivityStatementRetrieve: async (id: number, options: RawAxiosRequestConfig = {}): Promise => { // verify required parameter 'id' is not null or undefined - assertParamExists('composerConnectivityStatementDelSpecieCreate', 'id', id) - // verify required parameter 'specieId' is not null or undefined - assertParamExists('composerConnectivityStatementDelSpecieCreate', 'specieId', specieId) - const localVarPath = `/api/composer/connectivity-statement/{id}/del_specie/{specie_id}/` - .replace(`{${"id"}}`, encodeURIComponent(String(id))) - .replace(`{${"specie_id"}}`, encodeURIComponent(String(specieId))); + assertParamExists('composerConnectivityStatementRetrieve', 'id', id) + const localVarPath = `/api/composer/connectivity-statement/{id}/` + .replace(`{${"id"}}`, encodeURIComponent(String(id))); // use dummy base URL string because the URL constructor only accepts absolute URLs. const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL); let baseOptions; @@ -4145,7 +5109,7 @@ export const ComposerApiAxiosParamCreator = function (configuration?: Configurat baseOptions = configuration.baseOptions; } - const localVarRequestOptions = { method: 'POST', ...baseOptions, ...options}; + const localVarRequestOptions = { method: 'GET', ...baseOptions, ...options}; const localVarHeaderParameter = {} as any; const localVarQueryParameter = {} as any; @@ -4170,20 +5134,15 @@ export const ComposerApiAxiosParamCreator = function (configuration?: Configurat }; }, /** - * ConnectivityStatement - * @param {number} id A unique integer value identifying this connectivity statement. - * @param {number} tagId + * ConnectivityStatementText: Manage text-based relationships + * @param {ConnectivityStatementText} connectivityStatementText * @param {*} [options] Override http request option. 
* @throws {RequiredError} */ - composerConnectivityStatementDelTagCreate: async (id: number, tagId: number, options: RawAxiosRequestConfig = {}): Promise => { - // verify required parameter 'id' is not null or undefined - assertParamExists('composerConnectivityStatementDelTagCreate', 'id', id) - // verify required parameter 'tagId' is not null or undefined - assertParamExists('composerConnectivityStatementDelTagCreate', 'tagId', tagId) - const localVarPath = `/api/composer/connectivity-statement/{id}/del_tag/{tag_id}/` - .replace(`{${"id"}}`, encodeURIComponent(String(id))) - .replace(`{${"tag_id"}}`, encodeURIComponent(String(tagId))); + composerConnectivityStatementTextCreate: async (connectivityStatementText: ConnectivityStatementText, options: RawAxiosRequestConfig = {}): Promise => { + // verify required parameter 'connectivityStatementText' is not null or undefined + assertParamExists('composerConnectivityStatementTextCreate', 'connectivityStatementText', connectivityStatementText) + const localVarPath = `/api/composer/connectivityStatementText/`; // use dummy base URL string because the URL constructor only accepts absolute URLs. const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL); let baseOptions; @@ -4206,9 +5165,12 @@ export const ComposerApiAxiosParamCreator = function (configuration?: Configurat + localVarHeaderParameter['Content-Type'] = 'application/json'; + setSearchParams(localVarUrlObj, localVarQueryParameter); let headersFromBaseOptions = baseOptions && baseOptions.headers ? 
baseOptions.headers : {}; localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers}; + localVarRequestOptions.data = serializeDataIfNeeded(connectivityStatementText, localVarRequestOptions, configuration) return { url: toPathString(localVarUrlObj), @@ -4216,15 +5178,15 @@ export const ComposerApiAxiosParamCreator = function (configuration?: Configurat }; }, /** - * ConnectivityStatement - * @param {number} id A unique integer value identifying this connectivity statement. + * ConnectivityStatementText: Manage text-based relationships + * @param {number} id A unique integer value identifying this connectivity statement text. * @param {*} [options] Override http request option. * @throws {RequiredError} */ - composerConnectivityStatementDestroy: async (id: number, options: RawAxiosRequestConfig = {}): Promise => { + composerConnectivityStatementTextDestroy: async (id: number, options: RawAxiosRequestConfig = {}): Promise => { // verify required parameter 'id' is not null or undefined - assertParamExists('composerConnectivityStatementDestroy', 'id', id) - const localVarPath = `/api/composer/connectivity-statement/{id}/` + assertParamExists('composerConnectivityStatementTextDestroy', 'id', id) + const localVarPath = `/api/composer/connectivityStatementText/{id}/` .replace(`{${"id"}}`, encodeURIComponent(String(id))); // use dummy base URL string because the URL constructor only accepts absolute URLs. const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL); @@ -4258,21 +5220,14 @@ export const ComposerApiAxiosParamCreator = function (configuration?: Configurat }; }, /** - * ConnectivityStatement - * @param {number} id A unique integer value identifying this connectivity statement. - * @param {string} transition - * @param {ConnectivityStatement} [connectivityStatement] + * ConnectivityStatementText: Manage text-based relationships + * @param {number} [limit] Number of results to return per page. 
+ * @param {number} [offset] The initial index from which to return the results. * @param {*} [options] Override http request option. * @throws {RequiredError} */ - composerConnectivityStatementDoTransitionCreate: async (id: number, transition: string, connectivityStatement?: ConnectivityStatement, options: RawAxiosRequestConfig = {}): Promise => { - // verify required parameter 'id' is not null or undefined - assertParamExists('composerConnectivityStatementDoTransitionCreate', 'id', id) - // verify required parameter 'transition' is not null or undefined - assertParamExists('composerConnectivityStatementDoTransitionCreate', 'transition', transition) - const localVarPath = `/api/composer/connectivity-statement/{id}/do_transition/{transition}/` - .replace(`{${"id"}}`, encodeURIComponent(String(id))) - .replace(`{${"transition"}}`, encodeURIComponent(String(transition))); + composerConnectivityStatementTextList: async (limit?: number, offset?: number, options: RawAxiosRequestConfig = {}): Promise => { + const localVarPath = `/api/composer/connectivityStatementText/`; // use dummy base URL string because the URL constructor only accepts absolute URLs. 
const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL); let baseOptions; @@ -4280,7 +5235,7 @@ export const ComposerApiAxiosParamCreator = function (configuration?: Configurat baseOptions = configuration.baseOptions; } - const localVarRequestOptions = { method: 'POST', ...baseOptions, ...options}; + const localVarRequestOptions = { method: 'GET', ...baseOptions, ...options}; const localVarHeaderParameter = {} as any; const localVarQueryParameter = {} as any; @@ -4293,14 +5248,19 @@ export const ComposerApiAxiosParamCreator = function (configuration?: Configurat // authentication cookieAuth required + if (limit !== undefined) { + localVarQueryParameter['limit'] = limit; + } + + if (offset !== undefined) { + localVarQueryParameter['offset'] = offset; + } + - localVarHeaderParameter['Content-Type'] = 'application/json'; - setSearchParams(localVarUrlObj, localVarQueryParameter); let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {}; localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers}; - localVarRequestOptions.data = serializeDataIfNeeded(connectivityStatement, localVarRequestOptions, configuration) return { url: toPathString(localVarUrlObj), @@ -4308,27 +5268,17 @@ export const ComposerApiAxiosParamCreator = function (configuration?: Configurat }; }, /** - * ConnectivityStatement - * @param {Array} [destinations] - * @param {Array} [excludeIds] Multiple values may be separated by commas. - * @param {number} [excludeSentenceId] - * @param {boolean} [hasStatementBeenExported] Is Exported - * @param {Array} [include] Multiple values may be separated by commas. - * @param {string} [knowledgeStatement] - * @param {number} [limit] Number of results to return per page. - * @param {boolean} [notes] Checks if entity has notes - * @param {number} [offset] The initial index from which to return the results. 
- * @param {Array} [ordering] Ordering - * @param {Array} [origins] - * @param {Array} [populationset] - * @param {number} [sentenceId] - * @param {Array} [state] - * @param {Array} [tags] + * ConnectivityStatementText: Manage text-based relationships + * @param {number} id A unique integer value identifying this connectivity statement text. + * @param {PatchedConnectivityStatementText} [patchedConnectivityStatementText] * @param {*} [options] Override http request option. * @throws {RequiredError} */ - composerConnectivityStatementList: async (destinations?: Array, excludeIds?: Array, excludeSentenceId?: number, hasStatementBeenExported?: boolean, include?: Array, knowledgeStatement?: string, limit?: number, notes?: boolean, offset?: number, ordering?: Array, origins?: Array, populationset?: Array, sentenceId?: number, state?: Array, tags?: Array, options: RawAxiosRequestConfig = {}): Promise => { - const localVarPath = `/api/composer/connectivity-statement/`; + composerConnectivityStatementTextPartialUpdate: async (id: number, patchedConnectivityStatementText?: PatchedConnectivityStatementText, options: RawAxiosRequestConfig = {}): Promise => { + // verify required parameter 'id' is not null or undefined + assertParamExists('composerConnectivityStatementTextPartialUpdate', 'id', id) + const localVarPath = `/api/composer/connectivityStatementText/{id}/` + .replace(`{${"id"}}`, encodeURIComponent(String(id))); // use dummy base URL string because the URL constructor only accepts absolute URLs. 
const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL); let baseOptions; @@ -4336,7 +5286,7 @@ export const ComposerApiAxiosParamCreator = function (configuration?: Configurat baseOptions = configuration.baseOptions; } - const localVarRequestOptions = { method: 'GET', ...baseOptions, ...options}; + const localVarRequestOptions = { method: 'PATCH', ...baseOptions, ...options}; const localVarHeaderParameter = {} as any; const localVarQueryParameter = {} as any; @@ -4349,71 +5299,14 @@ export const ComposerApiAxiosParamCreator = function (configuration?: Configurat // authentication cookieAuth required - if (destinations) { - localVarQueryParameter['destinations'] = destinations; - } - - if (excludeIds) { - localVarQueryParameter['exclude_ids'] = excludeIds.join(COLLECTION_FORMATS.csv); - } - - if (excludeSentenceId !== undefined) { - localVarQueryParameter['exclude_sentence_id'] = excludeSentenceId; - } - - if (hasStatementBeenExported !== undefined) { - localVarQueryParameter['has_statement_been_exported'] = hasStatementBeenExported; - } - - if (include) { - localVarQueryParameter['include'] = include.join(COLLECTION_FORMATS.csv); - } - - if (knowledgeStatement !== undefined) { - localVarQueryParameter['knowledge_statement'] = knowledgeStatement; - } - - if (limit !== undefined) { - localVarQueryParameter['limit'] = limit; - } - - if (notes !== undefined) { - localVarQueryParameter['notes'] = notes; - } - - if (offset !== undefined) { - localVarQueryParameter['offset'] = offset; - } - - if (ordering) { - localVarQueryParameter['ordering'] = ordering.join(COLLECTION_FORMATS.csv); - } - - if (origins) { - localVarQueryParameter['origins'] = origins; - } - - if (populationset) { - localVarQueryParameter['populationset'] = populationset; - } - - if (sentenceId !== undefined) { - localVarQueryParameter['sentence_id'] = sentenceId; - } - - if (state) { - localVarQueryParameter['state'] = state; - } - - if (tags) { - localVarQueryParameter['tags'] = tags; - } - + 
localVarHeaderParameter['Content-Type'] = 'application/json'; + setSearchParams(localVarUrlObj, localVarQueryParameter); let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {}; localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers}; + localVarRequestOptions.data = serializeDataIfNeeded(patchedConnectivityStatementText, localVarRequestOptions, configuration) return { url: toPathString(localVarUrlObj), @@ -4421,16 +5314,15 @@ export const ComposerApiAxiosParamCreator = function (configuration?: Configurat }; }, /** - * ConnectivityStatement - * @param {number} id A unique integer value identifying this connectivity statement. - * @param {PatchedConnectivityStatementUpdate} [patchedConnectivityStatementUpdate] + * ConnectivityStatementText: Manage text-based relationships + * @param {number} id A unique integer value identifying this connectivity statement text. * @param {*} [options] Override http request option. * @throws {RequiredError} */ - composerConnectivityStatementPartialUpdate: async (id: number, patchedConnectivityStatementUpdate?: PatchedConnectivityStatementUpdate, options: RawAxiosRequestConfig = {}): Promise => { + composerConnectivityStatementTextRetrieve: async (id: number, options: RawAxiosRequestConfig = {}): Promise => { // verify required parameter 'id' is not null or undefined - assertParamExists('composerConnectivityStatementPartialUpdate', 'id', id) - const localVarPath = `/api/composer/connectivity-statement/{id}/` + assertParamExists('composerConnectivityStatementTextRetrieve', 'id', id) + const localVarPath = `/api/composer/connectivityStatementText/{id}/` .replace(`{${"id"}}`, encodeURIComponent(String(id))); // use dummy base URL string because the URL constructor only accepts absolute URLs. 
const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL); @@ -4439,7 +5331,7 @@ export const ComposerApiAxiosParamCreator = function (configuration?: Configurat baseOptions = configuration.baseOptions; } - const localVarRequestOptions = { method: 'PATCH', ...baseOptions, ...options}; + const localVarRequestOptions = { method: 'GET', ...baseOptions, ...options}; const localVarHeaderParameter = {} as any; const localVarQueryParameter = {} as any; @@ -4454,12 +5346,9 @@ export const ComposerApiAxiosParamCreator = function (configuration?: Configurat - localVarHeaderParameter['Content-Type'] = 'application/json'; - setSearchParams(localVarUrlObj, localVarQueryParameter); let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {}; localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers}; - localVarRequestOptions.data = serializeDataIfNeeded(patchedConnectivityStatementUpdate, localVarRequestOptions, configuration) return { url: toPathString(localVarUrlObj), @@ -4467,15 +5356,18 @@ export const ComposerApiAxiosParamCreator = function (configuration?: Configurat }; }, /** - * ConnectivityStatement - * @param {number} id A unique integer value identifying this connectivity statement. + * ConnectivityStatementText: Manage text-based relationships + * @param {number} id A unique integer value identifying this connectivity statement text. + * @param {ConnectivityStatementText} connectivityStatementText * @param {*} [options] Override http request option. 
* @throws {RequiredError} */ - composerConnectivityStatementRetrieve: async (id: number, options: RawAxiosRequestConfig = {}): Promise => { + composerConnectivityStatementTextUpdate: async (id: number, connectivityStatementText: ConnectivityStatementText, options: RawAxiosRequestConfig = {}): Promise => { // verify required parameter 'id' is not null or undefined - assertParamExists('composerConnectivityStatementRetrieve', 'id', id) - const localVarPath = `/api/composer/connectivity-statement/{id}/` + assertParamExists('composerConnectivityStatementTextUpdate', 'id', id) + // verify required parameter 'connectivityStatementText' is not null or undefined + assertParamExists('composerConnectivityStatementTextUpdate', 'connectivityStatementText', connectivityStatementText) + const localVarPath = `/api/composer/connectivityStatementText/{id}/` .replace(`{${"id"}}`, encodeURIComponent(String(id))); // use dummy base URL string because the URL constructor only accepts absolute URLs. const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL); @@ -4484,7 +5376,7 @@ export const ComposerApiAxiosParamCreator = function (configuration?: Configurat baseOptions = configuration.baseOptions; } - const localVarRequestOptions = { method: 'GET', ...baseOptions, ...options}; + const localVarRequestOptions = { method: 'PUT', ...baseOptions, ...options}; const localVarHeaderParameter = {} as any; const localVarQueryParameter = {} as any; @@ -4499,9 +5391,12 @@ export const ComposerApiAxiosParamCreator = function (configuration?: Configurat + localVarHeaderParameter['Content-Type'] = 'application/json'; + setSearchParams(localVarUrlObj, localVarQueryParameter); let headersFromBaseOptions = baseOptions && baseOptions.headers ? 
baseOptions.headers : {}; localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers}; + localVarRequestOptions.data = serializeDataIfNeeded(connectivityStatementText, localVarRequestOptions, configuration) return { url: toPathString(localVarUrlObj), @@ -4509,7 +5404,7 @@ export const ComposerApiAxiosParamCreator = function (configuration?: Configurat }; }, /** - * ConnectivityStatementTriple: + * ConnectivityStatementTriple: Manage triple-based relationships * @param {ConnectivityStatementTriple} connectivityStatementTriple * @param {*} [options] Override http request option. * @throws {RequiredError} @@ -4553,7 +5448,7 @@ export const ComposerApiAxiosParamCreator = function (configuration?: Configurat }; }, /** - * ConnectivityStatementTriple: + * ConnectivityStatementTriple: Manage triple-based relationships * @param {number} id A unique integer value identifying this connectivity statement triple. * @param {*} [options] Override http request option. * @throws {RequiredError} @@ -4595,7 +5490,7 @@ export const ComposerApiAxiosParamCreator = function (configuration?: Configurat }; }, /** - * ConnectivityStatementTriple: + * ConnectivityStatementTriple: Manage triple-based relationships * @param {number} [limit] Number of results to return per page. * @param {number} [offset] The initial index from which to return the results. * @param {*} [options] Override http request option. @@ -4643,7 +5538,7 @@ export const ComposerApiAxiosParamCreator = function (configuration?: Configurat }; }, /** - * ConnectivityStatementTriple: + * ConnectivityStatementTriple: Manage triple-based relationships * @param {number} id A unique integer value identifying this connectivity statement triple. * @param {PatchedConnectivityStatementTriple} [patchedConnectivityStatementTriple] * @param {*} [options] Override http request option. 
@@ -4689,7 +5584,7 @@ export const ComposerApiAxiosParamCreator = function (configuration?: Configurat }; }, /** - * ConnectivityStatementTriple: + * ConnectivityStatementTriple: Manage triple-based relationships * @param {number} id A unique integer value identifying this connectivity statement triple. * @param {*} [options] Override http request option. * @throws {RequiredError} @@ -4731,7 +5626,7 @@ export const ComposerApiAxiosParamCreator = function (configuration?: Configurat }; }, /** - * ConnectivityStatementTriple: + * ConnectivityStatementTriple: Manage triple-based relationships * @param {number} id A unique integer value identifying this connectivity statement triple. * @param {ConnectivityStatementTriple} connectivityStatementTriple * @param {*} [options] Override http request option. @@ -7447,46 +8342,134 @@ export const ComposerApiFp = function(configuration?: Configuration) { async composerAnatomicalEntityRetrieve(id: number, options?: RawAxiosRequestConfig): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise> { const localVarAxiosArgs = await localVarAxiosParamCreator.composerAnatomicalEntityRetrieve(id, options); const localVarOperationServerIndex = configuration?.serverIndex ?? 0; - const localVarOperationServerBasePath = operationServerMap['ComposerApi.composerAnatomicalEntityRetrieve']?.[localVarOperationServerIndex]?.url; + const localVarOperationServerBasePath = operationServerMap['ComposerApi.composerAnatomicalEntityRetrieve']?.[localVarOperationServerIndex]?.url; + return (axios, basePath) => createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)(axios, localVarOperationServerBasePath || basePath); + }, + /** + * ConnectivityStatement + * @param {number} id A unique integer value identifying this connectivity statement. + * @param {ExpertConsultantCreate} expertConsultantCreate + * @param {*} [options] Override http request option. 
+ * @throws {RequiredError} + */ + async composerConnectivityStatementAddExpertConsultantCreate(id: number, expertConsultantCreate: ExpertConsultantCreate, options?: RawAxiosRequestConfig): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise<{ [key: string]: any; }>> { + const localVarAxiosArgs = await localVarAxiosParamCreator.composerConnectivityStatementAddExpertConsultantCreate(id, expertConsultantCreate, options); + const localVarOperationServerIndex = configuration?.serverIndex ?? 0; + const localVarOperationServerBasePath = operationServerMap['ComposerApi.composerConnectivityStatementAddExpertConsultantCreate']?.[localVarOperationServerIndex]?.url; + return (axios, basePath) => createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)(axios, localVarOperationServerBasePath || basePath); + }, + /** + * ConnectivityStatement + * @param {number} id A unique integer value identifying this connectivity statement. + * @param {ProvenanceCreate} provenanceCreate + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + async composerConnectivityStatementAddProvenanceCreate(id: number, provenanceCreate: ProvenanceCreate, options?: RawAxiosRequestConfig): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise<{ [key: string]: any; }>> { + const localVarAxiosArgs = await localVarAxiosParamCreator.composerConnectivityStatementAddProvenanceCreate(id, provenanceCreate, options); + const localVarOperationServerIndex = configuration?.serverIndex ?? 0; + const localVarOperationServerBasePath = operationServerMap['ComposerApi.composerConnectivityStatementAddProvenanceCreate']?.[localVarOperationServerIndex]?.url; + return (axios, basePath) => createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)(axios, localVarOperationServerBasePath || basePath); + }, + /** + * ConnectivityStatement + * @param {number} id A unique integer value identifying this connectivity statement. 
+ * @param {number} specieId + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + async composerConnectivityStatementAddSpecieCreate(id: number, specieId: number, options?: RawAxiosRequestConfig): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise> { + const localVarAxiosArgs = await localVarAxiosParamCreator.composerConnectivityStatementAddSpecieCreate(id, specieId, options); + const localVarOperationServerIndex = configuration?.serverIndex ?? 0; + const localVarOperationServerBasePath = operationServerMap['ComposerApi.composerConnectivityStatementAddSpecieCreate']?.[localVarOperationServerIndex]?.url; + return (axios, basePath) => createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)(axios, localVarOperationServerBasePath || basePath); + }, + /** + * ConnectivityStatement + * @param {number} id A unique integer value identifying this connectivity statement. + * @param {number} tagId + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + async composerConnectivityStatementAddTagCreate(id: number, tagId: number, options?: RawAxiosRequestConfig): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise> { + const localVarAxiosArgs = await localVarAxiosParamCreator.composerConnectivityStatementAddTagCreate(id, tagId, options); + const localVarOperationServerIndex = configuration?.serverIndex ?? 
0; + const localVarOperationServerBasePath = operationServerMap['ComposerApi.composerConnectivityStatementAddTagCreate']?.[localVarOperationServerIndex]?.url; + return (axios, basePath) => createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)(axios, localVarOperationServerBasePath || basePath); + }, + /** + * ConnectivityStatementAnatomicalEntity: Manage anatomical entity-based relationships + * @param {ConnectivityStatementAnatomicalEntity} connectivityStatementAnatomicalEntity + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + async composerConnectivityStatementAnatomicalEntityCreate(connectivityStatementAnatomicalEntity: ConnectivityStatementAnatomicalEntity, options?: RawAxiosRequestConfig): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise> { + const localVarAxiosArgs = await localVarAxiosParamCreator.composerConnectivityStatementAnatomicalEntityCreate(connectivityStatementAnatomicalEntity, options); + const localVarOperationServerIndex = configuration?.serverIndex ?? 0; + const localVarOperationServerBasePath = operationServerMap['ComposerApi.composerConnectivityStatementAnatomicalEntityCreate']?.[localVarOperationServerIndex]?.url; + return (axios, basePath) => createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)(axios, localVarOperationServerBasePath || basePath); + }, + /** + * ConnectivityStatementAnatomicalEntity: Manage anatomical entity-based relationships + * @param {number} id A unique integer value identifying this connectivity statement anatomical entity. + * @param {*} [options] Override http request option. 
+ * @throws {RequiredError} + */ + async composerConnectivityStatementAnatomicalEntityDestroy(id: number, options?: RawAxiosRequestConfig): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise> { + const localVarAxiosArgs = await localVarAxiosParamCreator.composerConnectivityStatementAnatomicalEntityDestroy(id, options); + const localVarOperationServerIndex = configuration?.serverIndex ?? 0; + const localVarOperationServerBasePath = operationServerMap['ComposerApi.composerConnectivityStatementAnatomicalEntityDestroy']?.[localVarOperationServerIndex]?.url; + return (axios, basePath) => createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)(axios, localVarOperationServerBasePath || basePath); + }, + /** + * ConnectivityStatementAnatomicalEntity: Manage anatomical entity-based relationships + * @param {number} [limit] Number of results to return per page. + * @param {number} [offset] The initial index from which to return the results. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + async composerConnectivityStatementAnatomicalEntityList(limit?: number, offset?: number, options?: RawAxiosRequestConfig): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise> { + const localVarAxiosArgs = await localVarAxiosParamCreator.composerConnectivityStatementAnatomicalEntityList(limit, offset, options); + const localVarOperationServerIndex = configuration?.serverIndex ?? 0; + const localVarOperationServerBasePath = operationServerMap['ComposerApi.composerConnectivityStatementAnatomicalEntityList']?.[localVarOperationServerIndex]?.url; return (axios, basePath) => createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)(axios, localVarOperationServerBasePath || basePath); }, /** - * ConnectivityStatement - * @param {number} id A unique integer value identifying this connectivity statement. 
- * @param {ProvenanceCreate} provenanceCreate + * ConnectivityStatementAnatomicalEntity: Manage anatomical entity-based relationships + * @param {number} id A unique integer value identifying this connectivity statement anatomical entity. + * @param {PatchedConnectivityStatementAnatomicalEntity} [patchedConnectivityStatementAnatomicalEntity] * @param {*} [options] Override http request option. * @throws {RequiredError} */ - async composerConnectivityStatementAddProvenanceCreate(id: number, provenanceCreate: ProvenanceCreate, options?: RawAxiosRequestConfig): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise<{ [key: string]: any; }>> { - const localVarAxiosArgs = await localVarAxiosParamCreator.composerConnectivityStatementAddProvenanceCreate(id, provenanceCreate, options); + async composerConnectivityStatementAnatomicalEntityPartialUpdate(id: number, patchedConnectivityStatementAnatomicalEntity?: PatchedConnectivityStatementAnatomicalEntity, options?: RawAxiosRequestConfig): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise> { + const localVarAxiosArgs = await localVarAxiosParamCreator.composerConnectivityStatementAnatomicalEntityPartialUpdate(id, patchedConnectivityStatementAnatomicalEntity, options); const localVarOperationServerIndex = configuration?.serverIndex ?? 0; - const localVarOperationServerBasePath = operationServerMap['ComposerApi.composerConnectivityStatementAddProvenanceCreate']?.[localVarOperationServerIndex]?.url; + const localVarOperationServerBasePath = operationServerMap['ComposerApi.composerConnectivityStatementAnatomicalEntityPartialUpdate']?.[localVarOperationServerIndex]?.url; return (axios, basePath) => createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)(axios, localVarOperationServerBasePath || basePath); }, /** - * ConnectivityStatement - * @param {number} id A unique integer value identifying this connectivity statement. 
- * @param {number} specieId + * ConnectivityStatementAnatomicalEntity: Manage anatomical entity-based relationships + * @param {number} id A unique integer value identifying this connectivity statement anatomical entity. * @param {*} [options] Override http request option. * @throws {RequiredError} */ - async composerConnectivityStatementAddSpecieCreate(id: number, specieId: number, options?: RawAxiosRequestConfig): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise> { - const localVarAxiosArgs = await localVarAxiosParamCreator.composerConnectivityStatementAddSpecieCreate(id, specieId, options); + async composerConnectivityStatementAnatomicalEntityRetrieve(id: number, options?: RawAxiosRequestConfig): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise> { + const localVarAxiosArgs = await localVarAxiosParamCreator.composerConnectivityStatementAnatomicalEntityRetrieve(id, options); const localVarOperationServerIndex = configuration?.serverIndex ?? 0; - const localVarOperationServerBasePath = operationServerMap['ComposerApi.composerConnectivityStatementAddSpecieCreate']?.[localVarOperationServerIndex]?.url; + const localVarOperationServerBasePath = operationServerMap['ComposerApi.composerConnectivityStatementAnatomicalEntityRetrieve']?.[localVarOperationServerIndex]?.url; return (axios, basePath) => createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)(axios, localVarOperationServerBasePath || basePath); }, /** - * ConnectivityStatement - * @param {number} id A unique integer value identifying this connectivity statement. - * @param {number} tagId + * ConnectivityStatementAnatomicalEntity: Manage anatomical entity-based relationships + * @param {number} id A unique integer value identifying this connectivity statement anatomical entity. + * @param {ConnectivityStatementAnatomicalEntity} connectivityStatementAnatomicalEntity * @param {*} [options] Override http request option. 
* @throws {RequiredError} */ - async composerConnectivityStatementAddTagCreate(id: number, tagId: number, options?: RawAxiosRequestConfig): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise> { - const localVarAxiosArgs = await localVarAxiosParamCreator.composerConnectivityStatementAddTagCreate(id, tagId, options); + async composerConnectivityStatementAnatomicalEntityUpdate(id: number, connectivityStatementAnatomicalEntity: ConnectivityStatementAnatomicalEntity, options?: RawAxiosRequestConfig): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise> { + const localVarAxiosArgs = await localVarAxiosParamCreator.composerConnectivityStatementAnatomicalEntityUpdate(id, connectivityStatementAnatomicalEntity, options); const localVarOperationServerIndex = configuration?.serverIndex ?? 0; - const localVarOperationServerBasePath = operationServerMap['ComposerApi.composerConnectivityStatementAddTagCreate']?.[localVarOperationServerIndex]?.url; + const localVarOperationServerBasePath = operationServerMap['ComposerApi.composerConnectivityStatementAnatomicalEntityUpdate']?.[localVarOperationServerIndex]?.url; return (axios, basePath) => createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)(axios, localVarOperationServerBasePath || basePath); }, /** @@ -7575,6 +8558,19 @@ export const ComposerApiFp = function(configuration?: Configuration) { const localVarOperationServerBasePath = operationServerMap['ComposerApi.composerConnectivityStatementCreate']?.[localVarOperationServerIndex]?.url; return (axios, basePath) => createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)(axios, localVarOperationServerBasePath || basePath); }, + /** + * ConnectivityStatement + * @param {number} expertConsultantId + * @param {number} id A unique integer value identifying this connectivity statement. + * @param {*} [options] Override http request option. 
+ * @throws {RequiredError} + */ + async composerConnectivityStatementDelExpertConsultantDestroy(expertConsultantId: number, id: number, options?: RawAxiosRequestConfig): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise> { + const localVarAxiosArgs = await localVarAxiosParamCreator.composerConnectivityStatementDelExpertConsultantDestroy(expertConsultantId, id, options); + const localVarOperationServerIndex = configuration?.serverIndex ?? 0; + const localVarOperationServerBasePath = operationServerMap['ComposerApi.composerConnectivityStatementDelExpertConsultantDestroy']?.[localVarOperationServerIndex]?.url; + return (axios, basePath) => createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)(axios, localVarOperationServerBasePath || basePath); + }, /** * ConnectivityStatement * @param {number} id A unique integer value identifying this connectivity statement. @@ -7692,7 +8688,82 @@ export const ComposerApiFp = function(configuration?: Configuration) { return (axios, basePath) => createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)(axios, localVarOperationServerBasePath || basePath); }, /** - * ConnectivityStatementTriple: + * ConnectivityStatementText: Manage text-based relationships + * @param {ConnectivityStatementText} connectivityStatementText + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + async composerConnectivityStatementTextCreate(connectivityStatementText: ConnectivityStatementText, options?: RawAxiosRequestConfig): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise> { + const localVarAxiosArgs = await localVarAxiosParamCreator.composerConnectivityStatementTextCreate(connectivityStatementText, options); + const localVarOperationServerIndex = configuration?.serverIndex ?? 
0; + const localVarOperationServerBasePath = operationServerMap['ComposerApi.composerConnectivityStatementTextCreate']?.[localVarOperationServerIndex]?.url; + return (axios, basePath) => createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)(axios, localVarOperationServerBasePath || basePath); + }, + /** + * ConnectivityStatementText: Manage text-based relationships + * @param {number} id A unique integer value identifying this connectivity statement text. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + async composerConnectivityStatementTextDestroy(id: number, options?: RawAxiosRequestConfig): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise> { + const localVarAxiosArgs = await localVarAxiosParamCreator.composerConnectivityStatementTextDestroy(id, options); + const localVarOperationServerIndex = configuration?.serverIndex ?? 0; + const localVarOperationServerBasePath = operationServerMap['ComposerApi.composerConnectivityStatementTextDestroy']?.[localVarOperationServerIndex]?.url; + return (axios, basePath) => createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)(axios, localVarOperationServerBasePath || basePath); + }, + /** + * ConnectivityStatementText: Manage text-based relationships + * @param {number} [limit] Number of results to return per page. + * @param {number} [offset] The initial index from which to return the results. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + async composerConnectivityStatementTextList(limit?: number, offset?: number, options?: RawAxiosRequestConfig): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise> { + const localVarAxiosArgs = await localVarAxiosParamCreator.composerConnectivityStatementTextList(limit, offset, options); + const localVarOperationServerIndex = configuration?.serverIndex ?? 
0; + const localVarOperationServerBasePath = operationServerMap['ComposerApi.composerConnectivityStatementTextList']?.[localVarOperationServerIndex]?.url; + return (axios, basePath) => createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)(axios, localVarOperationServerBasePath || basePath); + }, + /** + * ConnectivityStatementText: Manage text-based relationships + * @param {number} id A unique integer value identifying this connectivity statement text. + * @param {PatchedConnectivityStatementText} [patchedConnectivityStatementText] + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + async composerConnectivityStatementTextPartialUpdate(id: number, patchedConnectivityStatementText?: PatchedConnectivityStatementText, options?: RawAxiosRequestConfig): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise> { + const localVarAxiosArgs = await localVarAxiosParamCreator.composerConnectivityStatementTextPartialUpdate(id, patchedConnectivityStatementText, options); + const localVarOperationServerIndex = configuration?.serverIndex ?? 0; + const localVarOperationServerBasePath = operationServerMap['ComposerApi.composerConnectivityStatementTextPartialUpdate']?.[localVarOperationServerIndex]?.url; + return (axios, basePath) => createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)(axios, localVarOperationServerBasePath || basePath); + }, + /** + * ConnectivityStatementText: Manage text-based relationships + * @param {number} id A unique integer value identifying this connectivity statement text. + * @param {*} [options] Override http request option. 
+ * @throws {RequiredError} + */ + async composerConnectivityStatementTextRetrieve(id: number, options?: RawAxiosRequestConfig): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise> { + const localVarAxiosArgs = await localVarAxiosParamCreator.composerConnectivityStatementTextRetrieve(id, options); + const localVarOperationServerIndex = configuration?.serverIndex ?? 0; + const localVarOperationServerBasePath = operationServerMap['ComposerApi.composerConnectivityStatementTextRetrieve']?.[localVarOperationServerIndex]?.url; + return (axios, basePath) => createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)(axios, localVarOperationServerBasePath || basePath); + }, + /** + * ConnectivityStatementText: Manage text-based relationships + * @param {number} id A unique integer value identifying this connectivity statement text. + * @param {ConnectivityStatementText} connectivityStatementText + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + async composerConnectivityStatementTextUpdate(id: number, connectivityStatementText: ConnectivityStatementText, options?: RawAxiosRequestConfig): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise> { + const localVarAxiosArgs = await localVarAxiosParamCreator.composerConnectivityStatementTextUpdate(id, connectivityStatementText, options); + const localVarOperationServerIndex = configuration?.serverIndex ?? 0; + const localVarOperationServerBasePath = operationServerMap['ComposerApi.composerConnectivityStatementTextUpdate']?.[localVarOperationServerIndex]?.url; + return (axios, basePath) => createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)(axios, localVarOperationServerBasePath || basePath); + }, + /** + * ConnectivityStatementTriple: Manage triple-based relationships * @param {ConnectivityStatementTriple} connectivityStatementTriple * @param {*} [options] Override http request option. 
* @throws {RequiredError} @@ -7704,7 +8775,7 @@ export const ComposerApiFp = function(configuration?: Configuration) { return (axios, basePath) => createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)(axios, localVarOperationServerBasePath || basePath); }, /** - * ConnectivityStatementTriple: + * ConnectivityStatementTriple: Manage triple-based relationships * @param {number} id A unique integer value identifying this connectivity statement triple. * @param {*} [options] Override http request option. * @throws {RequiredError} @@ -7716,7 +8787,7 @@ export const ComposerApiFp = function(configuration?: Configuration) { return (axios, basePath) => createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)(axios, localVarOperationServerBasePath || basePath); }, /** - * ConnectivityStatementTriple: + * ConnectivityStatementTriple: Manage triple-based relationships * @param {number} [limit] Number of results to return per page. * @param {number} [offset] The initial index from which to return the results. * @param {*} [options] Override http request option. @@ -7729,7 +8800,7 @@ export const ComposerApiFp = function(configuration?: Configuration) { return (axios, basePath) => createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)(axios, localVarOperationServerBasePath || basePath); }, /** - * ConnectivityStatementTriple: + * ConnectivityStatementTriple: Manage triple-based relationships * @param {number} id A unique integer value identifying this connectivity statement triple. * @param {PatchedConnectivityStatementTriple} [patchedConnectivityStatementTriple] * @param {*} [options] Override http request option. 
@@ -7742,7 +8813,7 @@ export const ComposerApiFp = function(configuration?: Configuration) { return (axios, basePath) => createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)(axios, localVarOperationServerBasePath || basePath); }, /** - * ConnectivityStatementTriple: + * ConnectivityStatementTriple: Manage triple-based relationships * @param {number} id A unique integer value identifying this connectivity statement triple. * @param {*} [options] Override http request option. * @throws {RequiredError} @@ -7754,7 +8825,7 @@ export const ComposerApiFp = function(configuration?: Configuration) { return (axios, basePath) => createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)(axios, localVarOperationServerBasePath || basePath); }, /** - * ConnectivityStatementTriple: + * ConnectivityStatementTriple: Manage triple-based relationships * @param {number} id A unique integer value identifying this connectivity statement triple. * @param {ConnectivityStatementTriple} connectivityStatementTriple * @param {*} [options] Override http request option. @@ -8531,6 +9602,16 @@ export const ComposerApiFactory = function (configuration?: Configuration, baseP composerAnatomicalEntityRetrieve(id: number, options?: RawAxiosRequestConfig): AxiosPromise { return localVarFp.composerAnatomicalEntityRetrieve(id, options).then((request) => request(axios, basePath)); }, + /** + * ConnectivityStatement + * @param {number} id A unique integer value identifying this connectivity statement. + * @param {ExpertConsultantCreate} expertConsultantCreate + * @param {*} [options] Override http request option. 
+ * @throws {RequiredError} + */ + composerConnectivityStatementAddExpertConsultantCreate(id: number, expertConsultantCreate: ExpertConsultantCreate, options?: RawAxiosRequestConfig): AxiosPromise<{ [key: string]: any; }> { + return localVarFp.composerConnectivityStatementAddExpertConsultantCreate(id, expertConsultantCreate, options).then((request) => request(axios, basePath)); + }, /** * ConnectivityStatement * @param {number} id A unique integer value identifying this connectivity statement. @@ -8561,6 +9642,63 @@ export const ComposerApiFactory = function (configuration?: Configuration, baseP composerConnectivityStatementAddTagCreate(id: number, tagId: number, options?: RawAxiosRequestConfig): AxiosPromise { return localVarFp.composerConnectivityStatementAddTagCreate(id, tagId, options).then((request) => request(axios, basePath)); }, + /** + * ConnectivityStatementAnatomicalEntity: Manage anatomical entity-based relationships + * @param {ConnectivityStatementAnatomicalEntity} connectivityStatementAnatomicalEntity + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + composerConnectivityStatementAnatomicalEntityCreate(connectivityStatementAnatomicalEntity: ConnectivityStatementAnatomicalEntity, options?: RawAxiosRequestConfig): AxiosPromise { + return localVarFp.composerConnectivityStatementAnatomicalEntityCreate(connectivityStatementAnatomicalEntity, options).then((request) => request(axios, basePath)); + }, + /** + * ConnectivityStatementAnatomicalEntity: Manage anatomical entity-based relationships + * @param {number} id A unique integer value identifying this connectivity statement anatomical entity. + * @param {*} [options] Override http request option. 
+ * @throws {RequiredError} + */ + composerConnectivityStatementAnatomicalEntityDestroy(id: number, options?: RawAxiosRequestConfig): AxiosPromise { + return localVarFp.composerConnectivityStatementAnatomicalEntityDestroy(id, options).then((request) => request(axios, basePath)); + }, + /** + * ConnectivityStatementAnatomicalEntity: Manage anatomical entity-based relationships + * @param {number} [limit] Number of results to return per page. + * @param {number} [offset] The initial index from which to return the results. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + composerConnectivityStatementAnatomicalEntityList(limit?: number, offset?: number, options?: RawAxiosRequestConfig): AxiosPromise { + return localVarFp.composerConnectivityStatementAnatomicalEntityList(limit, offset, options).then((request) => request(axios, basePath)); + }, + /** + * ConnectivityStatementAnatomicalEntity: Manage anatomical entity-based relationships + * @param {number} id A unique integer value identifying this connectivity statement anatomical entity. + * @param {PatchedConnectivityStatementAnatomicalEntity} [patchedConnectivityStatementAnatomicalEntity] + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + composerConnectivityStatementAnatomicalEntityPartialUpdate(id: number, patchedConnectivityStatementAnatomicalEntity?: PatchedConnectivityStatementAnatomicalEntity, options?: RawAxiosRequestConfig): AxiosPromise { + return localVarFp.composerConnectivityStatementAnatomicalEntityPartialUpdate(id, patchedConnectivityStatementAnatomicalEntity, options).then((request) => request(axios, basePath)); + }, + /** + * ConnectivityStatementAnatomicalEntity: Manage anatomical entity-based relationships + * @param {number} id A unique integer value identifying this connectivity statement anatomical entity. + * @param {*} [options] Override http request option. 
+ * @throws {RequiredError} + */ + composerConnectivityStatementAnatomicalEntityRetrieve(id: number, options?: RawAxiosRequestConfig): AxiosPromise { + return localVarFp.composerConnectivityStatementAnatomicalEntityRetrieve(id, options).then((request) => request(axios, basePath)); + }, + /** + * ConnectivityStatementAnatomicalEntity: Manage anatomical entity-based relationships + * @param {number} id A unique integer value identifying this connectivity statement anatomical entity. + * @param {ConnectivityStatementAnatomicalEntity} connectivityStatementAnatomicalEntity + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + composerConnectivityStatementAnatomicalEntityUpdate(id: number, connectivityStatementAnatomicalEntity: ConnectivityStatementAnatomicalEntity, options?: RawAxiosRequestConfig): AxiosPromise { + return localVarFp.composerConnectivityStatementAnatomicalEntityUpdate(id, connectivityStatementAnatomicalEntity, options).then((request) => request(axios, basePath)); + }, /** * ConnectivityStatement * @param {number} id A unique integer value identifying this connectivity statement. @@ -8632,6 +9770,16 @@ export const ComposerApiFactory = function (configuration?: Configuration, baseP composerConnectivityStatementCreate(connectivityStatement?: ConnectivityStatement, options?: RawAxiosRequestConfig): AxiosPromise { return localVarFp.composerConnectivityStatementCreate(connectivityStatement, options).then((request) => request(axios, basePath)); }, + /** + * ConnectivityStatement + * @param {number} expertConsultantId + * @param {number} id A unique integer value identifying this connectivity statement. + * @param {*} [options] Override http request option. 
+ * @throws {RequiredError} + */ + composerConnectivityStatementDelExpertConsultantDestroy(expertConsultantId: number, id: number, options?: RawAxiosRequestConfig): AxiosPromise { + return localVarFp.composerConnectivityStatementDelExpertConsultantDestroy(expertConsultantId, id, options).then((request) => request(axios, basePath)); + }, /** * ConnectivityStatement * @param {number} id A unique integer value identifying this connectivity statement. @@ -8725,7 +9873,64 @@ export const ComposerApiFactory = function (configuration?: Configuration, baseP return localVarFp.composerConnectivityStatementRetrieve(id, options).then((request) => request(axios, basePath)); }, /** - * ConnectivityStatementTriple: + * ConnectivityStatementText: Manage text-based relationships + * @param {ConnectivityStatementText} connectivityStatementText + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + composerConnectivityStatementTextCreate(connectivityStatementText: ConnectivityStatementText, options?: RawAxiosRequestConfig): AxiosPromise { + return localVarFp.composerConnectivityStatementTextCreate(connectivityStatementText, options).then((request) => request(axios, basePath)); + }, + /** + * ConnectivityStatementText: Manage text-based relationships + * @param {number} id A unique integer value identifying this connectivity statement text. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + composerConnectivityStatementTextDestroy(id: number, options?: RawAxiosRequestConfig): AxiosPromise { + return localVarFp.composerConnectivityStatementTextDestroy(id, options).then((request) => request(axios, basePath)); + }, + /** + * ConnectivityStatementText: Manage text-based relationships + * @param {number} [limit] Number of results to return per page. + * @param {number} [offset] The initial index from which to return the results. + * @param {*} [options] Override http request option. 
+ * @throws {RequiredError} + */ + composerConnectivityStatementTextList(limit?: number, offset?: number, options?: RawAxiosRequestConfig): AxiosPromise { + return localVarFp.composerConnectivityStatementTextList(limit, offset, options).then((request) => request(axios, basePath)); + }, + /** + * ConnectivityStatementText: Manage text-based relationships + * @param {number} id A unique integer value identifying this connectivity statement text. + * @param {PatchedConnectivityStatementText} [patchedConnectivityStatementText] + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + composerConnectivityStatementTextPartialUpdate(id: number, patchedConnectivityStatementText?: PatchedConnectivityStatementText, options?: RawAxiosRequestConfig): AxiosPromise { + return localVarFp.composerConnectivityStatementTextPartialUpdate(id, patchedConnectivityStatementText, options).then((request) => request(axios, basePath)); + }, + /** + * ConnectivityStatementText: Manage text-based relationships + * @param {number} id A unique integer value identifying this connectivity statement text. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + composerConnectivityStatementTextRetrieve(id: number, options?: RawAxiosRequestConfig): AxiosPromise { + return localVarFp.composerConnectivityStatementTextRetrieve(id, options).then((request) => request(axios, basePath)); + }, + /** + * ConnectivityStatementText: Manage text-based relationships + * @param {number} id A unique integer value identifying this connectivity statement text. + * @param {ConnectivityStatementText} connectivityStatementText + * @param {*} [options] Override http request option. 
+ * @throws {RequiredError} + */ + composerConnectivityStatementTextUpdate(id: number, connectivityStatementText: ConnectivityStatementText, options?: RawAxiosRequestConfig): AxiosPromise { + return localVarFp.composerConnectivityStatementTextUpdate(id, connectivityStatementText, options).then((request) => request(axios, basePath)); + }, + /** + * ConnectivityStatementTriple: Manage triple-based relationships * @param {ConnectivityStatementTriple} connectivityStatementTriple * @param {*} [options] Override http request option. * @throws {RequiredError} @@ -8734,7 +9939,7 @@ export const ComposerApiFactory = function (configuration?: Configuration, baseP return localVarFp.composerConnectivityStatementTripleCreate(connectivityStatementTriple, options).then((request) => request(axios, basePath)); }, /** - * ConnectivityStatementTriple: + * ConnectivityStatementTriple: Manage triple-based relationships * @param {number} id A unique integer value identifying this connectivity statement triple. * @param {*} [options] Override http request option. * @throws {RequiredError} @@ -8743,7 +9948,7 @@ export const ComposerApiFactory = function (configuration?: Configuration, baseP return localVarFp.composerConnectivityStatementTripleDestroy(id, options).then((request) => request(axios, basePath)); }, /** - * ConnectivityStatementTriple: + * ConnectivityStatementTriple: Manage triple-based relationships * @param {number} [limit] Number of results to return per page. * @param {number} [offset] The initial index from which to return the results. * @param {*} [options] Override http request option. 
@@ -8753,7 +9958,7 @@ export const ComposerApiFactory = function (configuration?: Configuration, baseP return localVarFp.composerConnectivityStatementTripleList(limit, offset, options).then((request) => request(axios, basePath)); }, /** - * ConnectivityStatementTriple: + * ConnectivityStatementTriple: Manage triple-based relationships * @param {number} id A unique integer value identifying this connectivity statement triple. * @param {PatchedConnectivityStatementTriple} [patchedConnectivityStatementTriple] * @param {*} [options] Override http request option. @@ -8763,7 +9968,7 @@ export const ComposerApiFactory = function (configuration?: Configuration, baseP return localVarFp.composerConnectivityStatementTriplePartialUpdate(id, patchedConnectivityStatementTriple, options).then((request) => request(axios, basePath)); }, /** - * ConnectivityStatementTriple: + * ConnectivityStatementTriple: Manage triple-based relationships * @param {number} id A unique integer value identifying this connectivity statement triple. * @param {*} [options] Override http request option. * @throws {RequiredError} @@ -8772,7 +9977,7 @@ export const ComposerApiFactory = function (configuration?: Configuration, baseP return localVarFp.composerConnectivityStatementTripleRetrieve(id, options).then((request) => request(axios, basePath)); }, /** - * ConnectivityStatementTriple: + * ConnectivityStatementTriple: Manage triple-based relationships * @param {number} id A unique integer value identifying this connectivity statement triple. * @param {ConnectivityStatementTriple} connectivityStatementTriple * @param {*} [options] Override http request option. @@ -9389,6 +10594,18 @@ export class ComposerApi extends BaseAPI { return ComposerApiFp(this.configuration).composerAnatomicalEntityRetrieve(id, options).then((request) => request(this.axios, this.basePath)); } + /** + * ConnectivityStatement + * @param {number} id A unique integer value identifying this connectivity statement. 
+ * @param {ExpertConsultantCreate} expertConsultantCreate + * @param {*} [options] Override http request option. + * @throws {RequiredError} + * @memberof ComposerApi + */ + public composerConnectivityStatementAddExpertConsultantCreate(id: number, expertConsultantCreate: ExpertConsultantCreate, options?: RawAxiosRequestConfig) { + return ComposerApiFp(this.configuration).composerConnectivityStatementAddExpertConsultantCreate(id, expertConsultantCreate, options).then((request) => request(this.axios, this.basePath)); + } + /** * ConnectivityStatement * @param {number} id A unique integer value identifying this connectivity statement. @@ -9425,6 +10642,75 @@ export class ComposerApi extends BaseAPI { return ComposerApiFp(this.configuration).composerConnectivityStatementAddTagCreate(id, tagId, options).then((request) => request(this.axios, this.basePath)); } + /** + * ConnectivityStatementAnatomicalEntity: Manage anatomical entity-based relationships + * @param {ConnectivityStatementAnatomicalEntity} connectivityStatementAnatomicalEntity + * @param {*} [options] Override http request option. + * @throws {RequiredError} + * @memberof ComposerApi + */ + public composerConnectivityStatementAnatomicalEntityCreate(connectivityStatementAnatomicalEntity: ConnectivityStatementAnatomicalEntity, options?: RawAxiosRequestConfig) { + return ComposerApiFp(this.configuration).composerConnectivityStatementAnatomicalEntityCreate(connectivityStatementAnatomicalEntity, options).then((request) => request(this.axios, this.basePath)); + } + + /** + * ConnectivityStatementAnatomicalEntity: Manage anatomical entity-based relationships + * @param {number} id A unique integer value identifying this connectivity statement anatomical entity. + * @param {*} [options] Override http request option. 
+ * @throws {RequiredError} + * @memberof ComposerApi + */ + public composerConnectivityStatementAnatomicalEntityDestroy(id: number, options?: RawAxiosRequestConfig) { + return ComposerApiFp(this.configuration).composerConnectivityStatementAnatomicalEntityDestroy(id, options).then((request) => request(this.axios, this.basePath)); + } + + /** + * ConnectivityStatementAnatomicalEntity: Manage anatomical entity-based relationships + * @param {number} [limit] Number of results to return per page. + * @param {number} [offset] The initial index from which to return the results. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + * @memberof ComposerApi + */ + public composerConnectivityStatementAnatomicalEntityList(limit?: number, offset?: number, options?: RawAxiosRequestConfig) { + return ComposerApiFp(this.configuration).composerConnectivityStatementAnatomicalEntityList(limit, offset, options).then((request) => request(this.axios, this.basePath)); + } + + /** + * ConnectivityStatementAnatomicalEntity: Manage anatomical entity-based relationships + * @param {number} id A unique integer value identifying this connectivity statement anatomical entity. + * @param {PatchedConnectivityStatementAnatomicalEntity} [patchedConnectivityStatementAnatomicalEntity] + * @param {*} [options] Override http request option. 
+ * @throws {RequiredError} + * @memberof ComposerApi + */ + public composerConnectivityStatementAnatomicalEntityPartialUpdate(id: number, patchedConnectivityStatementAnatomicalEntity?: PatchedConnectivityStatementAnatomicalEntity, options?: RawAxiosRequestConfig) { + return ComposerApiFp(this.configuration).composerConnectivityStatementAnatomicalEntityPartialUpdate(id, patchedConnectivityStatementAnatomicalEntity, options).then((request) => request(this.axios, this.basePath)); + } + + /** + * ConnectivityStatementAnatomicalEntity: Manage anatomical entity-based relationships + * @param {number} id A unique integer value identifying this connectivity statement anatomical entity. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + * @memberof ComposerApi + */ + public composerConnectivityStatementAnatomicalEntityRetrieve(id: number, options?: RawAxiosRequestConfig) { + return ComposerApiFp(this.configuration).composerConnectivityStatementAnatomicalEntityRetrieve(id, options).then((request) => request(this.axios, this.basePath)); + } + + /** + * ConnectivityStatementAnatomicalEntity: Manage anatomical entity-based relationships + * @param {number} id A unique integer value identifying this connectivity statement anatomical entity. + * @param {ConnectivityStatementAnatomicalEntity} connectivityStatementAnatomicalEntity + * @param {*} [options] Override http request option. + * @throws {RequiredError} + * @memberof ComposerApi + */ + public composerConnectivityStatementAnatomicalEntityUpdate(id: number, connectivityStatementAnatomicalEntity: ConnectivityStatementAnatomicalEntity, options?: RawAxiosRequestConfig) { + return ComposerApiFp(this.configuration).composerConnectivityStatementAnatomicalEntityUpdate(id, connectivityStatementAnatomicalEntity, options).then((request) => request(this.axios, this.basePath)); + } + /** * ConnectivityStatement * @param {number} id A unique integer value identifying this connectivity statement. 
@@ -9506,6 +10792,18 @@ export class ComposerApi extends BaseAPI { return ComposerApiFp(this.configuration).composerConnectivityStatementCreate(connectivityStatement, options).then((request) => request(this.axios, this.basePath)); } + /** + * ConnectivityStatement + * @param {number} expertConsultantId + * @param {number} id A unique integer value identifying this connectivity statement. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + * @memberof ComposerApi + */ + public composerConnectivityStatementDelExpertConsultantDestroy(expertConsultantId: number, id: number, options?: RawAxiosRequestConfig) { + return ComposerApiFp(this.configuration).composerConnectivityStatementDelExpertConsultantDestroy(expertConsultantId, id, options).then((request) => request(this.axios, this.basePath)); + } + /** * ConnectivityStatement * @param {number} id A unique integer value identifying this connectivity statement. @@ -9615,7 +10913,76 @@ export class ComposerApi extends BaseAPI { } /** - * ConnectivityStatementTriple: + * ConnectivityStatementText: Manage text-based relationships + * @param {ConnectivityStatementText} connectivityStatementText + * @param {*} [options] Override http request option. + * @throws {RequiredError} + * @memberof ComposerApi + */ + public composerConnectivityStatementTextCreate(connectivityStatementText: ConnectivityStatementText, options?: RawAxiosRequestConfig) { + return ComposerApiFp(this.configuration).composerConnectivityStatementTextCreate(connectivityStatementText, options).then((request) => request(this.axios, this.basePath)); + } + + /** + * ConnectivityStatementText: Manage text-based relationships + * @param {number} id A unique integer value identifying this connectivity statement text. + * @param {*} [options] Override http request option. 
+ * @throws {RequiredError} + * @memberof ComposerApi + */ + public composerConnectivityStatementTextDestroy(id: number, options?: RawAxiosRequestConfig) { + return ComposerApiFp(this.configuration).composerConnectivityStatementTextDestroy(id, options).then((request) => request(this.axios, this.basePath)); + } + + /** + * ConnectivityStatementText: Manage text-based relationships + * @param {number} [limit] Number of results to return per page. + * @param {number} [offset] The initial index from which to return the results. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + * @memberof ComposerApi + */ + public composerConnectivityStatementTextList(limit?: number, offset?: number, options?: RawAxiosRequestConfig) { + return ComposerApiFp(this.configuration).composerConnectivityStatementTextList(limit, offset, options).then((request) => request(this.axios, this.basePath)); + } + + /** + * ConnectivityStatementText: Manage text-based relationships + * @param {number} id A unique integer value identifying this connectivity statement text. + * @param {PatchedConnectivityStatementText} [patchedConnectivityStatementText] + * @param {*} [options] Override http request option. + * @throws {RequiredError} + * @memberof ComposerApi + */ + public composerConnectivityStatementTextPartialUpdate(id: number, patchedConnectivityStatementText?: PatchedConnectivityStatementText, options?: RawAxiosRequestConfig) { + return ComposerApiFp(this.configuration).composerConnectivityStatementTextPartialUpdate(id, patchedConnectivityStatementText, options).then((request) => request(this.axios, this.basePath)); + } + + /** + * ConnectivityStatementText: Manage text-based relationships + * @param {number} id A unique integer value identifying this connectivity statement text. + * @param {*} [options] Override http request option. 
+ * @throws {RequiredError} + * @memberof ComposerApi + */ + public composerConnectivityStatementTextRetrieve(id: number, options?: RawAxiosRequestConfig) { + return ComposerApiFp(this.configuration).composerConnectivityStatementTextRetrieve(id, options).then((request) => request(this.axios, this.basePath)); + } + + /** + * ConnectivityStatementText: Manage text-based relationships + * @param {number} id A unique integer value identifying this connectivity statement text. + * @param {ConnectivityStatementText} connectivityStatementText + * @param {*} [options] Override http request option. + * @throws {RequiredError} + * @memberof ComposerApi + */ + public composerConnectivityStatementTextUpdate(id: number, connectivityStatementText: ConnectivityStatementText, options?: RawAxiosRequestConfig) { + return ComposerApiFp(this.configuration).composerConnectivityStatementTextUpdate(id, connectivityStatementText, options).then((request) => request(this.axios, this.basePath)); + } + + /** + * ConnectivityStatementTriple: Manage triple-based relationships * @param {ConnectivityStatementTriple} connectivityStatementTriple * @param {*} [options] Override http request option. * @throws {RequiredError} @@ -9626,7 +10993,7 @@ export class ComposerApi extends BaseAPI { } /** - * ConnectivityStatementTriple: + * ConnectivityStatementTriple: Manage triple-based relationships * @param {number} id A unique integer value identifying this connectivity statement triple. * @param {*} [options] Override http request option. * @throws {RequiredError} @@ -9637,7 +11004,7 @@ export class ComposerApi extends BaseAPI { } /** - * ConnectivityStatementTriple: + * ConnectivityStatementTriple: Manage triple-based relationships * @param {number} [limit] Number of results to return per page. * @param {number} [offset] The initial index from which to return the results. * @param {*} [options] Override http request option. 
@@ -9649,7 +11016,7 @@ export class ComposerApi extends BaseAPI { } /** - * ConnectivityStatementTriple: + * ConnectivityStatementTriple: Manage triple-based relationships * @param {number} id A unique integer value identifying this connectivity statement triple. * @param {PatchedConnectivityStatementTriple} [patchedConnectivityStatementTriple] * @param {*} [options] Override http request option. @@ -9661,7 +11028,7 @@ export class ComposerApi extends BaseAPI { } /** - * ConnectivityStatementTriple: + * ConnectivityStatementTriple: Manage triple-based relationships * @param {number} id A unique integer value identifying this connectivity statement triple. * @param {*} [options] Override http request option. * @throws {RequiredError} @@ -9672,7 +11039,7 @@ export class ComposerApi extends BaseAPI { } /** - * ConnectivityStatementTriple: + * ConnectivityStatementTriple: Manage triple-based relationships * @param {number} id A unique integer value identifying this connectivity statement triple. * @param {ConnectivityStatementTriple} connectivityStatementTriple * @param {*} [options] Override http request option. 
diff --git a/applications/composer/frontend/src/components/DistillationTab/DistillationTab.tsx b/applications/composer/frontend/src/components/DistillationTab/DistillationTab.tsx index cf04f89a..e70615cf 100644 --- a/applications/composer/frontend/src/components/DistillationTab/DistillationTab.tsx +++ b/applications/composer/frontend/src/components/DistillationTab/DistillationTab.tsx @@ -11,6 +11,7 @@ import { useSectionStyle, useGreyBgContainer } from "../../styles/styles"; import { useTheme } from "@mui/system"; import StatementDetailsAccordion from "../TriageStatementSection/StatementDetailsAccordion"; import ProvenancesForm from "../Forms/ProvenanceForm"; +import ExpertConsultantsForm from "../Forms/ExpertConsultantForm"; import StatementPreviewForm from "../Forms/StatementPreviewForm"; import StatementTriples from "../Forms/StatementTriples"; @@ -64,6 +65,13 @@ const DistillationTab = ({ className="provenance" isDisabled={isDisabled} /> + { + const { expertConsultantsData, setter, extraData, isDisabled } = props + const [isLoading, setIsLoading] = useState(false) + + const { schema, uiSchema } = jsonSchemas.getExpertConsultantsSchema() + const copiedSchema = JSON.parse(JSON.stringify(schema)); + const copiedUISchema = JSON.parse(JSON.stringify(uiSchema)); + + const refresh = () => { + setter() + } + + copiedSchema.title = "" + + const handleAutocompleteChange = (e:any, value:any)=>{ + const newValue = value.pop() + + // Validate the URI format before saving + if (!isValidURI(newValue)) { + alert(getURIValidationErrorMessage("expert consultant URI")); + return; + } + + setIsLoading(true) + return checkOwnership( + extraData.connectivity_statement_id, + async () => { + expertConsultantService.save({statementId: extraData.connectivity_statement_id, uri: newValue}).then(()=>{ + setter() + }).catch((error) => { + // Handle backend validation errors + if (error.response && error.response.data && error.response.data.uri) { + alert(`Validation error: 
${error.response.data.uri[0]}`); + } else { + alert('Failed to save expert consultant. Please try again.'); + } + }).finally(() => { + setIsLoading(false) + }) + }, + () => { + setIsLoading(false) + return ChangeRequestStatus.CANCELLED; + }, + getOwnershipAlertMessage // message to show when ownership needs to be reassigned + ); + } + + copiedUISchema.uri = { + "ui:widget": TextfieldWithChips, + "ui:options": { + isDisabled: !extraData.connectivity_statement_id || isDisabled, + data: expertConsultantsData?.map((row: ExpertConsultant) => ({id: row.id, label: row.uri, enableClick: isValidUrl(row.uri) })) || [], + placeholder: isDisabled ? null : 'Enter Expert Consultant URIs (Press Enter to add)', + removeChip: function(expertConsultantId: any) { + setIsLoading(true) + return checkOwnership( + extraData.connectivity_statement_id, + async () => { + await expertConsultantService.delete(expertConsultantId, extraData.connectivity_statement_id) + refresh() + setIsLoading(false) + }, + () => { + setIsLoading(false) + return ChangeRequestStatus.CANCELLED; + }, + getOwnershipAlertMessage // message to show when ownership needs to be reassigned + ); + + }, + onAutocompleteChange: handleAutocompleteChange, + } + } + copiedUISchema.connectivity_statement_id = { + "ui:widget": 'hidden', + } + + copiedSchema.properties.connectivity_statement_id = { + ...copiedSchema.properties.connectivity_statement_id, + default: extraData.connectivity_statement_id + } + + return ( + refresh()} + disabled={isDisabled} + isLoading={isLoading} + /> + ) +} + +export default ExpertConsultantsForm diff --git a/applications/composer/frontend/src/components/Forms/ProvenanceForm.tsx b/applications/composer/frontend/src/components/Forms/ProvenanceForm.tsx index fb57a17a..8f3ea332 100644 --- a/applications/composer/frontend/src/components/Forms/ProvenanceForm.tsx +++ b/applications/composer/frontend/src/components/Forms/ProvenanceForm.tsx @@ -6,6 +6,7 @@ import {Provenance} from "../../apiclient/backend"; 
import TextfieldWithChips from "../Widgets/TextfieldWithChips"; import {checkOwnership, getOwnershipAlertMessage} from "../../helpers/ownershipAlert"; import {ChangeRequestStatus} from "../../helpers/settings"; +import {isValidURI, isValidUrl, getURIValidationErrorMessage} from "../../helpers/uriValidation"; const ProvenancesForm = (props: any) => { const { provenancesData, setter, extraData, isDisabled } = props @@ -26,14 +27,8 @@ const ProvenancesForm = (props: any) => { const newValue = value.pop() // Validate the URI format before saving - if (!isValidProvenance(newValue)) { - alert( - "Invalid provenance format. Please enter a valid:\n" + - "• DOI (e.g., '10.1000/xyz123' or 'https://doi.org/10.1000/xyz123')\n" + - "• PMID (e.g., 'PMID:12345678' or 'https://pubmed.ncbi.nlm.nih.gov/12345678')\n" + - "• PMCID (e.g., 'PMC1234567' or 'https://www.ncbi.nlm.nih.gov/pmc/articles/PMC1234567')\n" + - "• URL (e.g., 'https://example.com')" - ); + if (!isValidURI(newValue)) { + alert(getURIValidationErrorMessage("provenance")); return; } @@ -62,54 +57,6 @@ const ProvenancesForm = (props: any) => { ); } - const isValidProvenance = (uri: string) => { - if (!uri || !uri.trim()) { - return false; - } - - const trimmedUri = uri.trim(); - - // DOI patterns - const doiPatterns = [ - /^10\.\d{4,}\/[a-zA-Z0-9\-._():]+(?:\/[a-zA-Z0-9\-._():]+)*$/, // Standard DOI format - no consecutive slashes - /^doi:10\.\d{4,}\/[a-zA-Z0-9\-._():]+(?:\/[a-zA-Z0-9\-._():]+)*$/i, // DOI with prefix - /^https?:\/\/doi\.org\/10\.\d{4,}\/[a-zA-Z0-9\-._():]+(?:\/[a-zA-Z0-9\-._():]+)*$/i, // DOI URL - /^https?:\/\/dx\.doi\.org\/10\.\d{4,}\/[a-zA-Z0-9\-._():]+(?:\/[a-zA-Z0-9\-._():]+)*$/i, // Alternative DOI URL - ]; - - // PMID patterns - const pmidPatterns = [ - /^PMID:\s*\d+$/i, // PMID with prefix - /^https?:\/\/pubmed\.ncbi\.nlm\.nih\.gov\/\d+\/?$/i, // PubMed URL - ]; - - // PMCID patterns - const pmcidPatterns = [ - /^PMC\d+$/i, // PMC ID format - /^PMCID:\s*PMC\d+$/i, // PMCID with prefix - 
/^https?:\/\/www\.ncbi\.nlm\.nih\.gov\/pmc\/articles\/PMC\d+\/?$/i, // PMC URL - ]; - - // URL pattern - const urlPattern = /^https?:\/\/[a-zA-Z0-9\-.]+(?::[0-9]+)?(?:\/[a-zA-Z0-9\-._~!$&'()*+,;=:@]+)*(?:\?[a-zA-Z0-9\-._~!$&'()*+,;=:@\/?]*)?(?:\#[a-zA-Z0-9\-._~!$&'()*+,;=:@\/?]*)?$/i; - - // Check if it matches any of the valid patterns - const allPatterns = [...doiPatterns, ...pmidPatterns, ...pmcidPatterns, urlPattern]; - - return allPatterns.some(pattern => pattern.test(trimmedUri)); - } - - const isValidUrl = (uri: string) =>{ - var urlPattern = new RegExp('^(https?:\\/\\/)?'+ - '((([a-z\\d]([a-z\\d-]*[a-z\\d])*)\\.)+[a-z]{2,}|'+ - '((\\d{1,3}\\.){3}\\d{1,3}))'+ - '(\\:\\d+)?(\\/[-a-z\\d%_.~+]*)*'+ - '(\\?[;&a-z\\d%_.~+=-]*)?'+ - '(\\#[-a-z\\d_]*)?$','i') - if (!uri.match(urlPattern)) return false - return true - } - copiedUISchema.uri = { "ui:widget": TextfieldWithChips, "ui:options": { diff --git a/applications/composer/frontend/src/components/Forms/StatementForm.tsx b/applications/composer/frontend/src/components/Forms/StatementForm.tsx index b4e2da1a..4afe356a 100644 --- a/applications/composer/frontend/src/components/Forms/StatementForm.tsx +++ b/applications/composer/frontend/src/components/Forms/StatementForm.tsx @@ -1,4 +1,4 @@ -import React, { forwardRef, useEffect, useState } from "react"; +import React, { forwardRef, useEffect, useState, useMemo } from "react"; import { FormBase } from "./FormBase"; import { jsonSchemas } from "../../services/JsonSchema"; import statementService from "../../services/StatementService"; @@ -43,16 +43,347 @@ import { checkOwnership, getOwnershipAlertMessage } from "../../helpers/ownershi import { useDispatch } from "react-redux"; import { setWasChangeDetected } from "../../redux/statementSlice"; import { AutocompleteWithChips } from "../Widgets/AutocompleteWithChips"; +import { RelationshipType, RelationshipOption } from "../../types/relationshipTypes"; +import { vars } from "../../theme/variables"; + +/** + * Helper 
function to wrap relationship updates with ownership check + */ +const withOwnershipCheck = async ( + statementId: number, + updateFn: () => Promise, + refreshStatement: () => void, +): Promise => { + return checkOwnership( + statementId, + async () => { + await updateFn(); + return ChangeRequestStatus.SAVED; + }, + () => {}, + getOwnershipAlertMessage + ).then((status) => { + if (status === ChangeRequestStatus.SAVED) { + refreshStatement(); + } + return status; + }); +}; + +/** + * Widget configuration factory for different relationship types + */ +const createWidgetConfig = ( + relationshipType: RelationshipType, + relationshipKey: string | number, + relationshipData: RelationshipOption, + propertyTitle: string, + statement: any, + isDisabled: boolean, + refreshStatement: () => void, +) => { + const configs = { + [RelationshipType.ANATOMICAL_MULTI]: () => ({ + "ui:widget": CustomEntitiesDropdown, + "ui:options": { + isDisabled, + statement: statement, + label: propertyTitle, + placeholder: `Select ${propertyTitle}`, + searchPlaceholder: `Search for ${propertyTitle}`, + noResultReason: "No anatomical entities found", + disabledReason: "", + fieldName: `statement_anatomical_entities.${relationshipKey}`, + chipsNumber: 5, + minWidth: "50rem", + refreshStatement: refreshStatement, + labelPosition: "top", + labelVariant: "h6", + labelFontWeight: 500, + labelMarginBottom: 2, + labelColor: vars.titleFontColor, + onSearch: async ( + searchValue: string, + formId: string, + selectedOptions: Option[], + ) => { + const excludedIds = selectedOptions.map((entity: Option) => + Number(entity.id), + ); + return getAnatomicalEntities( + searchValue, + propertyTitle || "", + excludedIds, + ); + }, + onUpdate: async (selectedOptions: Option[]) => { + return withOwnershipCheck( + statement.id, + async () => { + const currentRelationship = statement?.statement_anatomical_entities?.[relationshipKey]; + const selectedIds = selectedOptions.map((opt: Option) => Number(opt.id)); + + if 
(currentRelationship?.id) { + await statementService.updateAnatomicalEntityRelationship(currentRelationship.id, { + connectivity_statement: statement.id, + relationship: relationshipKey, + anatomical_entities: selectedIds + }); + } else if (selectedIds.length > 0) { + await statementService.assignAnatomicalEntityRelationship({ + connectivity_statement: statement.id, + relationship: relationshipKey, + anatomical_entities: selectedIds + }); + } else if (currentRelationship?.id && selectedIds.length === 0) { + await statementService.deleteAnatomicalEntityRelationship(currentRelationship.id); + } + }, + refreshStatement + ); + }, + errors: "", + mapValueToOption: (value: any) => { + const relationshipData = value || statement?.statement_anatomical_entities?.[relationshipKey]; + if (relationshipData?.anatomical_entities) { + return mapAnatomicalEntitiesToOptions( + relationshipData.anatomical_entities, + propertyTitle || "" + ); + } + return []; + }, + } + }), + [RelationshipType.TEXT]: () => ({ + "ui:widget": "CustomTextField", + "ui:options": { + isDisabled, + label: propertyTitle, + placeholder: `Enter ${propertyTitle}`, + onBlur2: async (value: any) => { + const currentText = statement?.statement_texts?.[relationshipKey]?.text; + + // Only trigger update if value actually changed + if (value === currentText) { + return; + } + + return withOwnershipCheck( + statement.id, + async () => { + const currentRelationship = statement?.statement_texts?.[relationshipKey]; + + if (currentRelationship?.id && (!value || value.trim() === "")) { + await statementService.deleteTextRelationship(currentRelationship.id); + } else if (currentRelationship?.id) { + await statementService.updateTextRelationship(currentRelationship.id, { + connectivity_statement: statement.id, + relationship: relationshipKey, + text: value + }); + } else if (value && value.trim() !== "") { + await statementService.assignTextRelationship({ + connectivity_statement: statement.id, + relationship: 
relationshipKey, + text: value + }); + } + }, + refreshStatement + ); + }, + } + }), + [RelationshipType.TRIPLE_MULTI]: () => { + const relationshipOption = relationshipData?.options?.map((option: any) => ({ + label: option.name, + value: option.id + })); + + return { + "ui:widget": "AutocompleteWithChips", + "ui:options": { + label: propertyTitle, + options: relationshipOption || [], + placeholder: `Select ${propertyTitle}...`, + data: statement?.statement_triples?.[relationshipKey]?.triples?.map((tripleId: number) => { + const option = relationshipData?.options?.find((opt: any) => opt.id === tripleId); + return { + label: option?.name || '', + value: tripleId + }; + }) || [], + removeChip: async (id: number) => { + return withOwnershipCheck( + statement.id, + async () => { + const currentRelationship = statement?.statement_triples?.[relationshipKey]; + if (currentRelationship?.id) { + const updatedTriples = currentRelationship.triples.filter((tid: number) => tid !== id); + if (updatedTriples.length > 0) { + await statementService.updateRelationship(currentRelationship.id, { + connectivity_statement: statement.id, + relationship: relationshipKey, + triples: updatedTriples + }); + } else { + await statementService.deleteRelationship(currentRelationship.id); + } + } + }, + refreshStatement + ); + }, + isDisabled, + onAutocompleteChange: async (event: any, newValue: any[]) => { + return withOwnershipCheck( + statement.id, + async () => { + const selectedTripleIds = newValue.map((v: any) => Number(v.value)); + const currentRelationship = statement?.statement_triples?.[relationshipKey]; + + if (currentRelationship?.id) { + if (selectedTripleIds.length > 0) { + await statementService.updateRelationship(currentRelationship.id, { + connectivity_statement: statement.id, + relationship: relationshipKey, + triples: selectedTripleIds + }); + } else { + await statementService.deleteRelationship(currentRelationship.id); + } + } else if (selectedTripleIds.length > 0) { + await 
statementService.assignRelationship({ + connectivity_statement: statement.id, + relationship: relationshipKey, + triples: selectedTripleIds + }); + } + }, + refreshStatement + ); + } + } + }; + }, + [RelationshipType.TRIPLE_SINGLE]: () => { + const relationshipOption = relationshipData?.options?.map((option: any) => ({ + label: option.name, + value: option.id + })); + + return { + "ui:widget": "CustomSingleSelect", + "ui:options": { + label: propertyTitle, + data: relationshipOption || [], + onChange2: async (value: any) => { + const currentRelationship = statement?.statement_triples?.[relationshipKey]; + const currentValue = currentRelationship?.triples?.[0]; + + // Only trigger update if value actually changed + if (value === currentValue) { + return; + } + + return withOwnershipCheck( + statement.id, + async () => { + if (currentRelationship?.id && value === null) { + await statementService.deleteRelationship(currentRelationship.id); + } else if (value !== null && !currentRelationship?.id) { + await statementService.assignRelationship({ + connectivity_statement: statement.id, + relationship: relationshipKey, + triples: [Number(value)] + }); + } else if (value !== null && currentRelationship?.id) { + await statementService.updateRelationship(currentRelationship.id, { + connectivity_statement: statement.id, + relationship: relationshipKey, + triples: [Number(value)] + }); + } + }, + refreshStatement + ); + }, + isDisabled, + } + }; + }, + }; + + const configFn = configs[relationshipType]; + if (configFn) { + return configFn(); + } + + // Fallback to text field + return { + "ui:widget": "CustomTextField", + "ui:options": { + isDisabled, + label: propertyTitle, + placeholder: `Enter ${propertyTitle}`, + } + }; +}; const StatementForm = forwardRef((props: any, ref: React.Ref) => { const { uiFields, statement, isDisabled, action: refreshStatement, onInputBlur, alertId, currentExpanded, onInputFocus } = props; const { schema, uiSchema } = 
jsonSchemas.getConnectivityStatementSchema(); const copiedSchema = JSON.parse(JSON.stringify(schema)); const copiedUISchema = JSON.parse(JSON.stringify(uiSchema)); - const [relationshipOptions, setRelationshipOptions] = useState([]); + const [relationshipOptions, setRelationshipOptions] = useState([]); const dispatch = useDispatch(); - // TODO: set up the widgets for the schema + + /** + * Transform statement data to match form schema expectations + * Maps nested relationship objects to simple values expected by the form + */ + const transformedStatement = useMemo(() => { + if (!statement || relationshipOptions.length === 0) return statement; + + const transformed = { ...statement }; + transformed.statement_triples = {}; + + // Transform each relationship based on its type + relationshipOptions.forEach((rel) => { + const key = String(rel.id); + + switch (rel.type) { + case RelationshipType.TEXT: + if (statement.statement_texts?.[key]) { + transformed.statement_triples[key] = statement.statement_texts[key].text; + } + break; + + case RelationshipType.TRIPLE_SINGLE: + if (statement.statement_triples?.[key]?.triples?.[0]) { + transformed.statement_triples[key] = statement.statement_triples[key].triples[0]; + } + break; + + case RelationshipType.TRIPLE_MULTI: + if (statement.statement_triples?.[key]?.triples) { + transformed.statement_triples[key] = statement.statement_triples[key].triples; + } + break; + + case RelationshipType.ANATOMICAL_MULTI: + if (statement.statement_anatomical_entities?.[key]) { + transformed.statement_triples[key] = statement.statement_anatomical_entities[key]; + } + break; + } + }); + + return transformed; + }, [statement, relationshipOptions]); + copiedSchema.title = ""; copiedSchema.properties.destinations.title = ""; copiedSchema.properties.statement_alerts.items.properties.alert_type.type = "number"; @@ -67,117 +398,27 @@ const StatementForm = forwardRef((props: any, ref: React.Ref>((acc, [key, prop]) => { const property = prop as { type?: 
string | string[]; title?: string }; - const isDropdown = Array.isArray(property.type) && property.type.includes("null"); - const isMultiSelect = property.type === "array"; - /* eslint-disable eqeqeq */ - const relationshipOption = relationshipOptions.find((option: any) => option.id == key)?.options.map((option: any) => ({ - label: option.name, - value: option.id - })); - + const relationshipData = relationshipOptions.find((option: any) => option.id.toString() === key); + + if (!relationshipData) { + // Skip if relationship data not loaded yet + return acc; + } + + const relationshipType = relationshipData.type as RelationshipType; + const widgetConfig = createWidgetConfig( + relationshipType, + key, + relationshipData, + property.title || "", + statement, + isDisabled, + refreshStatement, + ); + return { ...acc, - [key]: { - "ui:widget": isMultiSelect ? "AutocompleteWithChips" : (isDropdown ? "CustomSingleSelect" : "CustomTextField"), - "ui:options": isMultiSelect ? { - options: relationshipOption || [], - placeholder: "Select statement triples...", - data: statement?.statement_triples?.[key]?.map((item: any) => { - const relationship = relationshipOptions.find((rel: any) => rel.id === Number(key)); - const option = relationship?.options.find((opt: any) => opt.id === item.value); - return { - label: option?.name || '', - value: item.value - }; - }) || [], - - removeChip: async (id: number) => { - const deleteId = statement?.statement_triples?.[key]?.find((triple: any) => triple.value === id)?.id; - await statementService.deleteRelationship(deleteId); - refreshStatement(); - }, - label: property.title, - isDisabled, - onAutocompleteChange: async (event: any, newValue: any[]) => { - const lastSelectedValue = newValue[newValue.length - 1]; - const currentTriples = statement?.statement_triples?.[key] || []; - - // Check if the lastSelectedValue exists in currentTriples - const existingTriple = currentTriples.find((triple: any) => triple.value === 
lastSelectedValue?.value); - - // Check if the value exists in newValue (excluding lastSelectedValue) - const isDuplicateInNewValue = newValue.slice(0, -1).some((value: any) => value.value === lastSelectedValue?.value); - - if (existingTriple || isDuplicateInNewValue) { - // If it exists in currentTriples or is duplicated in newValue, it's a removal - if (existingTriple) { - await statementService.deleteRelationship(Number(existingTriple.id)); - } - } else if (lastSelectedValue) { - // If it doesn't exist in currentTriples and isn't duplicated in newValue, it's a new selection - await statementService.assignRelationship({ - id: key, - connectivity_statement: statement.id, - relationship: key, - value: Number(lastSelectedValue.value) - }); - } - - refreshStatement(); - } - } : { - data: relationshipOption || [], - onChange2: async (value: any) => { - const previousValue = statement?.statement_triples?.[key]?.id; - if (previousValue && value === null) { - await statementService.deleteRelationship(previousValue); - } else if (value !== null && !previousValue) { - await statementService.assignRelationship({ - id: key, - connectivity_statement: statement.id, - relationship: key, - value: value.toString() - }); - } else if (value !== null && previousValue) { - await statementService.updateRelationship(previousValue, { - connectivity_statement: statement.id, - relationship: key, - value: value - }); - } - - if (value !== statement?.statement_triples?.[key]?.value) { - refreshStatement(); - } - }, - onBlur2: async (value: any) => { - const previousValue = statement?.statement_triples?.[key]?.id; - - if (value.trim() === "" && previousValue) { - await statementService.deleteRelationship(previousValue); - } else if (!previousValue && value.trim() !== "") { - await statementService.assignRelationship({ - id: key, - connectivity_statement: statement.id, - relationship: key, - value: value - }); - } else if (previousValue) { - await 
statementService.updateRelationship(previousValue, { - connectivity_statement: statement.id, - relationship: key, - value: value - }); - } - if (value !== statement?.statement_triples?.[key]?.value) { - refreshStatement(); - } - }, - isDisabled, - value: statement?.statement_triples?.[key]?.value || '', - label: property.title, - } - } + [key]: widgetConfig }; }, {}) }; @@ -923,10 +1164,6 @@ const StatementForm = forwardRef((props: any, ref: React.Ref { - const property = prop as { type?: string | string[]; title?: string }; - property.title = ""; - }) // Add null option to the fields which have null type in dropdown. Object.keys(copiedSchema.properties).forEach((key) => { @@ -969,7 +1206,7 @@ const StatementForm = forwardRef((props: any, ref: React.Ref(null); @@ -435,8 +440,8 @@ export default function CustomEntitiesDropdown({ {disabledReason} ) : ( - - {label} + + {label} - - {label} + + {label} { + if (e.key === 'Enter' && !multiline) { + e.preventDefault(); + } + }} onBlur={(e=>{ if (onBlur2) { onBlur2(e.target.value); diff --git a/applications/composer/frontend/src/helpers/uriValidation.ts b/applications/composer/frontend/src/helpers/uriValidation.ts new file mode 100644 index 00000000..4d1295cd --- /dev/null +++ b/applications/composer/frontend/src/helpers/uriValidation.ts @@ -0,0 +1,70 @@ +/** + * Validates if a URI is a valid DOI, PMID, PMCID, or URL + * @param uri - The URI string to validate + * @returns true if the URI is valid, false otherwise + */ +export const isValidURI = (uri: string): boolean => { + if (!uri || !uri.trim()) { + return false; + } + + const trimmedUri = uri.trim(); + + // DOI patterns + const doiPatterns = [ + /^10\.\d{4,}\/[a-zA-Z0-9\-._():]+(?:\/[a-zA-Z0-9\-._():]+)*$/, // Standard DOI format - no consecutive slashes + /^doi:10\.\d{4,}\/[a-zA-Z0-9\-._():]+(?:\/[a-zA-Z0-9\-._():]+)*$/i, // DOI with prefix + /^https?:\/\/doi\.org\/10\.\d{4,}\/[a-zA-Z0-9\-._():]+(?:\/[a-zA-Z0-9\-._():]+)*$/i, // DOI URL + 
/^https?:\/\/dx\.doi\.org\/10\.\d{4,}\/[a-zA-Z0-9\-._():]+(?:\/[a-zA-Z0-9\-._():]+)*$/i, // Alternative DOI URL + ]; + + // PMID patterns + const pmidPatterns = [ + /^PMID:\s*\d+$/i, // PMID with prefix + /^https?:\/\/pubmed\.ncbi\.nlm\.nih\.gov\/\d+\/?$/i, // PubMed URL + ]; + + // PMCID patterns + const pmcidPatterns = [ + /^PMC\d+$/i, // PMC ID format + /^PMCID:\s*PMC\d+$/i, // PMCID with prefix + /^https?:\/\/www\.ncbi\.nlm\.nih\.gov\/pmc\/articles\/PMC\d+\/?$/i, // PMC URL + ]; + + // URL pattern + const urlPattern = /^https?:\/\/[a-zA-Z0-9\-.]+(?::[0-9]+)?(?:\/[a-zA-Z0-9\-._~!$&'()*+,;=:@]+)*(?:\?[a-zA-Z0-9\-._~!$&'()*+,;=:@/?]*)?(?:#[a-zA-Z0-9\-._~!$&'()*+,;=:@/?]*)?$/i; + + // Check if it matches any of the valid patterns + const allPatterns = [...doiPatterns, ...pmidPatterns, ...pmcidPatterns, urlPattern]; + + return allPatterns.some(pattern => pattern.test(trimmedUri)); +}; + +/** + * Validates if a string is a valid URL (http/https) + * @param uri - The URI string to validate + * @returns true if the URI is a valid URL, false otherwise + */ +export const isValidUrl = (uri: string): boolean => { + const urlPattern = new RegExp('^(https?://)?'+ + '((([a-z\\d]([a-z\\d-]*[a-z\\d])*)\\.)+[a-z]{2,}|'+ + '((\\d{1,3}\\.){3}\\d{1,3}))'+ + '(\\:\\d+)?(/[-a-z\\d%_.~+]*)*'+ + '(\\?[;&a-z\\d%_.~+=-]*)?'+ + '(#[-a-z\\d_]*)?$','i'); + + return !!uri.match(urlPattern); +}; + +/** + * Gets the validation error message for URI fields + * @param fieldName - Optional name of the field (e.g., "provenance", "expert consultant") + * @returns A formatted error message string + */ +export const getURIValidationErrorMessage = (fieldName: string = "URI"): string => { + return `Invalid ${fieldName} format. 
Please enter a valid:\n` + + "• DOI (e.g., '10.1000/xyz123' or 'https://doi.org/10.1000/xyz123')\n" + + "• PMID (e.g., 'PMID:12345678' or 'https://pubmed.ncbi.nlm.nih.gov/12345678')\n" + + "• PMCID (e.g., 'PMC1234567' or 'https://www.ncbi.nlm.nih.gov/pmc/articles/PMC1234567')\n" + + "• URL (e.g., 'https://example.com')"; +}; diff --git a/applications/composer/frontend/src/services/ExpertConsultantService.tsx b/applications/composer/frontend/src/services/ExpertConsultantService.tsx new file mode 100644 index 00000000..e29b16a2 --- /dev/null +++ b/applications/composer/frontend/src/services/ExpertConsultantService.tsx @@ -0,0 +1,27 @@ +import { composerApi } from "./apis" +import { AbstractService } from "./AbstractService" + +// Temporary type until API client is regenerated +interface ExpertConsultant { + id: number; + uri: string; + connectivity_statement_id: number; +} + +class ExpertConsultantService extends AbstractService { + async save(expertConsultant:any) { + return composerApi.composerConnectivityStatementAddExpertConsultantCreate( + expertConsultant.statementId, + { uri: expertConsultant.uri } + ).then((response: any) => response.data) + } + async delete(expertConsultantId: number, connectivityStatementId: number) { + return await composerApi.composerConnectivityStatementDelExpertConsultantDestroy(expertConsultantId, connectivityStatementId).then((response: any) => response.data) + } + async getObject(id: string): Promise { + return {} as ExpertConsultant + } +} + +const expertConsultantService = new ExpertConsultantService() +export default expertConsultantService diff --git a/applications/composer/frontend/src/services/JsonSchema.ts b/applications/composer/frontend/src/services/JsonSchema.ts index 614b6647..215ac432 100644 --- a/applications/composer/frontend/src/services/JsonSchema.ts +++ b/applications/composer/frontend/src/services/JsonSchema.ts @@ -6,6 +6,7 @@ export let jsonSchemas = (function () { let noteSchema:any = null; let tagSchema:any = 
null; let provenanceSchema:any = null; + let expertConsultantsSchema:any = null; let viaSchema:any = null; let speciesSchema:any = null; @@ -17,6 +18,7 @@ export let jsonSchemas = (function () { noteSchema = resp.data.Note tagSchema = resp.data.Tag provenanceSchema = resp.data.Provenance + expertConsultantsSchema = resp.data.ExpertConsultant viaSchema = resp.data.Via speciesSchema = resp.data.Specie }) @@ -36,6 +38,9 @@ export let jsonSchemas = (function () { getProvenanceSchema: function () { return provenanceSchema }, + getExpertConsultantsSchema: function () { + return expertConsultantsSchema + }, getViaSchema: function () { return viaSchema }, diff --git a/applications/composer/frontend/src/services/StatementService.ts b/applications/composer/frontend/src/services/StatementService.ts index a7064ec2..7f73c316 100644 --- a/applications/composer/frontend/src/services/StatementService.ts +++ b/applications/composer/frontend/src/services/StatementService.ts @@ -143,6 +143,30 @@ class ConnectivityStatementService extends AbstractService { return composerApi.composerConnectivityStatementTripleDestroy(id).then((response: any) => response.data); } + async assignTextRelationship(data: any): Promise { + return composerApi.composerConnectivityStatementTextCreate(data).then((response: any) => response.data); + } + + async updateTextRelationship(id: any, data: any): Promise { + return composerApi.composerConnectivityStatementTextUpdate(id, data).then((response: any) => response.data); + } + + async deleteTextRelationship(id: any): Promise { + return composerApi.composerConnectivityStatementTextDestroy(id).then((response: any) => response.data); + } + + async assignAnatomicalEntityRelationship(data: any): Promise { + return composerApi.composerConnectivityStatementAnatomicalEntityCreate(data).then((response: any) => response.data); + } + + async updateAnatomicalEntityRelationship(id: any, data: any): Promise { + return 
composerApi.composerConnectivityStatementAnatomicalEntityUpdate(id, data).then((response: any) => response.data); + } + + async deleteAnatomicalEntityRelationship(id: any): Promise { + return composerApi.composerConnectivityStatementAnatomicalEntityDestroy(id).then((response: any) => response.data); + } + async doTransition(connectivityStatement: ConnectivityStatement, transition: string) { const id = connectivityStatement.id || -1; return composerApi.composerConnectivityStatementDoTransitionCreate(id, transition, connectivityStatement).then((response: any) => response.data); diff --git a/applications/composer/frontend/src/types/relationshipTypes.ts b/applications/composer/frontend/src/types/relationshipTypes.ts new file mode 100644 index 00000000..c4ef06a7 --- /dev/null +++ b/applications/composer/frontend/src/types/relationshipTypes.ts @@ -0,0 +1,20 @@ +/** + * Enum for relationship types + * These correspond to the backend RelationshipTypeEnum values + */ +export enum RelationshipType { + TRIPLE_SINGLE = 'triple_single', + TRIPLE_MULTI = 'triple_multi', + TEXT = 'text', + ANATOMICAL_MULTI = 'anatomical_multi', +} + +/** + * Interface for relationship option returned from the API + */ +export interface RelationshipOption { + id: string | number; + name: string; + type: RelationshipType; + options?: Array<{ id: number; name: string }>; +} diff --git a/applications/composer/nginx.conf b/applications/composer/nginx.conf index fc95ddd2..90e2985c 100644 --- a/applications/composer/nginx.conf +++ b/applications/composer/nginx.conf @@ -37,9 +37,8 @@ http { proxy_pass http://0.0.0.0:8000; proxy_set_header Host $host; proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; - proxy_set_header X-Forwarded-Proto $scheme; - client_max_body_size 50M; proxy_set_header X-Forwarded-Proto https; + client_max_body_size 50M; proxy_redirect off; proxy_connect_timeout 9000; diff --git a/applications/composer/openapi/openapi.yaml b/applications/composer/openapi/openapi.yaml index 
f7b43a1c..95c4c67e 100644 --- a/applications/composer/openapi/openapi.yaml +++ b/applications/composer/openapi/openapi.yaml @@ -405,6 +405,44 @@ paths: responses: '204': description: No response body + /api/composer/connectivity-statement/{id}/add_expert_consultant/: + post: + operationId: composer_connectivity_statement_add_expert_consultant_create + description: ConnectivityStatement + parameters: + - in: path + name: id + schema: + type: integer + description: A unique integer value identifying this connectivity statement. + required: true + tags: + - composer + requestBody: + content: + application/json: + schema: + $ref: '#/components/schemas/ExpertConsultantCreate' + application/x-www-form-urlencoded: + schema: + $ref: '#/components/schemas/ExpertConsultantCreate' + multipart/form-data: + schema: + $ref: '#/components/schemas/ExpertConsultantCreate' + required: true + security: + - tokenAuth: [] + - basicAuth: [] + - cookieAuth: [] + responses: + '200': + content: + application/json: + schema: + type: object + additionalProperties: {} + description: Unspecified response body + description: '' /api/composer/connectivity-statement/{id}/add_provenance/: post: operationId: composer_connectivity_statement_add_provenance_create @@ -560,6 +598,31 @@ paths: schema: $ref: '#/components/schemas/ConnectivityStatement' description: '' + /api/composer/connectivity-statement/{id}/del_expert_consultant/{expert_consultant_id}/: + delete: + operationId: composer_connectivity_statement_del_expert_consultant_destroy + description: ConnectivityStatement + parameters: + - in: path + name: expert_consultant_id + schema: + type: integer + required: true + - in: path + name: id + schema: + type: integer + description: A unique integer value identifying this connectivity statement. 
+ required: true + tags: + - composer + security: + - tokenAuth: [] + - basicAuth: [] + - cookieAuth: [] + responses: + '204': + description: No response body /api/composer/connectivity-statement/{id}/del_provenance/{provenance_id}/: delete: operationId: composer_connectivity_statement_del_provenance_destroy @@ -949,10 +1012,364 @@ paths: schema: $ref: '#/components/schemas/BulkActionResponse' description: '' + /api/composer/connectivityStatementAnatomicalEntity/: + get: + operationId: composer_connectivityStatementAnatomicalEntity_list + description: 'ConnectivityStatementAnatomicalEntity: Manage anatomical entity-based + relationships' + parameters: + - name: limit + required: false + in: query + description: Number of results to return per page. + schema: + type: integer + - name: offset + required: false + in: query + description: The initial index from which to return the results. + schema: + type: integer + tags: + - composer + security: + - tokenAuth: [] + - basicAuth: [] + - cookieAuth: [] + responses: + '200': + content: + application/json: + schema: + $ref: '#/components/schemas/PaginatedConnectivityStatementAnatomicalEntityList' + description: '' + post: + operationId: composer_connectivityStatementAnatomicalEntity_create + description: 'ConnectivityStatementAnatomicalEntity: Manage anatomical entity-based + relationships' + tags: + - composer + requestBody: + content: + application/json: + schema: + $ref: '#/components/schemas/ConnectivityStatementAnatomicalEntity' + application/x-www-form-urlencoded: + schema: + $ref: '#/components/schemas/ConnectivityStatementAnatomicalEntity' + multipart/form-data: + schema: + $ref: '#/components/schemas/ConnectivityStatementAnatomicalEntity' + required: true + security: + - tokenAuth: [] + - basicAuth: [] + - cookieAuth: [] + responses: + '201': + content: + application/json: + schema: + $ref: '#/components/schemas/ConnectivityStatementAnatomicalEntity' + description: '' + 
/api/composer/connectivityStatementAnatomicalEntity/{id}/: + get: + operationId: composer_connectivityStatementAnatomicalEntity_retrieve + description: 'ConnectivityStatementAnatomicalEntity: Manage anatomical entity-based + relationships' + parameters: + - in: path + name: id + schema: + type: integer + description: A unique integer value identifying this connectivity statement + anatomical entity. + required: true + tags: + - composer + security: + - tokenAuth: [] + - basicAuth: [] + - cookieAuth: [] + responses: + '200': + content: + application/json: + schema: + $ref: '#/components/schemas/ConnectivityStatementAnatomicalEntity' + description: '' + put: + operationId: composer_connectivityStatementAnatomicalEntity_update + description: 'ConnectivityStatementAnatomicalEntity: Manage anatomical entity-based + relationships' + parameters: + - in: path + name: id + schema: + type: integer + description: A unique integer value identifying this connectivity statement + anatomical entity. + required: true + tags: + - composer + requestBody: + content: + application/json: + schema: + $ref: '#/components/schemas/ConnectivityStatementAnatomicalEntity' + application/x-www-form-urlencoded: + schema: + $ref: '#/components/schemas/ConnectivityStatementAnatomicalEntity' + multipart/form-data: + schema: + $ref: '#/components/schemas/ConnectivityStatementAnatomicalEntity' + required: true + security: + - tokenAuth: [] + - basicAuth: [] + - cookieAuth: [] + responses: + '200': + content: + application/json: + schema: + $ref: '#/components/schemas/ConnectivityStatementAnatomicalEntity' + description: '' + patch: + operationId: composer_connectivityStatementAnatomicalEntity_partial_update + description: 'ConnectivityStatementAnatomicalEntity: Manage anatomical entity-based + relationships' + parameters: + - in: path + name: id + schema: + type: integer + description: A unique integer value identifying this connectivity statement + anatomical entity. 
+ required: true + tags: + - composer + requestBody: + content: + application/json: + schema: + $ref: '#/components/schemas/PatchedConnectivityStatementAnatomicalEntity' + application/x-www-form-urlencoded: + schema: + $ref: '#/components/schemas/PatchedConnectivityStatementAnatomicalEntity' + multipart/form-data: + schema: + $ref: '#/components/schemas/PatchedConnectivityStatementAnatomicalEntity' + security: + - tokenAuth: [] + - basicAuth: [] + - cookieAuth: [] + responses: + '200': + content: + application/json: + schema: + $ref: '#/components/schemas/ConnectivityStatementAnatomicalEntity' + description: '' + delete: + operationId: composer_connectivityStatementAnatomicalEntity_destroy + description: 'ConnectivityStatementAnatomicalEntity: Manage anatomical entity-based + relationships' + parameters: + - in: path + name: id + schema: + type: integer + description: A unique integer value identifying this connectivity statement + anatomical entity. + required: true + tags: + - composer + security: + - tokenAuth: [] + - basicAuth: [] + - cookieAuth: [] + responses: + '204': + description: No response body + /api/composer/connectivityStatementText/: + get: + operationId: composer_connectivityStatementText_list + description: 'ConnectivityStatementText: Manage text-based relationships' + parameters: + - name: limit + required: false + in: query + description: Number of results to return per page. + schema: + type: integer + - name: offset + required: false + in: query + description: The initial index from which to return the results. 
+ schema: + type: integer + tags: + - composer + security: + - tokenAuth: [] + - basicAuth: [] + - cookieAuth: [] + responses: + '200': + content: + application/json: + schema: + $ref: '#/components/schemas/PaginatedConnectivityStatementTextList' + description: '' + post: + operationId: composer_connectivityStatementText_create + description: 'ConnectivityStatementText: Manage text-based relationships' + tags: + - composer + requestBody: + content: + application/json: + schema: + $ref: '#/components/schemas/ConnectivityStatementText' + application/x-www-form-urlencoded: + schema: + $ref: '#/components/schemas/ConnectivityStatementText' + multipart/form-data: + schema: + $ref: '#/components/schemas/ConnectivityStatementText' + required: true + security: + - tokenAuth: [] + - basicAuth: [] + - cookieAuth: [] + responses: + '201': + content: + application/json: + schema: + $ref: '#/components/schemas/ConnectivityStatementText' + description: '' + /api/composer/connectivityStatementText/{id}/: + get: + operationId: composer_connectivityStatementText_retrieve + description: 'ConnectivityStatementText: Manage text-based relationships' + parameters: + - in: path + name: id + schema: + type: integer + description: A unique integer value identifying this connectivity statement + text. + required: true + tags: + - composer + security: + - tokenAuth: [] + - basicAuth: [] + - cookieAuth: [] + responses: + '200': + content: + application/json: + schema: + $ref: '#/components/schemas/ConnectivityStatementText' + description: '' + put: + operationId: composer_connectivityStatementText_update + description: 'ConnectivityStatementText: Manage text-based relationships' + parameters: + - in: path + name: id + schema: + type: integer + description: A unique integer value identifying this connectivity statement + text. 
+ required: true + tags: + - composer + requestBody: + content: + application/json: + schema: + $ref: '#/components/schemas/ConnectivityStatementText' + application/x-www-form-urlencoded: + schema: + $ref: '#/components/schemas/ConnectivityStatementText' + multipart/form-data: + schema: + $ref: '#/components/schemas/ConnectivityStatementText' + required: true + security: + - tokenAuth: [] + - basicAuth: [] + - cookieAuth: [] + responses: + '200': + content: + application/json: + schema: + $ref: '#/components/schemas/ConnectivityStatementText' + description: '' + patch: + operationId: composer_connectivityStatementText_partial_update + description: 'ConnectivityStatementText: Manage text-based relationships' + parameters: + - in: path + name: id + schema: + type: integer + description: A unique integer value identifying this connectivity statement + text. + required: true + tags: + - composer + requestBody: + content: + application/json: + schema: + $ref: '#/components/schemas/PatchedConnectivityStatementText' + application/x-www-form-urlencoded: + schema: + $ref: '#/components/schemas/PatchedConnectivityStatementText' + multipart/form-data: + schema: + $ref: '#/components/schemas/PatchedConnectivityStatementText' + security: + - tokenAuth: [] + - basicAuth: [] + - cookieAuth: [] + responses: + '200': + content: + application/json: + schema: + $ref: '#/components/schemas/ConnectivityStatementText' + description: '' + delete: + operationId: composer_connectivityStatementText_destroy + description: 'ConnectivityStatementText: Manage text-based relationships' + parameters: + - in: path + name: id + schema: + type: integer + description: A unique integer value identifying this connectivity statement + text. 
+ required: true + tags: + - composer + security: + - tokenAuth: [] + - basicAuth: [] + - cookieAuth: [] + responses: + '204': + description: No response body /api/composer/connectivityStatementTriple/: get: operationId: composer_connectivityStatementTriple_list - description: 'ConnectivityStatementTriple:' + description: 'ConnectivityStatementTriple: Manage triple-based relationships' parameters: - name: limit required: false @@ -981,7 +1398,7 @@ paths: description: '' post: operationId: composer_connectivityStatementTriple_create - description: 'ConnectivityStatementTriple:' + description: 'ConnectivityStatementTriple: Manage triple-based relationships' tags: - composer requestBody: @@ -1010,7 +1427,7 @@ paths: /api/composer/connectivityStatementTriple/{id}/: get: operationId: composer_connectivityStatementTriple_retrieve - description: 'ConnectivityStatementTriple:' + description: 'ConnectivityStatementTriple: Manage triple-based relationships' parameters: - in: path name: id @@ -1034,7 +1451,7 @@ paths: description: '' put: operationId: composer_connectivityStatementTriple_update - description: 'ConnectivityStatementTriple:' + description: 'ConnectivityStatementTriple: Manage triple-based relationships' parameters: - in: path name: id @@ -1070,7 +1487,7 @@ paths: description: '' patch: operationId: composer_connectivityStatementTriple_partial_update - description: 'ConnectivityStatementTriple:' + description: 'ConnectivityStatementTriple: Manage triple-based relationships' parameters: - in: path name: id @@ -1105,7 +1522,7 @@ paths: description: '' delete: operationId: composer_connectivityStatementTriple_destroy - description: 'ConnectivityStatementTriple:' + description: 'ConnectivityStatementTriple: Manage triple-based relationships' parameters: - in: path name: id @@ -3283,6 +3700,10 @@ components: type: array items: $ref: '#/components/schemas/Provenance' + expert_consultants: + type: array + items: + $ref: '#/components/schemas/ExpertConsultant' owner: 
allOf: - $ref: '#/components/schemas/User' @@ -3409,6 +3830,12 @@ components: statement_triples: type: string readOnly: true + statement_texts: + type: string + readOnly: true + statement_anatomical_entities: + type: string + readOnly: true required: - available_transitions - entities_journey @@ -3425,11 +3852,36 @@ components: - sentence - sex - state + - statement_anatomical_entities - statement_preview + - statement_texts - statement_triples - tags - ConnectivityStatementTriple: + ConnectivityStatementAnatomicalEntity: + type: object + description: Serializer for anatomical entity-based relationships + properties: + id: + type: integer + readOnly: true + connectivity_statement: + type: integer + relationship: + type: integer + anatomical_entities: + type: array + items: + type: integer + writeOnly: true + writeOnly: true + required: + - anatomical_entities + - connectivity_statement + - id + - relationship + ConnectivityStatementText: type: object + description: Serializer for text-based relationships (free text area) properties: id: type: integer @@ -3438,14 +3890,36 @@ components: type: integer relationship: type: integer - value: + text: type: string + required: + - connectivity_statement + - id + - relationship + - text + ConnectivityStatementTriple: + type: object + description: Serializer for triple-based relationships (single/multi select + from triples) + properties: + id: + type: integer readOnly: true + connectivity_statement: + type: integer + relationship: + type: integer + triples: + type: array + items: + type: integer + writeOnly: true + writeOnly: true required: - connectivity_statement - id - relationship - - value + - triples ConnectivityStatementUpdate: type: object description: Connectivity Statement @@ -3662,6 +4136,29 @@ components: - AFFERENT-T - UNKNOWN type: string + ExpertConsultant: + type: object + description: Expert Consultant + properties: + id: + type: integer + readOnly: true + uri: + type: string + connectivity_statement_id: + 
type: integer + required: + - connectivity_statement_id + - id + - uri + ExpertConsultantCreate: + type: object + description: Serializer for creating expert consultant via request body + properties: + uri: + type: string + required: + - uri GraphState: type: object properties: @@ -3918,6 +4415,46 @@ components: type: array items: $ref: '#/components/schemas/BaseConnectivityStatement' + PaginatedConnectivityStatementAnatomicalEntityList: + type: object + properties: + count: + type: integer + example: 123 + next: + type: string + nullable: true + format: uri + example: http://api.example.org/accounts/?offset=400&limit=100 + previous: + type: string + nullable: true + format: uri + example: http://api.example.org/accounts/?offset=200&limit=100 + results: + type: array + items: + $ref: '#/components/schemas/ConnectivityStatementAnatomicalEntity' + PaginatedConnectivityStatementTextList: + type: object + properties: + count: + type: integer + example: 123 + next: + type: string + nullable: true + format: uri + example: http://api.example.org/accounts/?offset=400&limit=100 + previous: + type: string + nullable: true + format: uri + example: http://api.example.org/accounts/?offset=200&limit=100 + results: + type: array + items: + $ref: '#/components/schemas/ConnectivityStatementText' PaginatedConnectivityStatementTripleList: type: object properties: @@ -4223,6 +4760,10 @@ components: type: array items: $ref: '#/components/schemas/Provenance' + expert_consultants: + type: array + items: + $ref: '#/components/schemas/ExpertConsultant' owner: allOf: - $ref: '#/components/schemas/User' @@ -4349,8 +4890,15 @@ components: statement_triples: type: string readOnly: true - PatchedConnectivityStatementTriple: + statement_texts: + type: string + readOnly: true + statement_anatomical_entities: + type: string + readOnly: true + PatchedConnectivityStatementAnatomicalEntity: type: object + description: Serializer for anatomical entity-based relationships properties: id: type: integer 
@@ -4359,9 +4907,43 @@ components: type: integer relationship: type: integer - value: + anatomical_entities: + type: array + items: + type: integer + writeOnly: true + writeOnly: true + PatchedConnectivityStatementText: + type: object + description: Serializer for text-based relationships (free text area) + properties: + id: + type: integer + readOnly: true + connectivity_statement: + type: integer + relationship: + type: integer + text: type: string + PatchedConnectivityStatementTriple: + type: object + description: Serializer for triple-based relationships (single/multi select + from triples) + properties: + id: + type: integer readOnly: true + connectivity_statement: + type: integer + relationship: + type: integer + triples: + type: array + items: + type: integer + writeOnly: true + writeOnly: true PatchedConnectivityStatementUpdate: type: object description: Connectivity Statement @@ -4568,7 +5150,7 @@ components: external_ref: type: string nullable: true - maxLength: 20 + maxLength: 100 tags: type: array items: @@ -4777,8 +5359,9 @@ components: - type RelationshipTypeEnum: enum: - - single - - multi + - triple_single + - triple_multi + - anatomical_multi - text type: string Sentence: @@ -4809,7 +5392,7 @@ components: external_ref: type: string nullable: true - maxLength: 20 + maxLength: 100 tags: type: array items: diff --git a/applications/composer/scripts/setup_cleanup_cron.sh b/applications/composer/scripts/setup_cleanup_cron.sh new file mode 100644 index 00000000..882fb44f --- /dev/null +++ b/applications/composer/scripts/setup_cleanup_cron.sh @@ -0,0 +1,40 @@ +#!/bin/bash +# Script to set up a daily cron job for cleaning up old ingestion files +# This script is idempotent - it only adds the cron job if it doesn't exist + +set -e + +# Configuration +CRON_COMMAND="cd /app && python manage.py cleanup_old_files --days 30" +CRON_SCHEDULE="0 2 * * *" # Run at 2 AM every day +CRON_JOB="$CRON_SCHEDULE $CRON_COMMAND" + +echo "Setting up daily cleanup cron job..." 
+ +# Check if cron is installed +if ! command -v crontab &> /dev/null; then + echo "ERROR: crontab command not found. Please install cron." + exit 1 +fi + +# Get current crontab +CURRENT_CRONTAB=$(crontab -l 2>/dev/null || echo "") + +# Check if the job already exists +if echo "$CURRENT_CRONTAB" | grep -F "cleanup_old_files" > /dev/null; then + echo "Cleanup cron job already exists. Skipping..." + echo "Current entry:" + echo "$CURRENT_CRONTAB" | grep -F "cleanup_old_files" +else + # Add the new cron job + (echo "$CURRENT_CRONTAB"; echo "$CRON_JOB") | crontab - + echo "Successfully added cleanup cron job:" + echo "$CRON_JOB" +fi + +echo "" +echo "Current crontab:" +crontab -l + +echo "" +echo "Setup complete!" diff --git a/applications/composer/tasks/neurondm/Dockerfile b/applications/composer/tasks/neurondm/Dockerfile new file mode 100644 index 00000000..bf8ffef2 --- /dev/null +++ b/applications/composer/tasks/neurondm/Dockerfile @@ -0,0 +1,47 @@ +ARG COMPOSER +FROM $COMPOSER as composer-files + +# Stage 2: Build the lightweight neurondm task image +FROM python:3.12-slim + +WORKDIR /usr/src/app + +# Install system dependencies including build tools needed for pyxattr +RUN apt-get update && apt-get install -y \ + git \ + gcc \ + python3-dev \ + libattr1-dev \ + && rm -rf /var/lib/apt/lists/* + +# Install ONLY neurondm and pyontutils packages +RUN pip install --no-cache-dir \ + neurondm==0.1.8 \ + pyontutils==0.1.38 \ + rdflib + +# Copy only the necessary composer modules from the composer image +# These are pure Python modules with no Django dependencies +COPY --from=composer-files /usr/src/app/composer/__init__.py /usr/src/app/composer/ +COPY --from=composer-files /usr/src/app/composer/pure_enums.py /usr/src/app/composer/ +COPY --from=composer-files /usr/src/app/composer/services/__init__.py /usr/src/app/composer/services/ +COPY --from=composer-files /usr/src/app/composer/services/cs_ingestion/__init__.py /usr/src/app/composer/services/cs_ingestion/ +COPY 
--from=composer-files /usr/src/app/composer/services/cs_ingestion/exceptions.py /usr/src/app/composer/services/cs_ingestion/ +COPY --from=composer-files /usr/src/app/composer/services/cs_ingestion/models.py /usr/src/app/composer/services/cs_ingestion/ +COPY --from=composer-files /usr/src/app/composer/services/cs_ingestion/logging_service.py /usr/src/app/composer/services/cs_ingestion/ +COPY --from=composer-files /usr/src/app/composer/services/cs_ingestion/neurondm_script.py /usr/src/app/composer/services/cs_ingestion/ +COPY --from=composer-files /usr/src/app/composer/services/cs_ingestion/helpers/__init__.py /usr/src/app/composer/services/cs_ingestion/helpers/ +COPY --from=composer-files /usr/src/app/composer/services/cs_ingestion/helpers/common_helpers.py /usr/src/app/composer/services/cs_ingestion/helpers/ + +# Copy the standalone script from build context +COPY process_neurondm_standalone.py /usr/src/app/ + +# Clone NIF-Ontology repository (required by neurondm) +RUN git clone -b neurons https://github.com/SciCrunch/NIF-Ontology.git /usr/src/app/NIF-Ontology + +# Set ontology local repo +RUN ontutils set ontology-local-repo /usr/src/app/NIF-Ontology/ + +ENV PYTHONUNBUFFERED=1 + +CMD ["python", "process_neurondm_standalone.py"] diff --git a/applications/composer/tasks/neurondm/process_neurondm_standalone.py b/applications/composer/tasks/neurondm/process_neurondm_standalone.py new file mode 100644 index 00000000..6f3140f1 --- /dev/null +++ b/applications/composer/tasks/neurondm/process_neurondm_standalone.py @@ -0,0 +1,140 @@ +#!/usr/bin/env python3 +""" +Standalone script to run neurondm processing without Django. +This script is designed to run in the composer-neurondm task container. 
+""" +import sys +import json +import argparse +import logging + +sys.path.insert(0, '/usr/src/app') +from composer.services.cs_ingestion.neurondm_script import main as get_statements_from_neurondm +from composer.services.cs_ingestion.logging_service import LoggerService +from composer.services.cs_ingestion.models import convert_statement_to_json_serializable + +logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(name)s - %(levelname)s - %(message)s') +logger = logging.getLogger(__name__) + + +def main(): + parser = argparse.ArgumentParser( + description='Process NeuroDM neurons and save to file' + ) + parser.add_argument( + '--input_filepath', + type=str, + required=True, + help='Path to input composer data JSON file (from Step 0)' + ) + parser.add_argument( + '--output_filepath', + type=str, + required=True, + help='Path to output JSON file with processed statements' + ) + parser.add_argument( + '--full_imports', + nargs='*', + default=[], + help='List of full imports' + ) + parser.add_argument( + '--label_imports', + nargs='*', + default=[], + help='List of label imports' + ) + parser.add_argument( + '--population_file', + type=str, + help='Path to population URIs file' + ) + parser.add_argument( + '--anomalies_csv_output', + type=str, + required=True, + help='Path to output CSV file for anomalies' + ) + parser.add_argument( + '--ingested_csv_output', + type=str, + required=True, + help='Path to output CSV file for ingested statements' + ) + + args = parser.parse_args() + + # Read population URIs if provided + population_uris = None + if args.population_file: + try: + with open(args.population_file, 'r', encoding='utf-8') as f: + population_uris = set(line.strip() for line in f if line.strip()) + logger.info(f"Loaded {len(population_uris)} population URIs from {args.population_file}") + except Exception as e: + logger.error(f"Error reading population file: {e}") + sys.exit(1) + + # Read composer data (custom relationships and alert URIs) + try: + 
with open(args.input_filepath, 'r', encoding='utf-8') as f: + composer_data = json.load(f) + + custom_relationships = composer_data.get('custom_relationships', []) + statement_alert_uris = set(composer_data.get('statement_alert_uris', [])) + + logger.info( + f"Loaded {len(custom_relationships)} custom relationships and " + f"{len(statement_alert_uris)} alert URIs from {args.input_filepath}" + ) + except Exception as e: + logger.error(f"Error reading composer data file: {e}") + sys.exit(1) + + # Initialize logger service with explicit paths (no Django dependency) + logger_service = LoggerService( + ingestion_anomalies_log_path=args.anomalies_csv_output, + ingested_log_path=args.ingested_csv_output + ) + + try: + logger.info("Starting NeuroDM processing...") + + # Call neurondm_script directly + statements_list = get_statements_from_neurondm( + full_imports=args.full_imports, + label_imports=args.label_imports, + logger_service_param=logger_service, + statement_alert_uris=statement_alert_uris, + population_uris=population_uris, + custom_relationships=custom_relationships, + ) + + logger.info(f"Processed {len(statements_list)} statements") + + # Convert statements to JSON-serializable format + logger.info("Converting statements to JSON-serializable format...") + json_statements = [convert_statement_to_json_serializable(stmt) for stmt in statements_list] + + # Save to JSON file + with open(args.output_filepath, 'w', encoding='utf-8') as f: + json.dump(json_statements, f, indent=2) + + logger.info(f"Successfully saved statements to {args.output_filepath}") + + + logger_service.write_anomalies_to_file() + logger.info(f"Saved {len(logger_service.anomalies)} anomalies to {logger_service.anomalies_log_path}") + + sys.exit(0) + + except Exception as e: + logger.error(f"Processing failed: {e}", exc_info=True) + sys.exit(1) + + +if __name__ == '__main__': + main() + +# This is a comment that I change every time I want to force the image to be rebuilt! 
diff --git a/applications/composer/tasks/notify/notify.py b/applications/composer/tasks/notify/notify.py index 7f079c6e..6bd0ca29 100644 --- a/applications/composer/tasks/notify/notify.py +++ b/applications/composer/tasks/notify/notify.py @@ -13,12 +13,22 @@ "smtp_host": "mail.metacell", "smtp_port": 587, "sender_email": "noreply@metacell.us", - "subject_template": "Composer export completed with status: {status}", - "message_success": "You can download the exported file here:\n{file_url}\n", - "message_failure": ( - "You can retry the export from the dashboard. " - "If the issue persists, please contact us through our support channels.\n" - ) + "export": { + "subject_template": "Composer export completed with status: {status}", + "message_success": "You can download the exported file here:\n{file_url}\n", + "message_failure": ( + "You can retry the export from the dashboard. " + "If the issue persists, please contact us through our support channels.\n" + ) + }, + "ingestion": { + "subject_template": "Composer connectivity statements ingestion completed with status: {status}", + "message_success": "The connectivity statements have been successfully ingested into the database.\n", + "message_failure": ( + "You can retry the ingestion from the dashboard. 
" + "If the issue persists, please contact us through our support channels.\n" + ) + } } # === Read from environment === @@ -34,22 +44,27 @@ sys.exit(1) recipient = payload.get("email", "").strip() +workflow_type = payload.get("type", "export").strip() # 'export' or 'ingestion' file_url = payload.get("file_url", "").strip() if not recipient: logging.error("[NOTIFY] No valid recipient email provided in payload.") sys.exit(1) +if workflow_type not in CONFIG: + logging.error(f"[NOTIFY] Unknown workflow type: {workflow_type}") + sys.exit(1) + +# === Get configuration for workflow type === +workflow_config = CONFIG[workflow_type] + # === Prepare message === sender = CONFIG["sender_email"] -subject = CONFIG["subject_template"].format(status=status) -include_file = status.lower() == "succeeded" +subject = workflow_config["subject_template"].format(status=status) +include_file = status.lower() == "succeeded" and file_url -# === Validate file_url === +# === Validate file_url if needed === if include_file: - if not file_url: - logging.error("[NOTIFY] file_url is required for successful workflows.") - sys.exit(1) try: response = requests.head(file_url, allow_redirects=True, timeout=5) if response.status_code >= 400: @@ -62,12 +77,15 @@ # === Compose email body === body_lines = [ "Dear user,\n", - f"Your composer export finished with status: {status}.\n" + f"Your composer {workflow_type} finished with status: {status}.\n" ] -if include_file: - body_lines.append(CONFIG["message_success"].format(file_url=file_url)) +if status.lower() == "succeeded": + if include_file: + body_lines.append(workflow_config["message_success"].format(file_url=file_url)) + else: + body_lines.append(workflow_config["message_success"]) else: - body_lines.append(CONFIG["message_failure"]) + body_lines.append(workflow_config["message_failure"]) # === Send email === msg = EmailMessage() diff --git a/deployment/codefresh-dev.yaml b/deployment/codefresh-dev.yaml index 7caa89da..48fdc98e 100644 --- 
a/deployment/codefresh-dev.yaml +++ b/deployment/codefresh-dev.yaml @@ -34,8 +34,8 @@ steps: working_directory: . commands: - bash cloud-harness/install.sh - - harness-deployment cloud-harness . -d ${{DOMAIN}} -r ${{REGISTRY}} -rs ${{REGISTRY_SECRET}} - -n ${{NAMESPACE}} --write-env -e dev -i composer + - harness-deployment cloud-harness . -d ${{DOMAIN}} -r ${{REGISTRY}} -rs '${{REGISTRY_SECRET}}' + -n ${{NAMESPACE}} --write-env -e dev --cache-url '${{IMAGE_CACHE_URL}}' -i composer - cat deployment/.env >> ${{CF_VOLUME_PATH}}/env_vars_to_export - cat ${{CF_VOLUME_PATH}}/env_vars_to_export prepare_deployment_view: @@ -47,11 +47,77 @@ steps: image: codefresh/cfstep-helm:3.6.2 stage: prepare title: View helm chart - build_base_images: - title: Build base images + build_application_images_0: type: parallel stage: build steps: + accounts: + type: build + stage: build + dockerfile: Dockerfile + registry: '${{CODEFRESH_REGISTRY}}' + buildkit: true + build_arguments: + - NOCACHE=${{CF_BUILD_ID}} + image_name: cloud-harness/accounts + title: Accounts + working_directory: ./cloud-harness/applications/accounts + tags: + - '${{ACCOUNTS_TAG}}' + - '${{DEPLOYMENT_PUBLISH_TAG}}-dev' + - '${{CF_BRANCH_TAG_NORMALIZED_LOWER_CASE}}' + when: + condition: + any: + buildDoesNotExist: includes('${{ACCOUNTS_TAG_EXISTS}}', '{{ACCOUNTS_TAG_EXISTS}}') + == true + forceNoCache: includes('${{ACCOUNTS_TAG_FORCE_BUILD}}', '{{ACCOUNTS_TAG_FORCE_BUILD}}') + == false + cloudharness-base: + type: build + stage: build + dockerfile: infrastructure/base-images/cloudharness-base/Dockerfile + registry: '${{CODEFRESH_REGISTRY}}' + buildkit: true + build_arguments: + - NOCACHE=${{CF_BUILD_ID}} + image_name: cloud-harness/cloudharness-base + title: Cloudharness base + working_directory: ./cloud-harness + tags: + - '${{CLOUDHARNESS_BASE_TAG}}' + - '${{DEPLOYMENT_PUBLISH_TAG}}-dev' + - '${{CF_BRANCH_TAG_NORMALIZED_LOWER_CASE}}' + when: + condition: + any: + buildDoesNotExist: 
includes('${{CLOUDHARNESS_BASE_TAG_EXISTS}}', '{{CLOUDHARNESS_BASE_TAG_EXISTS}}') + == true + forceNoCache: includes('${{CLOUDHARNESS_BASE_TAG_FORCE_BUILD}}', '{{CLOUDHARNESS_BASE_TAG_FORCE_BUILD}}') + == false + test-e2e: + type: build + stage: build + dockerfile: Dockerfile + registry: '${{CODEFRESH_REGISTRY}}' + buildkit: true + build_arguments: + - NOCACHE=${{CF_BUILD_ID}} + image_name: cloud-harness/test-e2e + title: Test e2e + working_directory: ./cloud-harness/test/test-e2e + tags: + - '${{TEST_E2E_TAG}}' + - '${{DEPLOYMENT_PUBLISH_TAG}}-dev' + - '${{CF_BRANCH_TAG_NORMALIZED_LOWER_CASE}}' + - latest + when: + condition: + any: + buildDoesNotExist: includes('${{TEST_E2E_TAG_EXISTS}}', '{{TEST_E2E_TAG_EXISTS}}') + == true + forceNoCache: includes('${{TEST_E2E_TAG_FORCE_BUILD}}', '{{TEST_E2E_TAG_FORCE_BUILD}}') + == false cloudharness-frontend-build: type: build stage: build @@ -59,13 +125,14 @@ steps: registry: '${{CODEFRESH_REGISTRY}}' buildkit: true build_arguments: - - DOMAIN=${{DOMAIN}} - NOCACHE=${{CF_BUILD_ID}} - - REGISTRY=${{REGISTRY}}/cloudharness/ - image_name: cloudharness/cloudharness-frontend-build + image_name: cloud-harness/cloudharness-frontend-build title: Cloudharness frontend build working_directory: ./cloud-harness - tag: '${{CLOUDHARNESS_FRONTEND_BUILD_TAG}}' + tags: + - '${{CLOUDHARNESS_FRONTEND_BUILD_TAG}}' + - '${{DEPLOYMENT_PUBLISH_TAG}}-dev' + - '${{CF_BRANCH_TAG_NORMALIZED_LOWER_CASE}}' when: condition: any: @@ -73,79 +140,85 @@ steps: '{{CLOUDHARNESS_FRONTEND_BUILD_TAG_EXISTS}}') == true forceNoCache: includes('${{CLOUDHARNESS_FRONTEND_BUILD_TAG_FORCE_BUILD}}', '{{CLOUDHARNESS_FRONTEND_BUILD_TAG_FORCE_BUILD}}') == false - cloudharness-base-debian: + composer-notify: type: build stage: build - dockerfile: infrastructure/base-images/cloudharness-base-debian/Dockerfile + dockerfile: Dockerfile registry: '${{CODEFRESH_REGISTRY}}' buildkit: true build_arguments: - - DOMAIN=${{DOMAIN}} - NOCACHE=${{CF_BUILD_ID}} - - 
REGISTRY=${{REGISTRY}}/cloudharness/ - image_name: cloudharness/cloudharness-base-debian - title: Cloudharness base debian - working_directory: ./cloud-harness - tag: '${{CLOUDHARNESS_BASE_DEBIAN_TAG}}' + image_name: cloud-harness/composer-notify + title: Composer notify + working_directory: ./applications/composer/tasks/notify + tags: + - '${{COMPOSER_NOTIFY_TAG}}' + - '${{DEPLOYMENT_PUBLISH_TAG}}-dev' + - '${{CF_BRANCH_TAG_NORMALIZED_LOWER_CASE}}' when: condition: any: - buildDoesNotExist: includes('${{CLOUDHARNESS_BASE_DEBIAN_TAG_EXISTS}}', - '{{CLOUDHARNESS_BASE_DEBIAN_TAG_EXISTS}}') == true - forceNoCache: includes('${{CLOUDHARNESS_BASE_DEBIAN_TAG_FORCE_BUILD}}', - '{{CLOUDHARNESS_BASE_DEBIAN_TAG_FORCE_BUILD}}') == false - build_static_images: - title: Build static images + buildDoesNotExist: includes('${{COMPOSER_NOTIFY_TAG_EXISTS}}', '{{COMPOSER_NOTIFY_TAG_EXISTS}}') + == true + forceNoCache: includes('${{COMPOSER_NOTIFY_TAG_FORCE_BUILD}}', '{{COMPOSER_NOTIFY_TAG_FORCE_BUILD}}') + == false + title: Build parallel step 1 + build_application_images_1: type: parallel stage: build steps: - cloudharness-django: + test-api: type: build stage: build - dockerfile: Dockerfile + dockerfile: test/test-api/Dockerfile registry: '${{CODEFRESH_REGISTRY}}' buildkit: true build_arguments: - - DOMAIN=${{DOMAIN}} - NOCACHE=${{CF_BUILD_ID}} - - REGISTRY=${{REGISTRY}}/cloudharness/ - - CLOUDHARNESS_BASE_DEBIAN=${{REGISTRY}}/cloudharness/cloudharness-base-debian:${{CLOUDHARNESS_BASE_DEBIAN_TAG}} - image_name: cloudharness/cloudharness-django - title: Cloudharness django - working_directory: ./cloud-harness/infrastructure/common-images/cloudharness-django - tag: '${{CLOUDHARNESS_DJANGO_TAG}}' + - CLOUDHARNESS_BASE=${{REGISTRY}}/cloud-harness/cloudharness-base:${{CLOUDHARNESS_BASE_TAG}} + image_name: cloud-harness/test-api + title: Test api + working_directory: ./cloud-harness + tags: + - '${{TEST_API_TAG}}' + - '${{DEPLOYMENT_PUBLISH_TAG}}-dev' + - 
'${{CF_BRANCH_TAG_NORMALIZED_LOWER_CASE}}' + - latest when: condition: any: - buildDoesNotExist: includes('${{CLOUDHARNESS_DJANGO_TAG_EXISTS}}', '{{CLOUDHARNESS_DJANGO_TAG_EXISTS}}') + buildDoesNotExist: includes('${{TEST_API_TAG_EXISTS}}', '{{TEST_API_TAG_EXISTS}}') == true - forceNoCache: includes('${{CLOUDHARNESS_DJANGO_TAG_FORCE_BUILD}}', '{{CLOUDHARNESS_DJANGO_TAG_FORCE_BUILD}}') + forceNoCache: includes('${{TEST_API_TAG_FORCE_BUILD}}', '{{TEST_API_TAG_FORCE_BUILD}}') == false - build_application_images: - type: parallel - stage: build - steps: - accounts: + cloudharness-django: type: build stage: build dockerfile: Dockerfile registry: '${{CODEFRESH_REGISTRY}}' buildkit: true build_arguments: - - DOMAIN=${{DOMAIN}} - NOCACHE=${{CF_BUILD_ID}} - - REGISTRY=${{REGISTRY}}/cloudharness/ - image_name: cloudharness/accounts - title: Accounts - working_directory: ./cloud-harness/applications/accounts - tag: '${{ACCOUNTS_TAG}}' + - CLOUDHARNESS_BASE=${{REGISTRY}}/cloud-harness/cloudharness-base:${{CLOUDHARNESS_BASE_TAG}} + image_name: cloud-harness/cloudharness-django + title: Cloudharness django + working_directory: ./cloud-harness/infrastructure/common-images/cloudharness-django + tags: + - '${{CLOUDHARNESS_DJANGO_TAG}}' + - '${{DEPLOYMENT_PUBLISH_TAG}}-dev' + - '${{CF_BRANCH_TAG_NORMALIZED_LOWER_CASE}}' when: condition: any: - buildDoesNotExist: includes('${{ACCOUNTS_TAG_EXISTS}}', '{{ACCOUNTS_TAG_EXISTS}}') + buildDoesNotExist: includes('${{CLOUDHARNESS_DJANGO_TAG_EXISTS}}', '{{CLOUDHARNESS_DJANGO_TAG_EXISTS}}') == true - forceNoCache: includes('${{ACCOUNTS_TAG_FORCE_BUILD}}', '{{ACCOUNTS_TAG_FORCE_BUILD}}') + forceNoCache: includes('${{CLOUDHARNESS_DJANGO_TAG_FORCE_BUILD}}', '{{CLOUDHARNESS_DJANGO_TAG_FORCE_BUILD}}') == false + title: Build parallel step 2 + build_application_images_2: + type: parallel + stage: build + steps: composer: type: build stage: build @@ -153,15 +226,16 @@ steps: registry: '${{CODEFRESH_REGISTRY}}' buildkit: true build_arguments: - - 
DOMAIN=${{DOMAIN}} - NOCACHE=${{CF_BUILD_ID}} - - REGISTRY=${{REGISTRY}}/cloudharness/ - - CLOUDHARNESS_FRONTEND_BUILD=${{REGISTRY}}/cloudharness/cloudharness-frontend-build:${{CLOUDHARNESS_FRONTEND_BUILD_TAG}} - - CLOUDHARNESS_DJANGO=${{REGISTRY}}/cloudharness/cloudharness-django:${{CLOUDHARNESS_DJANGO_TAG}} - image_name: cloudharness/composer + - CLOUDHARNESS_FRONTEND_BUILD=${{REGISTRY}}/cloud-harness/cloudharness-frontend-build:${{CLOUDHARNESS_FRONTEND_BUILD_TAG}} + - CLOUDHARNESS_DJANGO=${{REGISTRY}}/cloud-harness/cloudharness-django:${{CLOUDHARNESS_DJANGO_TAG}} + image_name: cloud-harness/composer title: Composer working_directory: ./applications/composer - tag: '${{COMPOSER_TAG}}' + tags: + - '${{COMPOSER_TAG}}' + - '${{DEPLOYMENT_PUBLISH_TAG}}-dev' + - '${{CF_BRANCH_TAG_NORMALIZED_LOWER_CASE}}' when: condition: any: @@ -169,27 +243,35 @@ steps: == true forceNoCache: includes('${{COMPOSER_TAG_FORCE_BUILD}}', '{{COMPOSER_TAG_FORCE_BUILD}}') == false - composer-notify: + title: Build parallel step 3 + build_application_images_3: + type: parallel + stage: build + steps: + composer-neurondm: type: build stage: build dockerfile: Dockerfile registry: '${{CODEFRESH_REGISTRY}}' buildkit: true build_arguments: - - DOMAIN=${{DOMAIN}} - NOCACHE=${{CF_BUILD_ID}} - - REGISTRY=${{REGISTRY}}/cloudharness/ - image_name: cloudharness/composer-notify - title: Composer notify - working_directory: ./applications/composer/tasks/notify - tag: '${{COMPOSER_NOTIFY_TAG}}' + - COMPOSER=${{REGISTRY}}/cloud-harness/composer:${{COMPOSER_TAG}} + image_name: cloud-harness/composer-neurondm + title: Composer neurondm + working_directory: ./applications/composer/tasks/neurondm + tags: + - '${{COMPOSER_NEURONDM_TAG}}' + - '${{DEPLOYMENT_PUBLISH_TAG}}-dev' + - '${{CF_BRANCH_TAG_NORMALIZED_LOWER_CASE}}' when: condition: any: - buildDoesNotExist: includes('${{COMPOSER_NOTIFY_TAG_EXISTS}}', '{{COMPOSER_NOTIFY_TAG_EXISTS}}') + buildDoesNotExist: includes('${{COMPOSER_NEURONDM_TAG_EXISTS}}', 
'{{COMPOSER_NEURONDM_TAG_EXISTS}}') == true - forceNoCache: includes('${{COMPOSER_NOTIFY_TAG_FORCE_BUILD}}', '{{COMPOSER_NOTIFY_TAG_FORCE_BUILD}}') + forceNoCache: includes('${{COMPOSER_NEURONDM_TAG_FORCE_BUILD}}', '{{COMPOSER_NEURONDM_TAG_FORCE_BUILD}}') == false + title: Build parallel step 4 deployment: stage: deploy type: helm @@ -208,6 +290,21 @@ steps: custom_values: - apps_composer_harness_secrets_SECRET__KEY=${{SECRET__KEY}} - apps_composer_harness_secrets_SOCIAL__AUTH__ORCID__SECRET=${{SOCIAL__AUTH__ORCID__SECRET}} + wait_deployment: + stage: qa + title: Wait deployment to be ready + image: codefresh/kubectl + commands: + - kubectl config use-context ${{CLUSTER_NAME}} + - kubectl config set-context --current --namespace=${{NAMESPACE}} + - kubectl rollout status deployment/accounts + - kubectl rollout status deployment/argo-gk + - kubectl rollout status deployment/composer + - sleep 60 + when: + condition: + all: + whenVarExists: includes("${{SKIP_TESTS}}", "{{SKIP_TESTS}}") == true approval: type: pending-approval stage: publish @@ -224,49 +321,64 @@ steps: stage: publish type: push title: Cloudharness frontend build - candidate: '${{REGISTRY}}/cloudharness/cloudharness-frontend-build:${{CLOUDHARNESS_FRONTEND_BUILD_TAG}}' + candidate: '${{REGISTRY}}/cloud-harness/cloudharness-frontend-build:${{CLOUDHARNESS_FRONTEND_BUILD_TAG}}' tags: - '${{DEPLOYMENT_PUBLISH_TAG}}' + - latest registry: '${{REGISTRY_PUBLISH_URL}}' - publish_cloudharness-base-debian: + publish_cloudharness-base: stage: publish type: push - title: Cloudharness base debian - candidate: '${{REGISTRY}}/cloudharness/cloudharness-base-debian:${{CLOUDHARNESS_BASE_DEBIAN_TAG}}' + title: Cloudharness base + candidate: '${{REGISTRY}}/cloud-harness/cloudharness-base:${{CLOUDHARNESS_BASE_TAG}}' tags: - '${{DEPLOYMENT_PUBLISH_TAG}}' + - latest registry: '${{REGISTRY_PUBLISH_URL}}' publish_cloudharness-django: stage: publish type: push title: Cloudharness django - candidate: 
'${{REGISTRY}}/cloudharness/cloudharness-django:${{CLOUDHARNESS_DJANGO_TAG}}' + candidate: '${{REGISTRY}}/cloud-harness/cloudharness-django:${{CLOUDHARNESS_DJANGO_TAG}}' tags: - '${{DEPLOYMENT_PUBLISH_TAG}}' + - latest registry: '${{REGISTRY_PUBLISH_URL}}' publish_accounts: stage: publish type: push title: Accounts - candidate: '${{REGISTRY}}/cloudharness/accounts:${{ACCOUNTS_TAG}}' + candidate: '${{REGISTRY}}/cloud-harness/accounts:${{ACCOUNTS_TAG}}' tags: - '${{DEPLOYMENT_PUBLISH_TAG}}' + - latest registry: '${{REGISTRY_PUBLISH_URL}}' publish_composer: stage: publish type: push title: Composer - candidate: '${{REGISTRY}}/cloudharness/composer:${{COMPOSER_TAG}}' + candidate: '${{REGISTRY}}/cloud-harness/composer:${{COMPOSER_TAG}}' + tags: + - '${{DEPLOYMENT_PUBLISH_TAG}}' + - latest + registry: '${{REGISTRY_PUBLISH_URL}}' + publish_composer-neurondm: + stage: publish + type: push + title: Composer neurondm + candidate: '${{REGISTRY}}/cloud-harness/composer-neurondm:${{COMPOSER_NEURONDM_TAG}}' tags: - '${{DEPLOYMENT_PUBLISH_TAG}}' + - latest registry: '${{REGISTRY_PUBLISH_URL}}' publish_composer-notify: stage: publish type: push title: Composer notify - candidate: '${{REGISTRY}}/cloudharness/composer-notify:${{COMPOSER_NOTIFY_TAG}}' + candidate: '${{REGISTRY}}/cloud-harness/composer-notify:${{COMPOSER_NOTIFY_TAG}}' tags: - '${{DEPLOYMENT_PUBLISH_TAG}}' + - latest registry: '${{REGISTRY_PUBLISH_URL}}' when: condition: diff --git a/deployment/codefresh-stage.yaml b/deployment/codefresh-stage.yaml index 105e2e97..8ac3c74c 100644 --- a/deployment/codefresh-stage.yaml +++ b/deployment/codefresh-stage.yaml @@ -34,7 +34,8 @@ steps: commands: - bash cloud-harness/install.sh - harness-deployment cloud-harness . 
-t ${{DEPLOYMENT_TAG}} -d ${{DOMAIN}} -r - ${{REGISTRY}} -rs ${{REGISTRY_SECRET}} -n ${{NAMESPACE}} -e stage -i composer + ${{REGISTRY}} -rs '${{REGISTRY_SECRET}}' -n ${{NAMESPACE}} -e stage --no-cd -i + composer prepare_deployment_view: commands: - helm template ./deployment/helm --debug -n ${{NAMESPACE}} @@ -62,6 +63,17 @@ steps: custom_values: - apps_composer_harness_secrets_SECRET__KEY="${{SECRET__KEY}}" - apps_composer_harness_secrets_SOCIAL__AUTH__ORCID__SECRET="${{SOCIAL__AUTH__ORCID__SECRET}}" + wait_deployment: + stage: qa + title: Wait deployment to be ready + image: codefresh/kubectl + commands: + - kubectl config use-context ${{CLUSTER_NAME}} + - kubectl config set-context --current --namespace=${{NAMESPACE}} + - kubectl rollout status deployment/argo-gk + - kubectl rollout status deployment/accounts + - kubectl rollout status deployment/composer + - sleep 60 manual_tests: type: pending-approval stage: publish @@ -86,16 +98,16 @@ steps: stage: publish type: push title: Cloudharness frontend build - candidate: '${{REGISTRY}}/cloudharness/cloudharness-frontend-build:${{DEPLOYMENT_TAG}}' + candidate: '${{REGISTRY}}/cloud-harness/cloudharness-frontend-build:${{DEPLOYMENT_TAG}}' tags: - '${{DEPLOYMENT_PUBLISH_TAG}}' - latest registry: '${{REGISTRY_PUBLISH_URL}}' - publish_cloudharness-base-debian: + publish_cloudharness-base: stage: publish type: push - title: Cloudharness base debian - candidate: '${{REGISTRY}}/cloudharness/cloudharness-base-debian:${{DEPLOYMENT_TAG}}' + title: Cloudharness base + candidate: '${{REGISTRY}}/cloud-harness/cloudharness-base:${{DEPLOYMENT_TAG}}' tags: - '${{DEPLOYMENT_PUBLISH_TAG}}' - latest @@ -104,7 +116,7 @@ steps: stage: publish type: push title: Cloudharness django - candidate: '${{REGISTRY}}/cloudharness/cloudharness-django:${{DEPLOYMENT_TAG}}' + candidate: '${{REGISTRY}}/cloud-harness/cloudharness-django:${{DEPLOYMENT_TAG}}' tags: - '${{DEPLOYMENT_PUBLISH_TAG}}' - latest @@ -113,7 +125,7 @@ steps: stage: publish type: 
push title: Accounts - candidate: '${{REGISTRY}}/cloudharness/accounts:${{DEPLOYMENT_TAG}}' + candidate: '${{REGISTRY}}/cloud-harness/accounts:${{DEPLOYMENT_TAG}}' tags: - '${{DEPLOYMENT_PUBLISH_TAG}}' - latest @@ -122,7 +134,16 @@ steps: stage: publish type: push title: Composer - candidate: '${{REGISTRY}}/cloudharness/composer:${{DEPLOYMENT_TAG}}' + candidate: '${{REGISTRY}}/cloud-harness/composer:${{DEPLOYMENT_TAG}}' + tags: + - '${{DEPLOYMENT_PUBLISH_TAG}}' + - latest + registry: '${{REGISTRY_PUBLISH_URL}}' + publish_composer-neurondm: + stage: publish + type: push + title: Composer neurondm + candidate: '${{REGISTRY}}/cloud-harness/composer-neurondm:${{DEPLOYMENT_TAG}}' tags: - '${{DEPLOYMENT_PUBLISH_TAG}}' - latest @@ -131,7 +152,7 @@ steps: stage: publish type: push title: Composer notify - candidate: '${{REGISTRY}}/cloudharness/composer-notify:${{DEPLOYMENT_TAG}}' + candidate: '${{REGISTRY}}/cloud-harness/composer-notify:${{DEPLOYMENT_TAG}}' tags: - '${{DEPLOYMENT_PUBLISH_TAG}}' - latest