From a1e54084ed3427148fbbe62b9291b8629fb0371f Mon Sep 17 00:00:00 2001 From: Melissa Bristow Date: Sat, 2 Oct 2021 18:49:57 +0100 Subject: [PATCH] Malvern app updates --- PyStationB/environment.yml | 2 +- PyStationB/libraries/ABEX/requirements.txt | 1 + PyStationB/libraries/PyBCKG/setup.py | 2 +- .../libraries/Utilities/psbutils/misc.py | 2 +- PyStationB/projects/Malvern/.amlignore | 46 ++ PyStationB/projects/Malvern/.coveragerc | 13 + .../projects/Malvern/{api => }/.flaskenv | 2 +- PyStationB/projects/Malvern/README.md | 30 +- PyStationB/projects/Malvern/api/__init__.py | 11 + PyStationB/projects/Malvern/api/app.py | 403 ---------- PyStationB/projects/Malvern/api/routes.py | 600 +++++++++++++++ PyStationB/projects/Malvern/environment.yml | 4 + PyStationB/projects/Malvern/package.json | 4 +- PyStationB/projects/Malvern/public/index.html | 4 +- .../projects/Malvern/requirements_dev.txt | 11 + .../Malvern/src/actions/ConfigActions.ts | 11 +- .../Malvern/src/actions/DatasetActionTypes.ts | 35 +- .../Malvern/src/actions/DatasetActions.ts | 70 +- .../src/actions/ExperimentActionTypes.ts | 8 +- .../Malvern/src/actions/ExperimentActions.ts | 46 +- .../Malvern/src/components/Interfaces.tsx | 13 + .../src/components/utils/validators.tsx | 16 + .../Malvern/src/pages/CloneExperimentPage.tsx | 75 +- .../src/pages/NewExperimentFormPage.tsx | 127 ++-- .../Malvern/src/pages/NewIterationPage.tsx | 195 +++-- .../src/pages/ViewExperimentResultsPage.tsx | 70 +- .../Malvern/src/reducers/ConfigReducers.ts | 12 +- .../Malvern/src/reducers/DatasetReducers.ts | 39 +- .../src/reducers/ExperimentsReducers.ts | 40 +- .../Malvern/src/reducers/RootReducer.ts | 12 +- .../Malvern/src/reducers/reducerInterfaces.ts | 80 ++ .../Malvern/src/store/configureStore.tsx | 13 +- .../projects/Malvern/src/store/connectors.tsx | 9 +- .../projects/Malvern/tests/test_routes.py | 698 ++++++++++++++++++ PyStationB/requirements.txt | 4 +- PyStationB/scripts/check_requirements.py | 18 +- 36 files changed, 1960 
insertions(+), 766 deletions(-) create mode 100644 PyStationB/projects/Malvern/.amlignore create mode 100644 PyStationB/projects/Malvern/.coveragerc rename PyStationB/projects/Malvern/{api => }/.flaskenv (50%) delete mode 100644 PyStationB/projects/Malvern/api/app.py create mode 100644 PyStationB/projects/Malvern/api/routes.py create mode 100644 PyStationB/projects/Malvern/src/components/utils/validators.tsx create mode 100644 PyStationB/projects/Malvern/tests/test_routes.py diff --git a/PyStationB/environment.yml b/PyStationB/environment.yml index a297822..e911386 100644 --- a/PyStationB/environment.yml +++ b/PyStationB/environment.yml @@ -5,4 +5,4 @@ dependencies: - python==3.8.3 - pip==20.1.1 - pip: - - -r requirements_dev.txt + - -r requirements_dev.txt \ No newline at end of file diff --git a/PyStationB/libraries/ABEX/requirements.txt b/PyStationB/libraries/ABEX/requirements.txt index 2252839..01b3b6d 100644 --- a/PyStationB/libraries/ABEX/requirements.txt +++ b/PyStationB/libraries/ABEX/requirements.txt @@ -6,6 +6,7 @@ gpy==1.10.0 matplotlib==3.4.2 pandas==1.3.1 param==1.11.1 +psutil==5.7.1 pydantic==1.8.2 pydoe==0.3.8 pyyaml==5.4.1 diff --git a/PyStationB/libraries/PyBCKG/setup.py b/PyStationB/libraries/PyBCKG/setup.py index dd408af..f3e134a 100644 --- a/PyStationB/libraries/PyBCKG/setup.py +++ b/PyStationB/libraries/PyBCKG/setup.py @@ -20,7 +20,7 @@ long_description=long_description, long_description_content_type="text/markdown", url="", - packages=["pyBCKG"], + packages=setuptools.find_packages(), classifiers=[ "Programming Language :: Python :: 3", "Operating System :: OS Independent", diff --git a/PyStationB/libraries/Utilities/psbutils/misc.py b/PyStationB/libraries/Utilities/psbutils/misc.py index 3263661..a43641f 100644 --- a/PyStationB/libraries/Utilities/psbutils/misc.py +++ b/PyStationB/libraries/Utilities/psbutils/misc.py @@ -70,7 +70,7 @@ def flatten_list(lst: List[List[Any]]) -> List[Any]: # pragma: no cover def find_root_directory() -> Path: # 
pragma: no cover - path = Path(__file__).parent + path = Path.cwd() while True: if (path / "libraries").is_dir() and (path / "projects").is_dir(): return path.resolve() diff --git a/PyStationB/projects/Malvern/.amlignore b/PyStationB/projects/Malvern/.amlignore new file mode 100644 index 0000000..597148a --- /dev/null +++ b/PyStationB/projects/Malvern/.amlignore @@ -0,0 +1,46 @@ +*.egg-info/ +*.ipynb +*.md +.amlignore +.coverage +.coveragerc +.flake8 +.git +.gitattributes +.github +.gitignore +.idea +.mypy.ini +.mypy_cache/ +.pre-commit-config.yaml +.pytest_cache/ +.testmondata +.vscode/ +/additional_requirements*.txt +/azure-pipelines/ +/mypy.ini +/mypy_runner.py +/node_modules/ +/scripts/ +DATA/ +Data/ +LICENSE +Makefile +Notebooks/ +README.txt +Results/ +SC_RESULTS/ +__pycache__/ +abex-results/ +doc/ +docs/ +libraries/StaticCharacterization +libraries/UniProt +make.bat +projects/Barcoder +projects/CellSignalling +projects/OXB_Proteomics +projects/OXB_Transfection +slow_tests/ +test_outputs/ +tests/ diff --git a/PyStationB/projects/Malvern/.coveragerc b/PyStationB/projects/Malvern/.coveragerc new file mode 100644 index 0000000..7be0b7f --- /dev/null +++ b/PyStationB/projects/Malvern/.coveragerc @@ -0,0 +1,13 @@ +[run] +parallel = true +omit = + */setup.py, + */tests/*, + /simulator/* +source = + api + +[report] +show_missing = true +precision = 2 +fail_under = 100.00 diff --git a/PyStationB/projects/Malvern/api/.flaskenv b/PyStationB/projects/Malvern/.flaskenv similarity index 50% rename from PyStationB/projects/Malvern/api/.flaskenv rename to PyStationB/projects/Malvern/.flaskenv index fd9ee63..60bf7e7 100644 --- a/PyStationB/projects/Malvern/api/.flaskenv +++ b/PyStationB/projects/Malvern/.flaskenv @@ -1,2 +1,2 @@ -FLASK_APP=api/app.py +FLASK_APP=api FLASK_ENV=development \ No newline at end of file diff --git a/PyStationB/projects/Malvern/README.md b/PyStationB/projects/Malvern/README.md index 4a9e677..4e1d0f3 100644 --- a/PyStationB/projects/Malvern/README.md 
+++ b/PyStationB/projects/Malvern/README.md @@ -1,31 +1,29 @@ ## Getting started + 1. Install [Node.js](https://nodejs.org/en/). This will automatically install Node Package Manager (npm) too. + 2. To install the dependencies for our app (specified in package.json): -```cd app``` -```npm install``` -## Setting up the backend (Flask api) -To create the virtual environment: +`npm install` -```pyStationB/app/api$ python -m venv MalvernEnv``` +## Setting up the conda envrionment +Run the following command from the Malvern directory: -## To install the packages: -```pyStationB/app/api$ python -m pip install -r ../requirements_dev.txt``` -```pyStationB/app/api$ python -m pip install ../../libraries/ABEX``` -```pyStationB/app/api$ python -m pip install ../../libraries/Emukit``` +`conda env create -f environment.yml` -## Install pyBCKG git submodule: -```pyStationB/app$ cd ../ ``` -```pyStationB$ git submodule update --init --recursive`` +## To Activate the conda env: -## To Activate the virtual env: +`conda activate malvern` -```pyStationB/app$ api\MalvernEnv\Scripts\activate Malvern`` ## Running the app ### To start the React app -`app$ npm run start` + +`npm run start` ### To start the Flask API -`app$ npm run start-api` +`npm run start-api` + +## Connecting to Azure +When you first start the app you will be prompted to login. To do this, enter a connection string in the login tab. 
Follow the [instructions here](https://docs.microsoft.com/en-us/azure/storage/common/storage-account-keys-manage?toc=%2Fazure%2Fstorage%2Fblobs%2Ftoc.json&tabs=azure-portal#view-account-access-keys) to get the connection string for your storage account \ No newline at end of file diff --git a/PyStationB/projects/Malvern/api/__init__.py b/PyStationB/projects/Malvern/api/__init__.py index e69de29..bf860d6 100644 --- a/PyStationB/projects/Malvern/api/__init__.py +++ b/PyStationB/projects/Malvern/api/__init__.py @@ -0,0 +1,11 @@ +# ------------------------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License (MIT). See LICENSE in the repo root for license information. +# ------------------------------------------------------------------------------------------- +from flask import Flask + +app = Flask(__name__) +app.config["UPLOADS_DIR"] = "./uploads" +from api import routes # type: ignore # noqa: E402 + +assert routes is not None # for flake8 diff --git a/PyStationB/projects/Malvern/api/app.py b/PyStationB/projects/Malvern/api/app.py deleted file mode 100644 index 4685cad..0000000 --- a/PyStationB/projects/Malvern/api/app.py +++ /dev/null @@ -1,403 +0,0 @@ -# ------------------------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License (MIT). See LICENSE in the repo root for license information. 
-# ------------------------------------------------------------------------------------------- -import json -import sys -from datetime import datetime -from pathlib import Path -from typing import Any, Dict, TypeVar - -import yaml -from azure.cosmosdb.table.tableservice import TableService -from azure.storage.blob import BlobClient -from flask import Flask, Response, jsonify, request -from werkzeug.http import HTTP_STATUS_CODES - -ROOT_DIR = Path(__file__).parent.parent.parent.parent -sys.path.append(str(ROOT_DIR)) -PYBCKG_DIR = ROOT_DIR / "libraries" / "PyBCKG" -sys.path.append(str(PYBCKG_DIR)) - -SPECS_DIR = ROOT_DIR / "libraries/ABEX/tests/data/specs" - -from abex.optimizers.optimizer_base import OptimizerBase # noqa: E402 -from abex.settings import load_config_from_path_or_name, load_resolutions # noqa: E402 # type: ignore # auto - -from libraries.PyBCKG.pyBCKG.azurestorage.api import from_connection_string # noqa: E402 -from libraries.PyBCKG.pyBCKG.utils import HttpRequestMethod # noqa: E402 - -app = Flask(__name__) - - -@app.route("/") -@app.route("/get-experiment-options", methods=["GET"]) -def get_experiments(): - connection_string = request.headers.get("storageConnectionString") - - az_conn = from_connection_string(connection_string) - query = "experiments()" - # queryfilter = az_conn._queryfilter('deprecated', "False") - # queryfilter = f"?$filter=Deprecated eq '{False}'" - queryfilter = "" - expt_json = az_conn.query_table(HttpRequestMethod.GET, query, queryfilter) - - experiments = expt_json["value"] - return Response(json.dumps(experiments), mimetype="application/json") - - -@app.route("/get-aml-runs", methods=["GET"]) -def get_aml_runs(): - connection_string = request.headers.get("storageConnectionString") - - az_conn = from_connection_string(connection_string) - query = "azuremlruns()" - queryfilter = "" - aml_runs_json = az_conn.query_table(HttpRequestMethod.GET, query, queryfilter) - - aml_runs = aml_runs_json["value"] - - return 
Response(json.dumps(aml_runs), mimetype="application/json") - - -@app.route("/get-experiment-result", methods=["GET"]) -def get_experiment_results(): - # connection_string = request.headers.get("storageConnectionString") - - experiment_name = request.args.get("experimentName") - print(f"experiment name: {experiment_name}") - - # TODO: Download AML run and construct IExperimentResult object - - experiment_results = [ - { - "id": 1, - "description": "", - "samples": [{}], - "signals": [{}], - "type": "", - "timestamp": "", - "deprecated": "", - "name": "experiment1", - "iterations": ["1"], - "folds": ["1", "2", "3"], - "imageFolders": ["/abex-results/tutorial-intro"], - "imageNames": [ - "slice1d_", - "slice2d_", - "acquisition1d_", - "train_test_", - "bo_distance", - "bo_experiment", - ], - "suggestedExperiments": [ - {"x": 5.0, "y": 8.4}, - {"x": 5.6, "y": 8.3}, - {"x": 5.3, "y": 8.5}, - {"x": 5.7, "y": 8.8}, - {"x": 5.4, "y": 8.2}, - ], - }, - { - "id": 2, - "description": "", - "samples": [{}], - "signals": [{}], - "type": "", - "timestamp": "", - "deprecated": "", - "name": "experiment2", - "iterations": ["1", "2", "3", "4", "5", "6", "7", "8", "9", "10"], - "folds": ["1", "2", "3", "4", "5", "6", "7", "8", "9"], - "imageFolders": ["/abex-results/synthetic_3input_batch5"], - "imageNames": [ - "slice1d_", - "slice2d_", - "acquisition1d_", - "train_only", - "bo_distance", - "bo_experiment", - ], - "suggestedExperiments": [ - {"ARA": 1.0, "ATC": 3.3, "C_on": 1}, - {"ARA": 1.1, "ATC": 3.5, "C_on": 1}, - {"ARA": 1.1, "ATC": 3.6, "C_on": 1}, - {"ARA": 1.1, "ATC": 3.8, "C_on": 1}, - {"ARA": 1.1, "ATC": 3.5, "C_on": 1}, - ], - }, - ] - - # This will be the real result once we are storing these storing results in BCKG - # expt_result =[ex for ex in experiment_results if ex.name == experiment_name][0] - expt_result = experiment_results[0] - - return Response(json.dumps(expt_result), mimetype="application/json") - - -@app.route("/get-config-options", methods=["GET"]) -def 
get_configs(): - configs = [{"id": "1", "name": "config1"}, {"id": "2", "name": "config2"}] - - return Response(json.dumps(configs), mimetype="application/json") - - -@app.route("/get-dataset-options", methods=["GET"]) -def get_datasets(): - datasets = [ - { - "id": "1", - "name": "dataset1", - "dateCreated": "17 Feb 2021", - "dataRecords": [ - { - "SampleId": "S1", - "ObservationId": "O3", - "Arabinose": 4.67, - "C6": 345.23, - "C12": 12334.34, - "EYFP": 187982.23, - "ECFP": 23445.4, - "MRFP": 765.67, - }, - { - "SampleId": "S2", - "ObservationId": "O1", - "Arabinose": 6.54, - "C6": 234.63, - "C12": 3243.98, - "EYFP": 87668.34, - "ECFP": 72726.21, - "MRFP": 7725.43, - }, - ], - }, - { - "id": "2", - "name": "dataset2", - "dateCreated": "01 March 2021", - "dataRecords": [ - {"A": "a", "B": 2, "C": "e", "D": 4}, - {"A": "b", "B": 1, "C": "f", "D": 1}, - ], - }, - ] - return Response(json.dumps(datasets), mimetype="application/json") - - -def parse_binary_to_dict(data: bytes) -> yaml.YAMLObject: - header_dict = json.loads(data) - data_dict = header_dict.get("headers") - - return data_dict - - -def get_file_name(data_dict: Dict[str, Any]): - config_name = data_dict.get("fileName") - return config_name - - -def get_config_data(data_dict: Dict[str, Any]): - config_data = data_dict.get("config") - config_json = yaml.safe_load(config_data) # type: ignore - return config_json - - -T = TypeVar("T") - - -def get_csv_data(data_dict: Dict[str, T]) -> T: - observation_data = data_dict.get("observations") - print("observation data: ", observation_data) - # TODO: PARSE CSV - observations = observation_data - assert observations is not None # since non-None return is assumed by some callers - return observations - - -def get_connection_str_from_binary(data: bytes): - header_dict = json.loads(data) - data_dict = header_dict.get("headers") - connection_str = data_dict.get("storageConnectionString") - return connection_str - - -def http_response(status_code, message, error_code=None, 
response=None, reason=None): - response_status_code = HTTP_STATUS_CODES.get(status_code, "Unknown error") - if response_status_code == 409 and "blob already exists" in message: - message += " If you are sure this is new data, try renaming the file." - response = jsonify( - error=response_status_code, - error_code=error_code, - reason=reason, - message=message, - ) - response.status_code = status_code - return response - - -def upload_to_blob_storage(yaml_data: yaml.YAMLObject, connection_string: str, blob_name: str): - blob = BlobClient.from_connection_string( - conn_str=connection_string, container_name="testfiles", blob_name=blob_name - ) - # upload blob - try: - blob.upload_blob(json.dumps(yaml_data), overwrite=False) - return http_response(200, "Success") - except Exception as e: - # TODO: specify the type of the exception more exactly, so we can be sure it has the fields assumed here. - response = http_response( - e.status_code, # type: ignore - e.message, # type: ignore - error_code=e.error_code, # type: ignore - response=e.response, # type: ignore - reason=e.reason, # type: ignore - ) - print(response) - return response - - # List the blobs in the container - # blob_list_after = container_client.list_blobs() - - -def insert_config_table_entry(connection_string: str, config_name: str, config_path: str): - table_conn = TableService(connection_string=connection_string) - new_entry = { - "PartitionKey": "app", - "RowKey": config_name, - "Timestamp": datetime.now(), - "ConfigName": config_name, - "PathToBlob": config_path, - } - table_conn.insert_entity("abexconfigs", new_entry) - - -def insert_observation_table_entry(connection_string: str, file_name: str, file_path: str): - table_conn = TableService(connection_string=connection_string) - new_entry = { - "PartitionKey": "app", - "RowKey": file_name, - "Timestamp": datetime.now(), - "FileName": file_name, - "PathToBlob": file_path, - } - table_conn.insert_entity("abexObservations", new_entry) - - 
-@app.route("/upload-config-data", methods=["GET", "POST"]) -def upload_config_data(): - """ - Parse data into yaml and then upload to blob storage, as well as creating table entry - """ - data = request.get_data() - - data_dict = parse_binary_to_dict(data) - config_name: str = get_file_name(data_dict) # type: ignore # auto - config_data = get_config_data(data_dict) # type: ignore # auto - blob_name = config_name.split(".")[0] - blob_path = "testfiles/" + blob_name - - # Upload the file to blob storage - connection_string = get_connection_str_from_binary(data) - upload_blob_response = upload_to_blob_storage(config_data, connection_string, blob_name) - if upload_blob_response.status_code != 200: - return upload_blob_response - - # TODO: move this once specs folders fixed - # copy into abex specs folder - new_spec_path = SPECS_DIR / config_name - print("saving new spec to: ", new_spec_path) - with open(new_spec_path, "w+") as f_path: - yaml.dump(config_data, f_path) - - assert new_spec_path.is_file() - - # Add storage table entry - insert_config_table_entry(connection_string, blob_name, blob_path) - return {"filePath": config_name} - - -@app.route("/upload-observation-data", methods=["GET", "POST"]) -def upload_observation_data(): - """ - Upload observations - """ - data = request.get_data() - data_dict = parse_binary_to_dict(data) - print(f"data dict: {data_dict}") - file_name = get_file_name(data_dict) # type: ignore # auto - csv_data: yaml.YAMLObject = get_csv_data(data_dict) # type: ignore # auto - - blob_name = file_name.split(".")[0] - blob_path = "testfiles/" + blob_name - - # Upload the file to blob storage - connection_string = get_connection_str_from_binary(data) - upload_blob_response = upload_to_blob_storage(csv_data, connection_string, blob_name) - if upload_blob_response.status_code != 200: - return upload_blob_response - - # Add storage table entry - insert_observation_table_entry(connection_string, blob_name, blob_path) - - return {"filePath": 
file_name} - - -@app.route("/login/", methods=["GET"]) -def login(connection_string: str): - conn = from_connection_string(connection_string) - if conn: - print("conn successful") - return {"success": True} - - -@app.route("/submit-new-experiment", methods=["GET", "POST"]) -def submit_new_experiment(): - # TODO: start new experiment track - """ - Submit a new experiment action. - 1. Retrieve the config from user's config table - 2. Retrieve the csv to user's csv table - - x. Create ABEX Config - y. Submit the ABEX experiment - """ - data = request.get_data() - print(f"Data sent to submit-new-experiment: {data}") - - data_dict = parse_binary_to_dict(data) - print(f"\ndata dict: {data_dict}") - - config_path = data_dict.get("configPath") # type: ignore # auto - # config_name = config_path.split('.')[0] - print(f"config path: {config_path}") - # observation_path = data_dict.get("observationsPath") - - yaml_file_path, config_dict = load_config_from_path_or_name(config_path) - print(f"yaml file path: {yaml_file_path}") - print(f"config dict: {config_dict}") - - for pair_list in load_resolutions(config_path): - for _, config in pair_list: - # Decide which optimization strategy should be used - print(f"\nConfig: {config}") - optimizer = OptimizerBase.from_strategy(config, config.optimization_strategy) - optimizer.run() - - return data - - -@app.route("/submit-iteration", methods=["GET", "POST"]) -def submit_iteration_form(): - # TODO: kick off new iteration - data = request.get_data() - print(data) - return data - - -@app.route("/submit-clone", methods=["GET", "POST"]) -def submit_cloneform(): - # TODO: kick off clone of previous experiment - data = request.get_data() - print(data) - return data diff --git a/PyStationB/projects/Malvern/api/routes.py b/PyStationB/projects/Malvern/api/routes.py new file mode 100644 index 0000000..4c0b524 --- /dev/null +++ b/PyStationB/projects/Malvern/api/routes.py @@ -0,0 +1,600 @@ +# 
------------------------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License (MIT). See LICENSE in the repo root for license information. +# ------------------------------------------------------------------------------------------- +import json +import sys +from werkzeug.datastructures import EnvironHeaders + +from azure.cosmosdb.table.tableservice import TableService +from azure.storage.blob import BlobClient +from datetime import datetime +from flask import Response, jsonify, request +from pandas import DataFrame, read_csv +from pathlib import Path +from typing import Any, Dict, Tuple, Union +from werkzeug.http import HTTP_STATUS_CODES +from werkzeug.wrappers.request import Request +from yaml import safe_load as yaml_safe_load +from yaml import dump as yaml_dump + +from abex.simulations.run_simulator_pipeline import run_simulator_pipeline # type: ignore # noqa: E402 +from abex.settings import load_config_from_path_or_name # type: ignore # noqa: E402 # auto +from psbutils.misc import ROOT_DIR # type: ignore # noqa: E402 +from pyBCKG.azurestorage.api import from_connection_string # type: ignore # noqa: E402 +from pyBCKG.utils import HttpRequestMethod # type: ignore # noqa: E402 + +from api import app # type: ignore # noqa: E402 + + +MALVERN_DIR = ROOT_DIR / "projects" / "Malvern" +SIMULATION_SCRIPTS_DIR = ROOT_DIR / "projects" / "Malvern" / "simulator" +BLOB_CONTAINER = "testfiles" +CELLSIG_SCRIPTS_DIR = ROOT_DIR / "projects" / "CellSignaling" / "cellsignalling" / "cellsig_sim" / "scripts" + +@app.route("/") # type: ignore +@app.route("/get-experiment-options", methods=["GET"]) # type: ignore +def get_experiments() -> Response: + """ + Retrieves a list of all stored experiments from the relevant Azure Table. 
+ + Returns: Flask Response object containing the stored experiments + + """ + connection_string = request.headers.get("storageConnectionString") + + az_conn = from_connection_string(connection_string) + query = "experiments()" + # queryfilter = az_conn._queryfilter('deprecated', "False") + # queryfilter = f"?$filter=Deprecated eq '{False}'" + queryfilter = "" + expt_json = az_conn.query_table(HttpRequestMethod.GET, query, queryfilter) + experiments = expt_json["value"] + # print(f"\nExpt options: {json.dumps(experiments)}") + return Response(json.dumps(experiments), mimetype="application/json") + + +@app.route("/get-aml-runs", methods=["GET"]) # type: ignore +def get_aml_runs() -> Response: + """ + Retrieves a list of Azure ML Runs (i.e. previously launched experiments) from the + relevant Azure Table. + + Returns: + Flask Response object containing the stored Azure ML RunIds and time of creation + """ + connection_string = request.headers.get("storageConnectionString") + + az_conn = from_connection_string(connection_string) + query = "azuremlruns()" + queryfilter = "" + aml_runs_json = az_conn.query_table(HttpRequestMethod.GET, query, queryfilter) + + aml_runs = aml_runs_json["value"] + # print("\nAzureML Runs: ") + # print(aml_runs) + + return Response(json.dumps(aml_runs), mimetype="application/json") + + +@app.route("/get-experiment-result", methods=["GET"]) # type: ignore +def get_experiment_results() -> Response: + """ + Retrieves a list of stored Experiment Results from the relevant Azure Table. Differents from + Experiments table as it also contains paths to results and suggested next experiments + # TODO: This table only exists in dummy data - create in real. + + Returns: + Flask Response containing the stored ExperimentResults. 
+ """ + + connection_string = request.headers.get("storageConnectionString") + experiment_name = request.args.get("experimentName") + print(f"\n\nLooking for experiment name: {experiment_name}") + + az_conn = from_connection_string(connection_string) + query = "experimentResults()" + queryfilter = f"?$filter=RowKey%20eq%20'{experiment_name}'" + + experiment_results_json = az_conn.query_table(HttpRequestMethod.GET, query, queryfilter) + experiment_results = experiment_results_json["value"] + print(experiment_results) + + assert len(experiment_results) >0, "Error - no experiment exists with that rowKey" + assert len(experiment_results) <2, "Error - more than 1 experiment exists with that RowKey" + + + return Response(json.dumps(experiment_results), mimetype="application/json") + + +@app.route("/get-config-options", methods=["GET"]) # type: ignore +def get_configs() -> Response: + """ + Retrieves a list of stored configs (name and path to location in Blob Storage) from the relevant Azure Table + + Returns: + Flask Response containing the returned Config entries + """ + # TODO: This table only exists in dummy data - create in real + # configs = [{"id": "1", "name": "config1"}, {"id": "2", "name": "config2"}] + connection_string = request.headers.get("storageConnectionString") + az_conn = from_connection_string(connection_string) + query = "abexconfigs()" + queryfilter = "" + + experiment_results_json = az_conn.query_table(HttpRequestMethod.GET, query, queryfilter) + configs = experiment_results_json["value"] + # print("Configs: ", configs) + return Response(json.dumps(configs), mimetype="application/json") + + +@app.route("/get-dataset-options", methods=["GET"]) # type: ignore +def get_datasets(): + """ + Retrieves a list of stored datasets (name and path to location in Blob Storage) from the relevant Azure Table + + Returns: + Flask Response containing the returned Config entries + """ + connection_string = request.headers.get("storageConnectionString") + az_conn = 
from_connection_string(connection_string) + query = "abexDatasets()" + queryfilter = "" + + dataset_results_json = az_conn.query_table(HttpRequestMethod.GET, query, queryfilter) + datasets = dataset_results_json["value"] + return Response(json.dumps(datasets), mimetype="application/json") + + +def get_file_name(data_dict: Dict[str, Any]): + """ + Given a dictionary retrieved from a Flask request, retrieve the value + of the 'fileName' field + + Args: + data_dict: the dictionary retrieved from the Flask request + + Returns: + The config name + """ + config = data_dict.get("config") + assert config is not None + config_data = json.loads(config) + config_name = config_data.get("fileName") + return config_name + + +def get_connection_str_from_request_headers(request: Request): + """ + Get Azure Table Storage connection string from the headers of a Flask request + + Args: + request: Flask request + + Returns: + An Azure Storage connection string + """ + headers: EnvironHeaders = request.headers + connection_str = headers["storageConnectionString"] + return connection_str + + +def http_response(status_code, message, error_code=None, reason=None): + """ + Generate a Flask Response + + Args: + status_code: HTTP status code of the request. + message: message body + error_code: Optional HTTP status related to the error + reason: short description of the status code. + + Returns: + [type]: [description] + """ + response_status_code = HTTP_STATUS_CODES.get(status_code, "Unknown error") + if status_code == 409 and "blob already exists" in message: + message += " If you are sure this is new data, try renaming the file." 
+ response = jsonify( + status_code=response_status_code, + message=message, + error_code=error_code, + reason=reason, + ) + response.status_code = status_code + return response + + +def upload_to_blob_storage(data: str, connection_string: str, blob_name: str): + """ + Upload data to blob storage + + Args: + data: The data to upload + connection_string: Connection string to connect to Azure Storage + blob_name: Name to save the Blob as + + Returns: + HTTP response demonstrating success or failure + """ + blob = BlobClient.from_connection_string( + conn_str=connection_string, container_name=BLOB_CONTAINER, blob_name=blob_name + ) + # upload blob + try: + blob.upload_blob(data, overwrite=False) + return http_response(200, "Success") + except Exception as e: + print(f"Exception: {e}") + # TODO: specify the type of the exception more exactly, so we can be sure it has the fields assumed here. + response = http_response( + e.status_code, # type: ignore + e.message, # type: ignore + error_code=e.error_code, # type: ignore + reason=e.reason, # type: ignore + ) + print(response) + return response + + # List the blobs in the container + # blob_list_after = container_client.list_blobs() + + +def insert_config_table_entry(connection_string: str, config_name: str, config_path: str): + """ + Insert entry to relevant Azure Table, recording timestamp, config name + and path to config file in Blob Storage. 
+ + Args: + connection_string: A string containing credentials for connecting to Azure Table + config_name: The name of the uploaded file containing the config + config_path: The path in Blob Storage where the config is stored + """ + table_conn = TableService(connection_string=connection_string) + new_entry = { + "PartitionKey": "app", + "RowKey": config_name, + "Timestamp": datetime.now(), + "ConfigName": config_name, + "PathToBlob": config_path, + } + table_conn.insert_entity("abexconfigs", new_entry) + + +def insert_dataset_table_entry(connection_string: str, file_name: str, file_path: str) -> None: + """ + Insert entry to relevant dataset Azure Table, recording timestamp, observation file name + and path to file to Blob Storage. + + Args: + connection_string: A string containing credentials for connecting to Azure Table + file_name: The name of the uploaded file containing observations + file_path: The path in Blob Storage where the observations are stored + """ + table_conn = TableService(connection_string=connection_string) + new_entry = { + "PartitionKey": "app", + "RowKey": file_name, + "Timestamp": datetime.now(), + "FileName": file_name, + "PathToBlob": file_path, + } + table_conn.insert_entity("abexDatasets", new_entry) + + +def insert_observation_table_entries(connection_string: str, entries_to_insert: DataFrame) -> None: + """ + When user uploads CSV file of observations, add one entry per row into the relevant observations Azure Table. 
+
+    Args:
+        connection_string: string containing credentials for connecting to Azure Table
+        entries_to_insert: Pandas DataFrame of observations to be inserted into Azure Table
+    """
+    table_conn = TableService(connection_string=connection_string)
+    print(f"entities: {entries_to_insert}")
+
+    # TODO: insert entity must be either in dict format or entity object (cosmosdb table)
+    records = entries_to_insert.to_dict(orient="records")
+
+    def _insert_row(row):
+        table_conn.insert_entity("abexObservations", row)  # pragma: no cover
+
+    map(_insert_row, records)
+
+
+@app.route("/upload-config-data", methods=["GET", "POST"])  # type: ignore
+def upload_config_data() -> Dict[str, str]:
+    """
+    When user uploads a config file, insert into relevant Azure Table and store a copy locally,
+    to use in AzureML.
+
+    Returns:
+        A dictionary containing the path to the config file locally.
+    """
+    # TODO: Return blobpath instead of filepath and configure AML to read from Blob Storage
+    config_data = request.get_data()
+    print(f"Data: {config_data}")
+
+    if request.method == "POST":
+        print(f"Request: {request}")
+        config, filename = save_and_read_uploaded_file(request)
+        assert isinstance(filename, str)
+
+        blob_name = filename.split(".")[0]
+        blob_path = BLOB_CONTAINER + '/' + blob_name
+
+        # Upload the file to blob storage
+        connection_string = get_connection_str_from_request_headers(request)
+        upload_blob_response = upload_to_blob_storage(
+            json.dumps(config) + "This is a good request", connection_string, blob_name
+        )
+        if upload_blob_response.status_code != 200:
+            return upload_blob_response  # type: ignore # pragma: no cover
+
+        # Add storage table entry
+        insert_config_table_entry(connection_string, blob_name, blob_path)
+        return {"filePath": filename}
+
+    # otherwise GET request
+    else:
+        return {"filePath": ""}  # pragma: no cover
+
+
+def update_config_folder(config: Dict[str, Any]) -> Dict[str, Any]:
+    """
+    Update the config data to replace the 'folder' entry with the Malvern Uploads directory
+
+    Args:
+        config: The config read from the uploaded file
+
+    Returns:
+        The updated config
+    """
+    # TODO: Folder needs to be specific to experiment
+    new_folder = Path(app.config["UPLOADS_DIR"])  # type: ignore
+    new_folder.mkdir(exist_ok=True)
+    config["data"]["folder"] = str(new_folder)
+    return config
+
+
+def save_and_read_uploaded_file(request: Request) -> Tuple[Union[DataFrame, Dict[str, Any]], str]:
+    """
+    Given a Flask HTTP request, retrieve the uploaded file (if existing), read it and save it in the uploads folder.
+    Returns the data contained in the file (expected to be either a table of observations, or a config dictionary),
+    plus the file name.
+
+    Args:
+        request: Flask HTTP request
+
+    Returns:
+        The contents of the uploaded file (either as a Pandas DataFrame, or a dictionary) and the filename
+    """
+    if request.files:
+        Path(app.config["UPLOADS_DIR"]).mkdir(exist_ok=True)
+        uploaded_file = request.files["file"]
+        file_name: str = Path(uploaded_file.filename).name  # type: ignore
+        filepath = Path(app.config["UPLOADS_DIR"]) / file_name
+        uploaded_file.seek(0)  # type: ignore
+        uploaded_file.save(filepath)
+        if filepath.suffix == ".csv":
+            with open(filepath, "r") as f_path:
+                data = read_csv(f_path)
+        elif filepath.suffix == ".yml":
+            # Separately read and re-write the file
+            with open(filepath, "r") as f_path:
+                data = yaml_safe_load(f_path)
+            # update the config's data folder to point at the Malvern uploads directory
+            if "data" in data:
+                if "folder" in data["data"]:  # pragma: no cover
+                    data = update_config_folder(data)
+            with open(filepath, "w") as f_path:
+                yaml_dump(data, f_path)
+
+        else:
+            raise ValueError(f"Unrecognised file ending: {filepath.suffix}")
+    else:
+        raise ValueError("No file attached to request")
+    uploaded_file_name = uploaded_file.filename
+    assert isinstance(uploaded_file_name, str)
+    return data, uploaded_file_name  # type: ignore
+
+
+@app.route("/upload-observation-data", methods=["GET", "POST"])  # type: ignore
+def upload_observation_data():
+    """
+ When user uploads a CSV file of observations, insert entry into relevant dataset Azure + Table to track uploaded datasets, then parse the uploaded file and add rows as entries in + the relevant observations Azure Table + + Returns: + A dictionary containing the path to the observation file locally + """ + data = request.get_data() + # data_dict = json.loads(data) + print(f"data dict: {data}") + # file_name = get_file_name(data_dict) # type: ignore # auto + # print(f'CSV data: \n {csv_data}') + + if request.method == "POST": + print(f"Request: {request}") + observations, filename = save_and_read_uploaded_file(request) + + blob_name = filename.split(".")[0] + # path in Azure Blob Storage + # TODO: update this path + blob_path = "testfiles/" + blob_name + + # Upload the file to blob storage + connection_string = get_connection_str_from_request_headers(request) + assert isinstance(observations, DataFrame) + upload_blob_response = upload_to_blob_storage(json.dumps(observations.to_dict()), connection_string, blob_name) + if upload_blob_response.status_code != 200: + return upload_blob_response + + # Add storage table entry + insert_dataset_table_entry(connection_string, blob_name, blob_path) + + # Add observations to table + insert_observation_table_entries(connection_string, observations) + + return {"filePath": filename} + + # Otherwise GET request + else: + return {"filePath": ""} # pragma: no cover + + +@app.route("/login/", methods=["GET"]) # type: ignore +def login(connection_string: str): + """ + If connection string parses successfully, consider user to be logged in. 
+ #TODO: validate user permissions + + Args: + connection_string: Azure Storage connection string + + Returns: + success status (whether user was successfully logged in or not) + """ + conn = from_connection_string(connection_string) + if conn: + print("conn successful") + return {"success": True} + else: + return {"success": False} + + +@app.route("/submit-new-experiment", methods=["GET", "POST"]) # type: ignore +def submit_new_experiment(): + """ + Performs the following steps: + 1. Retrieve the config that the user uploaded/ selected + 2. Resolve all combinations of experiment arguments within the config + 3. Launch ABEX experiment (submits job to AML) + + Returns: + Response containing details of the AML Run such as RunId and URL. + """ + sys.path.append(str(ROOT_DIR / "projects")) + from cellsignalling.cellsig_sim.optconfig import CellSignallingOptimizerConfig # type: ignore # noqa: E402 + data = request.get_data() + print(f"Data sent to submit-new-experiment: {data}") + + if request.method == "POST": + + data_dict = json.loads(data) + print(f"\ndata dict: {data_dict}") + + config_name = data_dict["headers"]["configPath"] # type: ignore # auto + config_path = Path(app.config["UPLOADS_DIR"]) / config_name + print(f"config path: {config_path}") + + yaml_file_path, config_dict = load_config_from_path_or_name(config_path) + print(f"yaml file path: {yaml_file_path}") + print(f"config dict: {config_dict}") + + aml_config = data_dict["headers"]["amlConfig"]["data"] + print(f"\nAML config: {aml_config}\n") + + arg_list = [ + "--spec_file", + str(yaml_file_path), + "--aml_root_dir", + str(ROOT_DIR), + "--num_iter", + "2", + "--num_runs", + "2", + "--plot_simulated_slices", + "--submit_to_aml", + ] + + azure_args = { + "subscription_id": aml_config["SubscriptionId"], + "resource_group": aml_config["ResourceGroup"], + "workspace_name": aml_config["WorkspaceName"], + "compute_target": aml_config["ComputeTarget"], + } + + run_script_path = CELLSIG_SCRIPTS_DIR / 
"run_cell_simulations_in_pipeline.py" + plot_script_path = CELLSIG_SCRIPTS_DIR / "plot_cellsig_predicted_optimum_convergence.py" + loop_config_class = CellSignallingOptimizerConfig + + pipeline_run, pipeline = run_simulator_pipeline( + arg_list, run_script_path, plot_script_path, loop_config_class, aml_arg_dict=azure_args + ) + + run_details = { + "ExperimentName": pipeline_run.experiment.name, + "RunId": pipeline_run._run_id, + "RunUrl": pipeline_run._run_details_url, + } + + print(f"Returning run details: \n{run_details}") + + return Response(json.dumps(run_details), mimetype="application/json") + else: + return {"filePath": None} # pragma: no cover + + +@app.route("/submit-iteration", methods=["GET", "POST"]) # type: ignore +def submit_iteration_form(): # pragma: no cover + # TODO: kick off new iteration + data = request.get_data() + print(data) + return data + + +@app.route("/submit-clone/", methods=["GET", "POST"]) # type: ignore +def submit_clone_form(aml_run_id): # pragma: no cover + """ + Repeat a previously submitted Azure ML experiment Run. + + Args: + aml_run_id: The Azure ML RunId of the Run to be repeated. 
+ + Returns: + [type]: [description] + """ + # TODO: kick off clone of previous experiment + data = request.get_data() + print(data) + return data + + +@app.route("/parse-aml-secrets", methods=["GET", "POST"]) # type: ignore +def parse_aml_secrets(): + """ + Upload user-defined file containing secrets necesary for submitting jobs to AML + (including subscription id, resource group name, workspace name and compute target name) + and store these secrets in + + Returns: + HTTP response from submitting the request to upload + """ + data = request.get_data() + + print(f"data dict: {data}") + + if request.method == "POST": + aml_secrets, filename = save_and_read_uploaded_file(request) + print(f"AML secrets: {aml_secrets}") + aml_secrets = aml_secrets["variables"] + subscription_id = aml_secrets["subscription_id"] + resource_group = aml_secrets["resource_group"] + workspace_name = aml_secrets["workspace_name"] + compute_target = aml_secrets["compute_target"] + # TODO: Store in props to be accessed when submitting experiment + + config = { + "SubscriptionId": subscription_id, + "ResourceGroup": resource_group, + "WorkspaceName": workspace_name, + "ComputeTarget": compute_target, + } + + return Response(json.dumps(config), mimetype="application/json") + + # Otherwise GET request + else: + return {"success": False} # pragma: no cover diff --git a/PyStationB/projects/Malvern/environment.yml b/PyStationB/projects/Malvern/environment.yml index 4686f13..15505ff 100644 --- a/PyStationB/projects/Malvern/environment.yml +++ b/PyStationB/projects/Malvern/environment.yml @@ -6,3 +6,7 @@ dependencies: - pip==20.1.1 - pip: - -r requirements_dev.txt + - ../../libraries/ABEX + - ../../libraries/Emukit + - ../../libraries/PyBCKG + - ../../libraries/Utilities \ No newline at end of file diff --git a/PyStationB/projects/Malvern/package.json b/PyStationB/projects/Malvern/package.json index eab179e..75ade01 100644 --- a/PyStationB/projects/Malvern/package.json +++ 
b/PyStationB/projects/Malvern/package.json @@ -14,7 +14,7 @@ "@types/react-redux": "7.1.16", "@types/react-router-dom": "5.1.5", "axios": "^0.21.1", - "azure-storage": " 2.10.3", + "azure-storage": "^2.10.4", "react": "^17.0.1", "react-app-polyfill": "1.0.6", "react-bootstrap": "^1.5.2", @@ -31,7 +31,7 @@ }, "scripts": { "start": "react-scripts start", - "start-api": "cd api && MalvernEnv\\Scripts\\python -m flask run", + "start-api": "flask run", "build": "react-scripts build", "test": "react-scripts test", "lint": "eslint src/**/*.{ts,tsx,js,jsx}", diff --git a/PyStationB/projects/Malvern/public/index.html b/PyStationB/projects/Malvern/public/index.html index 54a083b..087e129 100644 --- a/PyStationB/projects/Malvern/public/index.html +++ b/PyStationB/projects/Malvern/public/index.html @@ -2,7 +2,9 @@ - + diff --git a/PyStationB/projects/Malvern/requirements_dev.txt b/PyStationB/projects/Malvern/requirements_dev.txt index bc04b49..4a8e28c 100644 --- a/PyStationB/projects/Malvern/requirements_dev.txt +++ b/PyStationB/projects/Malvern/requirements_dev.txt @@ -1 +1,12 @@ -r requirements.txt +black==21.7b0 +docutils==0.16 +flake8==3.9.2 +interrogate==1.4.0 +mypy==0.910 +psutil==5.7.1 +pytest==6.2.4 +pytest-cov==2.12.1 +pytest-testmon==1.1.1 +pytest-timeout==1.4.2 +pytest-xdist==2.3.0 diff --git a/PyStationB/projects/Malvern/src/actions/ConfigActions.ts b/PyStationB/projects/Malvern/src/actions/ConfigActions.ts index 67719c8..e7a6ee8 100644 --- a/PyStationB/projects/Malvern/src/actions/ConfigActions.ts +++ b/PyStationB/projects/Malvern/src/actions/ConfigActions.ts @@ -8,7 +8,7 @@ import { GET_CONFIGS_FAIL, GET_CONFIGS_SUCCESS, UPLOAD_CONFIG_FAIL, UPLOAD_CONFI /// GET CONFIG OPTIONS /// function getConfigOptions(apiUrl: string, connectionString: string) { return axios.get( - apiUrl + '/get-dataset-options', + apiUrl + '/get-config-options', { headers: { storageConnectionString: connectionString } } ) } @@ -38,20 +38,23 @@ export function 
GetConfigOptionsActionCreator(apiUrl: string) { /// UPLOAD CONFIG /// function uploadConfig(apiUrl: string, connectionString: string, uploadedConfig: File, binary: string) { - + const data = new FormData() + data.append('file', uploadedConfig) const response = axios.post( apiUrl + '/upload-config-data', + data, { headers: { storageConnectionString: connectionString, fileName: uploadedConfig.name || "", - config: binary + // config: binary } } ) console.log('response from upoad config: ', response) return response + } export function UploadConfigActionCreator(apiUrl: string, formData: FormData) { @@ -63,6 +66,8 @@ export function UploadConfigActionCreator(apiUrl: string, formData: FormData) { getFileFromInput(uploadedConfig) .then(binary => { + console.log('returning binary', binary) + console.log('returning config', uploadedConfig ) return uploadConfig(apiUrl, connectionString, uploadedConfig, binary) .then((response) => dispatch({ diff --git a/PyStationB/projects/Malvern/src/actions/DatasetActionTypes.ts b/PyStationB/projects/Malvern/src/actions/DatasetActionTypes.ts index c936c7b..e7c1687 100644 --- a/PyStationB/projects/Malvern/src/actions/DatasetActionTypes.ts +++ b/PyStationB/projects/Malvern/src/actions/DatasetActionTypes.ts @@ -1,4 +1,4 @@ -import { IDataset } from "../components/Interfaces" +import { IAMLConfig, IDataset } from "../components/Interfaces" /// DATASETS /// @@ -53,4 +53,35 @@ export interface IUploadDatasetFail { } } -export type UploadDatasetDispatchType = IUploadingDataset | IUploadDatasetSuccess | IUploadDatasetFail \ No newline at end of file +export type UploadDatasetDispatchType = IUploadingDataset | IUploadDatasetSuccess | IUploadDatasetFail + + +// PARSE AML SECRETS +export const PARSING_AML_SECRETS = "PARSING_AML_SECRETS" +export const PARSE_AML_SECRETS_FAIL = "PARSE_AML_SECRETS_FAIL" +export const PARSE_AML_SECRETS_SUCCESS = "PARSE_AML_SECRETS_SUCCESS" + + +export interface IAMLConnectionResult { + aml_config?: IAMLConfig +} + 
+export interface IParsingAMLSecrets { + type: typeof PARSING_AML_SECRETS +} + +export interface IParseAMLSecretsFail { + type: typeof PARSE_AML_SECRETS_FAIL + payload: { + response: any + } +} + +export interface IParseAMLSecretsSuccess { + type: typeof PARSE_AML_SECRETS_SUCCESS + payload: { + aml_config: IAMLConfig + } +} + +export type ParseAMLSecretsDispatchType = IParsingAMLSecrets | IParseAMLSecretsSuccess | IParseAMLSecretsFail diff --git a/PyStationB/projects/Malvern/src/actions/DatasetActions.ts b/PyStationB/projects/Malvern/src/actions/DatasetActions.ts index a316d1f..ea3e381 100644 --- a/PyStationB/projects/Malvern/src/actions/DatasetActions.ts +++ b/PyStationB/projects/Malvern/src/actions/DatasetActions.ts @@ -3,7 +3,7 @@ import { Dispatch } from "redux"; import { IDataset } from "../components/Interfaces"; import { IAppState } from "../reducers/RootReducer"; import { getFileFromInput } from "./actionUtils"; -import { GET_DATASETS_FAIL, GET_DATASETS_SUCCESS, UPLOAD_DATASET_FAIL, UPLOAD_DATASET_SUCCESS } from "./DatasetActionTypes"; +import { GET_DATASETS_FAIL, GET_DATASETS_SUCCESS, PARSE_AML_SECRETS_FAIL, PARSE_AML_SECRETS_SUCCESS, UPLOAD_DATASET_FAIL, UPLOAD_DATASET_SUCCESS } from "./DatasetActionTypes"; /// GET DATASETS /// @@ -40,20 +40,24 @@ export function GetDatasetOptionsActionCreator(apiUrl: string, connectionString: /// UPLOAD OBSERVATIONS /// function uploadObservations(apiUrl: string, connectionString: string, uploadedObservations: File, binary: string) { + const data = new FormData() + data.append('file', uploadedObservations) const response = axios.post( apiUrl + '/upload-observation-data', + data, { headers: { storageConnectionString: connectionString, fileName: uploadedObservations.name || "", - observations: binary + // observations: binary } - } + }, ) console.log('response from upload observations: ', response) return response } + export function UploadObservationsActionCreator(apiUrl: string, formData: FormData) { return function 
(dispatch: Dispatch, getState: () => IAppState) { const connectionString = getState().connectionState.connection?.connectionString || "" @@ -61,8 +65,11 @@ export function UploadObservationsActionCreator(apiUrl: string, formData: FormDa const uploadedObservations = formData.get('uploadObservations') as File console.log('observations in action: ', uploadedObservations) + // load data from file as binary getFileFromInput(uploadedObservations) .then(binary => { + console.log('returning binary: ', binary) + console.log('uploaded observations: ', uploadedObservations) return uploadObservations(apiUrl, connectionString, uploadedObservations, binary) .then((response) => dispatch({ @@ -77,5 +84,62 @@ export function UploadObservationsActionCreator(apiUrl: string, formData: FormDa }) + }; +} + +// TODO: move this +// PARSE AML FILE + + +function parseAMLFile(apiUrl: string, connectionString: string, uploadedAMLSecrets: File, binary: string) { + const data = new FormData() + data.append('file', uploadedAMLSecrets) + const response = axios.post( + apiUrl + '/parse-aml-secrets', + data, + { + headers: { + storageConnectionString: connectionString, + + // observations: binary + } + }, + ) + console.log('response from parse AML secrets: ', response) + return response + +} + + + +export function parseAMLFileActionCreator(apiUrl: string, formData: FormData){ + return function (dispatch: Dispatch, getState: () => IAppState) { + const connectionString = getState().connectionState.connection?.connectionString || "" + + const uploadedAMLFile = formData.get('uploadAMLSecrets') as File + console.log('Uploaded AML secrets in action: ', uploadedAMLFile) + + // load data from file as binary + getFileFromInput(uploadedAMLFile) + .then(binary => { + console.log('returning binary: ', binary) + console.log('uploaded AML File: ', uploadedAMLFile) + return parseAMLFile(apiUrl, connectionString, uploadedAMLFile, binary) + .then((aml_config) => + + dispatch({ + type: PARSE_AML_SECRETS_SUCCESS, + 
payload: { + aml_config: aml_config + } + }), + (error) => dispatch({ + type: PARSE_AML_SECRETS_FAIL, + error: error + }), + ); + }) + + }; } \ No newline at end of file diff --git a/PyStationB/projects/Malvern/src/actions/ExperimentActionTypes.ts b/PyStationB/projects/Malvern/src/actions/ExperimentActionTypes.ts index f9d7ec9..2d8584e 100644 --- a/PyStationB/projects/Malvern/src/actions/ExperimentActionTypes.ts +++ b/PyStationB/projects/Malvern/src/actions/ExperimentActionTypes.ts @@ -7,7 +7,7 @@ export const GET_EXPERIMENT_OPTIONS_FAIL = "GET_EXPERIMENTS_FAIL" export const GET_EXPERIMENT_OPTIONS_SUCCESS = "GET_EXPERIMENTS_SUCCESS" export interface IGetExperimentOptionsResult { - experiment_options?: IPyBCKGExperiment[] + experiment_options?: any[] } export interface GettingExperiments { @@ -90,8 +90,7 @@ export const SUBMIT_EXPERIMENT_FAIL = "SUBMIT_EXPERIMENT_FAIL" export const SUBMIT_EXPERIMENT_SUCCESS = "SUBMIT_EXPERIMENT_SUCCESS" export interface ISubmitExperimentResult { - configPath: string, - observationsPath: string + amlRun?: IAMLRun } export interface ISubmittingExperiment { @@ -100,6 +99,9 @@ export interface ISubmittingExperiment { export interface ISubmitExperimentSuccess { type: typeof SUBMIT_EXPERIMENT_SUCCESS, + payload: { + amlRun: IAMLRun + } } export interface ISubmitExperimentFail { diff --git a/PyStationB/projects/Malvern/src/actions/ExperimentActions.ts b/PyStationB/projects/Malvern/src/actions/ExperimentActions.ts index 7e648c9..75ff572 100644 --- a/PyStationB/projects/Malvern/src/actions/ExperimentActions.ts +++ b/PyStationB/projects/Malvern/src/actions/ExperimentActions.ts @@ -1,11 +1,8 @@ import axios from "axios"; import { Dispatch } from "redux"; -import { IExperimentResult, IPyBCKGExperiment, IAMLRun } from "../components/Interfaces"; -import { - GET_AML_RUNIDS_FAIL, - GET_AML_RUNIDS_SUCCESS, - GET_EXPERIMENT_OPTIONS_FAIL, GET_EXPERIMENT_OPTIONS_SUCCESS, GET_EXPERIMENT_RESULTS_FAIL, GET_EXPERIMENT_RESULTS_SUCCESS, 
SUBMIT_EXPERIMENT_FAIL, SUBMIT_EXPERIMENT_SUCCESS -} from "./ExperimentActionTypes"; +import { IExperimentResult, IPyBCKGExperiment, IAMLRun, IAMLConfig } from "../components/Interfaces"; +import {GET_AML_RUNIDS_FAIL,GET_AML_RUNIDS_SUCCESS, GET_EXPERIMENT_OPTIONS_FAIL, GET_EXPERIMENT_OPTIONS_SUCCESS, GET_EXPERIMENT_RESULTS_FAIL, + GET_EXPERIMENT_RESULTS_SUCCESS, SUBMIT_EXPERIMENT_FAIL, SUBMIT_EXPERIMENT_SUCCESS} from "./ExperimentActionTypes"; import { IAppState } from "../reducers/RootReducer"; @@ -22,8 +19,6 @@ export function GetExperimentOptionsActionCreator(api_url: string) { // When this function is passed to `dispatch`, the thunk middleware will intercept it, // and call it with `dispatch` and `getState` as arguments. return function (dispatch: Dispatch, getState: () => IAppState) { - //const tableService = getState().connectionState.connection?.tableService - //console.log('table service within experiment action creator: ', tableService) const connectionString = getState().connectionState.connection?.connectionString || "" return getExperimentOptions(api_url, connectionString).then( @@ -104,34 +99,41 @@ export function GetExperimentResultActionCreator(api_url: string, selectedExperi } // SUBMIT NEW EXPERIMENT // -export function submitNewExperiment(api_url: string, connectionString: string, configPath:string, observationsPath:string) { - return axios.post( - api_url + '/submit-new-experiment', - { - headers: { - storageConnectionString: connectionString, - configPath: configPath, - observationsPath: observationsPath +export function submitNewExperiment(api_url: string, connectionString: string, configPath:string, + observationsPath:string, amlConfig: IAMLConfig) { + + const response = axios.post( + api_url + '/submit-new-experiment', + { + headers: { + storageConnectionString: connectionString, + configPath: configPath, + observationsPath: observationsPath, + amlConfig: amlConfig + } } - } - ) + ); + console.log("Response from submit new experiment: ", 
response); + return response; } export function SubmitExperimentActionCreator(api_url: string, formData: FormData) { return function (dispatch: Dispatch, getState: () => IAppState) { - const connectionString = getState().connectionState.connection?.connectionString || "" + const connectionString = getState().connectionState.connection?.connectionString || ""; const configPath = formData.get("configPath") as string const observationsPath = formData.get("observationsPath") as string + const emptyAMLConfig: IAMLConfig = {SubscriptionId: "", ResourceGroup: "", WorkspaceName: "", ComputeTarget: ""} + const amlConfig = getState().amlConfigState.amlConfigResult?.aml_config || emptyAMLConfig - return submitNewExperiment(api_url, connectionString, configPath, observationsPath).then( + return submitNewExperiment(api_url, connectionString, configPath, observationsPath, amlConfig).then( (res) => (res.data)) - .then((response) => + .then((newExperimentResponse) => dispatch({ type: SUBMIT_EXPERIMENT_SUCCESS, payload: { - experiment_result: response + amlRun: newExperimentResponse } }), (error) => dispatch({ diff --git a/PyStationB/projects/Malvern/src/components/Interfaces.tsx b/PyStationB/projects/Malvern/src/components/Interfaces.tsx index e5a4b3a..59fd48e 100644 --- a/PyStationB/projects/Malvern/src/components/Interfaces.tsx +++ b/PyStationB/projects/Malvern/src/components/Interfaces.tsx @@ -71,4 +71,17 @@ export interface IAbexConfig { PathToBlob: string, ConfigName: string, Timestamp: string +} + +export interface IAMLConfig { + SubscriptionId: string, + ResourceGroup: string, + WorkspaceName: string, + ComputeTarget: string +} + +export interface IAMLRun { + ExperimentName: string, + RunId: string, + RunUrl: string } \ No newline at end of file diff --git a/PyStationB/projects/Malvern/src/components/utils/validators.tsx b/PyStationB/projects/Malvern/src/components/utils/validators.tsx new file mode 100644 index 0000000..30bb74b --- /dev/null +++ 
b/PyStationB/projects/Malvern/src/components/utils/validators.tsx @@ -0,0 +1,16 @@ +import { IFormValues } from "./FormShared" + +export const isYaml = (fieldName: string, values: IFormValues): string => { + if (values[fieldName]) { + const selectedFile = values[fieldName] + console.log('selected file: ', selectedFile) + const fileType = selectedFile.type + if (fileType == 'application/yml') { + return "" + } else { + return "Please ensure file is .yml type" + } + } + console.log('No value for fieldname found in ', values) + return "Something went wrong - no file found"; +} diff --git a/PyStationB/projects/Malvern/src/pages/CloneExperimentPage.tsx b/PyStationB/projects/Malvern/src/pages/CloneExperimentPage.tsx index 44fa08c..595046f 100644 --- a/PyStationB/projects/Malvern/src/pages/CloneExperimentPage.tsx +++ b/PyStationB/projects/Malvern/src/pages/CloneExperimentPage.tsx @@ -7,11 +7,7 @@ import "../index.css" import { api_url } from "../components/utils/api"; import { connector, PropsFromRedux } from "../store/connectors"; import { IErrors, IFormValues } from "../components/utils/FormShared"; - -//interface ICloneState { -// experimentOptions: IPyBCKGExperiment[] -// selectedExperiment: IPyBCKGExperiment -//} +import { Container, Form } from "react-bootstrap"; interface IProps extends PropsFromRedux{ onSubmit: (values: IFormValues) => Promise; @@ -23,7 +19,7 @@ class CloneExperimentPage extends React.Component { super(props) this.state = { values: { - experimentOptions: [{}] as IPyBCKGExperiment[], + experimentOptions: [] , selectedExperiment: {} as IPyBCKGExperiment, }, errors: {}, @@ -45,13 +41,14 @@ class CloneExperimentPage extends React.Component { async getOptions() { this.props.getExperimentOptions(api_url) - const experimentOptions = this.props.getExperimentOptionsResult - const experiment_options = experimentOptions?.experiment_options || this.state.values.experimentOptions + const experimentOptionsResult = this.props.getExperimentOptionsResult + const 
experimentOptions = experimentOptionsResult?.experiment_options // TODO: update value of experiment_options in state? - - this.setValue("experimentOptions", experiment_options) - this.setValue("selectedExperiment", experiment_options[0]) + if (experimentOptions){ + this.setValue("experimentOptions", experimentOptions) + this.setValue("selectedExperiment", experimentOptions[0]) + } } @@ -85,57 +82,53 @@ class CloneExperimentPage extends React.Component { public render() { if (!this.props.connection?.connected) { return ( -
+

You are not connected. Please go to log in to create an experiment

-
+ ) } else { - const experimentOptions: IPyBCKGExperiment[] = this.state.values.experimentOptions if (this.state.submitted) { return ( -
+

Experiment submitted.

Your unique experiment id is ABC123.

Please make a note of this and check the Previous Experiments tab in a few hours.

-
+ ) } else { + const experimentOptionsResult = this.props.getExperimentOptionsResult + const experimentOptions: IPyBCKGExperiment[] = experimentOptionsResult?.experiment_options || this.state.values.experiment_options || [] + + return ( -
+

Repeat a previous experiment

-
- -
-
- - -
+ Select an Experiment: + +
+ +
-
+ ) } } } public handleSubmit = async (event: React.FormEvent) => { + // TODO: Create CloneExperiment action and replace this method event.preventDefault(); const response = axios.post<{ string: string }>( diff --git a/PyStationB/projects/Malvern/src/pages/NewExperimentFormPage.tsx b/PyStationB/projects/Malvern/src/pages/NewExperimentFormPage.tsx index e98753d..bdfe9cb 100644 --- a/PyStationB/projects/Malvern/src/pages/NewExperimentFormPage.tsx +++ b/PyStationB/projects/Malvern/src/pages/NewExperimentFormPage.tsx @@ -1,15 +1,14 @@ import React from "react" import { ISubmitResult, IFormState } from "../components/utils/Form" -import axios from "axios"; import UploadBox from "../components/utils/UploadBox"; import ExperimentHeader from "../components/RunExperiment/ExperimentTypeHeader"; import { IConfig } from "../components/Interfaces"; -//import "../index.css" import { api_url } from "../components/utils/api"; +import { isYaml } from "../components/utils/validators"; import { connector, PropsFromRedux } from "../store/connectors"; import { IErrors, IFormValues } from "../components/utils/FormShared"; import { IValidationProp } from "../components/utils/Validation"; -import { Container, Form, FormGroup } from "react-bootstrap"; +import { Container, Form } from "react-bootstrap"; interface IProps extends PropsFromRedux { onSubmit: (values: IFormValues) => Promise; @@ -23,21 +22,6 @@ interface IObservationFile { dateUploaded: string } -const isYaml = (fieldName: string, values: IFormValues): string => { - if (values[fieldName]) { - const selectedFile = values[fieldName] - console.log('selected file: ', selectedFile) - const fileType = selectedFile.type - if (fileType == 'application/yml') { - return "" - } else { - return "Please ensure file is .yml type" - } - } - console.log('No value for fieldname found in ', values) - return "Something went wrong - no file found"; -} - class NewExperimentFormPage extends React.Component { public constructor(props: IProps) { @@ 
-69,7 +53,7 @@ class NewExperimentFormPage extends React.Component { async getOptions() { this.props.getConfigOptions(api_url) - const cfgRes = this.props.getConfigOptionsResult //await axios.get(api_url + '/get-config-options') + const cfgRes = this.props.getConfigOptionsResult const configs = cfgRes?.config_options this.setValue("configOptions", configs) @@ -80,7 +64,6 @@ class NewExperimentFormPage extends React.Component { if (selectedExpType) { console.log('selected experiment type: ') console.log(selectedExpType) - //this.setState({ selectedExperiment: selectedExperiment }) this.setValue("selectedExperimentType", selectedExpType) } } @@ -118,14 +101,20 @@ class NewExperimentFormPage extends React.Component { ) } else { - - if (this.state.submitted) { + const submitNewExperimentResult = this.props.submitNewExperimentResult + const amlRun = submitNewExperimentResult?.amlRun + console.log("AML run: ", amlRun) + if (amlRun != undefined) { + const amlRunId = amlRun.RunId || "" + const amlUrl = amlRun.RunUrl || "" return ( -
+

Experiment submitted.

-

Your unique experiment id is ABC123.

+

Your unique experiment id is {amlRunId}.

+

See the status of your Run Here

Please make a note of this and check the Previous Experiments tab in a few hours.

-
+

Or to start a new experiment, hit refresh.

+ ) } else { const configOptions: IConfig[] = this.state.values.configOptions @@ -133,8 +122,8 @@ class NewExperimentFormPage extends React.Component { -

Start a new iteration for an existing track

-
+

Start a new experiment

+ {/* -
-

or

+ */} + {/*

or

*/} Upload new config (.yml) - -
-

Upload observation file (.csv)

+ + Upload observation file (.csv) -
- + Azure ML connection secrets (.yml) + +
-
- -
+
{this.props.error &&

Error { this.props.error.response.status }: {this.props.error.response.data.reason}

}
-
-
) @@ -228,7 +207,7 @@ class NewExperimentFormPage extends React.Component { private uploadConfigFile = async (fileSelected: File): Promise => { // Upload the .yml to Azure Storage, and also to local memory, for submitting experiment const formData = new FormData(); - console.log('file seelcted going into form data: ', fileSelected) + if (fileSelected && fileSelected.name) { formData.append( @@ -238,11 +217,31 @@ class NewExperimentFormPage extends React.Component { ); } - //for (const key of formData.entries()) { - // console.log(key[0] + ', ' + key[1]); - //} + console.log('file selected going into form data: ', fileSelected) this.props.uploadConfig(api_url, formData) + if (this.props.error) { + return {success: false} + } + return { success: true }; + + }; + + private parseAMLSecrets = async (fileSelected: File): Promise => { + // Parse AML secrets file to connect to AML in order to submit experiments + const formData = new FormData(); + console.log('file selected going into form data: ', fileSelected) + + if (fileSelected && fileSelected.name) { + formData.append( + "uploadAMLSecrets", + fileSelected, + fileSelected.name + ); + } + + this.props.parseAMLFile(api_url, formData) + console.log('props after upload: ', this.props) if (this.props.error) { @@ -250,11 +249,12 @@ class NewExperimentFormPage extends React.Component { } return { success: true }; - }; + } public handleSubmit = async (event: React.FormEvent) => { event.preventDefault(); + console.log("State in handleSubmit", this.state) // Upload boxes are separate forms, so config and observations wont be in event target const filledform = event.currentTarget const selectedConfigPath = this.props.uploadConfigResult.filePath @@ -281,10 +281,13 @@ class NewExperimentFormPage extends React.Component { this.props.submitNewExperiment(api_url, formData) + if (this.props.error) { + return { success: false } + } else { + this.setState({'submitted': true}) return { success: true } } - return { success: false } } } 
diff --git a/PyStationB/projects/Malvern/src/pages/NewIterationPage.tsx b/PyStationB/projects/Malvern/src/pages/NewIterationPage.tsx index ba2adcc..b2ae9b8 100644 --- a/PyStationB/projects/Malvern/src/pages/NewIterationPage.tsx +++ b/PyStationB/projects/Malvern/src/pages/NewIterationPage.tsx @@ -1,7 +1,7 @@ import axios from "axios"; import React from "react" -import { IPyBCKGExperiment } from "../components/Interfaces"; -import { IFormContext, IFormState, ISubmitResult } from "../components/utils/Form"; +import { IAbexConfig, IPyBCKGExperiment } from "../components/Interfaces"; +import { IFormContext, IFormState, ISubmitResult } from "../components/utils/Form"; import UploadBox from "../components/utils/UploadBox"; import ExperimentHeader from "../components/RunExperiment/ExperimentTypeHeader"; import "../index.css" @@ -9,6 +9,8 @@ import { api_url } from "../components/utils/api"; import { connector, PropsFromRedux } from "../store/connectors"; import { IValidationProp } from "../components/utils/Validation"; import { IErrors, IFormValues } from "../components/utils/FormShared"; +import { Container, Form } from "react-bootstrap"; +import { isYaml } from "components/utils/validators"; interface IProps extends PropsFromRedux{ onSubmit: (values: IFormValues) => Promise; @@ -16,11 +18,6 @@ interface IProps extends PropsFromRedux{ submitted: boolean } -interface IConfig { - id: string, - name: string -} - interface IObservationFile { id: string, name: string, @@ -53,9 +50,9 @@ class NewIterationPage extends React.Component { values: { selectedIteration: "", iterationOptions: [""], - experimentOptions: [{}] as IPyBCKGExperiment[], + experimentOptions: [], selectedExperiment: {} as IPyBCKGExperiment, - selectedConfig: {} as IConfig, + selectedConfig: {} as IAbexConfig, uploadedCSV: {} as IObservationFile, configOptions: [] }, @@ -71,20 +68,26 @@ class NewIterationPage extends React.Component { } async getOptions() { - //const expRes = await axios.get(api_url + 
'/get-experiment-options') - //const experiments = expRes.data this.props.getExperimentOptions(api_url) + const experimentOptionsResult = this.props.getExperimentOptionsResult + const experimentOptions = experimentOptionsResult?.experiment_options + + if (experimentOptions){ + this.setValue("experimentOptions", experimentOptions) + this.setValue("selectedExperiment", experimentOptions[0]) + } + + this.props.getConfigOptions(api_url) + const cfgRes = this.props.getConfigOptionsResult + const config_options = cfgRes?.config_options + console.log("Config options: ", config_options) - const experimentOptions = this.props.getExperimentOptionsResult - const experiment_options = experimentOptions?.experiment_options || this.state.values.experimentOptions - // TODO: replace with call in middleware - const cfgRes = await axios.get(api_url + '/get-config-options') - const configs = cfgRes.data + if (cfgRes){ + console.log("Updating config options") + this.setValue("configOptions", config_options) - this.setValue("experimentOptions", experiment_options) - this.setValue("selectedExperiment", experiment_options[0]) - this.setValue("configOptions", configs) + } } private changeExperimentFields = (e: React.ChangeEvent) => { @@ -102,8 +105,8 @@ class NewIterationPage extends React.Component { private changeConfigFields = (e: React.ChangeEvent) => { const selectedConfigName = e.target.value; - const configOptions: IConfig[] = this.state.values.configOptions - const selectedConfig = configOptions.find(cfg => cfg.name === selectedConfigName) + const configOptions: IAbexConfig[] = this.state.values.configOptions + const selectedConfig = configOptions.find(cfg => cfg.ConfigName === selectedConfigName) if (selectedConfig) { console.log('selected config: ') console.log(selectedConfig) @@ -147,83 +150,69 @@ class NewIterationPage extends React.Component { }; if (this.state.submitted) { return ( -
+

Experiment submitted.

Your unique experiment id is ABC123.

Please make a note of this and check the Previous Experiments tab in a few hours.

-
+ ) } else { - const experimentOptions: IPyBCKGExperiment[] = this.state.values.experimentOptions - const configOptions: IConfig[] = this.state.values.configOptions + console.log("state: ", this.state) + const experimentOptionsResult = this.props.getExperimentOptionsResult + const experimentOptions: IPyBCKGExperiment[] = experimentOptionsResult?.experiment_options || this.state.values.experimentOptions + + const configOptionsResult = this.props.getConfigOptionsResult + const configOptions: IAbexConfig[] = configOptionsResult?.config_options || this.state.values.configOptions return ( -
+

Start a new iteration for an existing track

-
- -
-
-
-

Select or upload config

- -
-
-

Upload new config (.yml)

- -
-
-

Upload observation file (.csv)

- -
-
-
- - -
+ Select an Experiment: + +
+ Select an existing config: + +
+ OR Upload new config (.yml) + + Upload observation file (.csv) + +
+ + -
+ ) } @@ -286,16 +275,12 @@ class NewIterationPage extends React.Component { ); } - const response = axios.post(api_url + "/upload-config-data", formData - ).then(res => { - // Set value of 'config' in state - const newConfig = {'name': fileSelected.name} - this.setValue('selectedConfig', newConfig) - return { success: true }; - }); - + this.props.uploadConfig(api_url, formData) - return { success: false }; + if (this.props.error) { + return {success: false} + } + return { success: true }; }; @@ -305,26 +290,24 @@ class NewIterationPage extends React.Component { if (fileSelected && fileSelected.name) { formData.append( - "uploadedCSV", + "uploadObservations", fileSelected, fileSelected.name ); } - const response = axios.post(api_url + "/upload-observation-data", formData - ).then(res => { - const newCSV = { 'name': fileSelected.name } - this.setValue('uploadedCSV', newCSV) - return { success: true }; - }); - + this.props.uploadObservations(api_url, formData) - return { success: false }; + if (this.props.error) { + return { success: false } + } + return { success: true }; }; public handleSubmit = async (event: React.FormEvent) => { + // TODO: create SubmitNewIterationAction and replace this method event.preventDefault(); const response = axios.post<{ string: string }>( diff --git a/PyStationB/projects/Malvern/src/pages/ViewExperimentResultsPage.tsx b/PyStationB/projects/Malvern/src/pages/ViewExperimentResultsPage.tsx index 840c2ad..125e54c 100644 --- a/PyStationB/projects/Malvern/src/pages/ViewExperimentResultsPage.tsx +++ b/PyStationB/projects/Malvern/src/pages/ViewExperimentResultsPage.tsx @@ -1,9 +1,7 @@ -import axios from "axios"; import React from "react" import { IAMLRun, IExperimentResult, IPyBCKGExperiment } from "../components/Interfaces"; import { ISubmitResult } from "../components/utils/Form"; import Tabs from "../components/utils/Tabs"; -import "../index.css" import { api_url } from "../components/utils/api"; import { connector, PropsFromRedux } from 
"../store/connectors"; import { IFormValues } from "../components/utils/FormShared"; @@ -124,40 +122,53 @@ class ExperimentSelectorPage extends React.Component async getOptions() { this.props.getExperimentOptions(api_url) - this.props.getAMLRunIdOptions(api_url) + // this.props.getAMLRunIdOptions(api_url) - console.log('props after calling getAMLRunIdOptions: ', this.props) const amlRunOptions = this.props.getAMLRunIdsResult const aml_runid_options = amlRunOptions?.aml_run_ids if (aml_runid_options) { this.setState({ aml_runid_options: aml_runid_options }) } - const experimentOptions = this.props.getExperimentOptionsResult - const experiment_options = experimentOptions?.experiment_options + const experimentOptionsResult = this.props.getExperimentOptionsResult + const experimentOptions= experimentOptionsResult?.experiment_options + + if (experimentOptions) { + this.setState({experimentOptions: experimentOptions}) + console.log(this.state) + + const selectedExperiment: IPyBCKGExperiment = experimentOptions[0] + console.log("Selected experiment: ", selectedExperiment) - if (experiment_options) { // To begin with, set a default selected experiment, so that iteration options update properly - const selectedExperiment: IPyBCKGExperiment = experiment_options[0] + this.setState({ + experimentOptions: experimentOptions, + selectedExperiment: selectedExperiment + }) + } - this.props.getExperimentResult(api_url, selectedExperiment) - const getExperimentResult = this.props.getExperimentResultResult - const experiment_result = getExperimentResult?.experiment_result + if (this.state.selectedExperiment.Name){ + this.props.getExperimentResult(api_url, this.state.selectedExperiment) + const getExperimentResultResult = this.props.getExperimentResultResult + const experimentResult = getExperimentResultResult?.experiment_result - const iterationOptions: string[] = experiment_result?.iterations || [] - const selectedIteration: string = iterationOptions[0] - const foldOptions = 
experiment_result?.folds || [] - const selectedFold: string = foldOptions[0] + if (experimentResult){ - this.setState({ - experimentOptions: experiment_options, - selectedExperiment: selectedExperiment, - iterationOptions: iterationOptions, - selectedIteration: selectedIteration, - foldOptions: foldOptions, - selectedFold: selectedFold - }) + const iterationOptions: string[] = experimentResult?.iterations || [] + const selectedIteration: string = iterationOptions[0] + const foldOptions = experimentResult?.folds || [] + const selectedFold: string = foldOptions[0] + + this.setState({ + + iterationOptions: iterationOptions, + selectedIteration: selectedIteration, + foldOptions: foldOptions, + selectedFold: selectedFold + }) + } } + } componentDidMount() { @@ -256,7 +267,10 @@ class ExperimentSelectorPage extends React.Component } public render() { - const experimentOptions: IPyBCKGExperiment[] = this.props.getExperimentOptionsResult?.experiment_options || this.state.experimentOptions + // TODO: ExperimentOptions should be replaced with completed AML runs but they currently don't have associated iterations, epochs etc + const experimentOptionsResult = this.props.getExperimentOptionsResult + const experimentOptions: IPyBCKGExperiment[] = experimentOptionsResult?.experiment_options || this.state.experimentOptions + const amlRunIdResult = this.props.getAMLRunIdsResult + const amlRunIdOptions = amlRunIdResult?.aml_run_ids || this.state.aml_runid_options @@ -272,13 +286,13 @@ class ExperimentSelectorPage extends React.Component

View experiment results