diff --git a/.gitignore b/.gitignore
index ff978ef..7c739b0 100644
--- a/.gitignore
+++ b/.gitignore
@@ -7,3 +7,6 @@
*.log*
**/secret/
*.sql
+id_rsa
+answer_*.csv
+location_*.csv
diff --git a/README.md b/README.md
index 9f16f18..f905006 100644
--- a/README.md
+++ b/README.md
@@ -15,10 +15,13 @@ English description:
- [Install PostgreSQL (administrator only)](#install-postgresql)
- [Setup back-end (administrator only)](#setup-back-end)
- [Setup development environment](#setup-dev-env)
+- [Docker files for setup back-end development environment](#docker-back-end-dev)
- [Manipulate database](#manipulate-database)
- [Test cases](#test-cases)
- [Deploy back-end using uwsgi (administrator only)](#deploy-back-end-using-uwsgi)
- [API calls](#api-calls)
+- [How to test back-end with FLASK](#back-end-web-test)
+- [Deploy and export data from server](#deploy_export_data)
# Coding standards
When contributing code for this repository, please follow the guidelines below:
@@ -395,3 +398,81 @@ $.ajax({
error: function (xhr) {console.error(xhr)}
});
```
+# Docker files for setup back-end development environment
+### Purpose
+- With the sample Docker file, back-end developers can prepare the working environment quickly without disturbing existing configurations. It is especially useful for Windows developers who need a Linux-based back-end development environment.
+### Steps for creating the working environment with Docker
+- First you need to install Docker.
+- Build the Docker image by executing the following command. The examples below were tested on Windows.
+```sh
+docker build -t spotdiff C:/source/SpotDiff/back-end/docker_env/
+```
+- Build a volume for postgresql-data
+```sh
+docker volume create --name postgresql-data
+```
+
+- Start the container we just built and mount the volume
+```sh
+# 5432 is the port on which the container exposes PostgreSQL
+docker run -d -p 5432:5432 --name spotdiff --restart always -v postgresql-data:/var/lib/postgresql/data -e POSTGRES_PASSWORD=xxxx -v c:/source/SpotDiff:/code/spotdiff spotdiff
+```
+- Enter a shell in the container we just started
+```sh
+docker exec -it spotdiff bash
+$su postgres
+$psql
+```
+
+# How to test back-end with FLASK
+### Purpose
+- Some useful commands are provided here for testing the back-end over HTTP before the client is ready.
+
+## Steps
+- First you need to make sure the FLASK is running.
+```sh
+FLASK_APP=application.py FLASK_ENV=development flask run
+```
+- Register a user
+```sh
+curl -d '{"client_id":"123"}' -H "Content-Type: application/json" -X POST http://localhost:8080/login/
+```
+- A response of user_token will be returned. For example:
+```sh
+{
+ "user_token": "XXXX"
+}
+```
+- Then you can copy the above user_token to make queries. For example:
+```sh
+curl -H "Content-Type: application/json" -X GET http://localhost:8080/status?user_token=XXXX
+```
+- An example of the response is like this:
+```sh
+{
+ "individual_done_count": 0,
+ "location_is_done_count": 0,
+ "user_count": 4
+}
+```
+- Test getting locations example:
+```sh
+curl -H "Content-Type: application/json" -X GET http://localhost:8080/location?size=5\&gold_standard_size=1\&user_token=XXXX
+```
+- Test submitting answer example:
+```sh
+curl -d '{"user_token":"XXXX","data":[{"location_id":1, "year_new":2007, "year_old":2020, "source_url_root":"www.test.org", "bbox_left_top_lat":24.0962704615941, "bbox_left_top_lng":"120.462878886353","bbox_bottom_right_lat":24.0962704615941, "bbox_bottom_right_lng":120.462878886353, "land_usage":1, "expansion":1, "zoom_level":0}, {"location_id":20, "year_new":2017,
+"year_old":2010, "source_url_root":"www.test.org", "bbox_left_top_lat":24.0962704615941, "bbox_left_top_lng":"120.462878886353","bbox_bottom_right_lat":24.0962704615941, "bbox_bottom_right_lng":120.462878886353, "land_usage":1, "expansion":1, "zoom_level":0}]}' -H "Content-Type: application/json" -X POST http://localhost:8080/answer/
+```
+
+# Deploy and export data from server
+### Import locations to database
+- back-end/www/util/location_import.py
+### Import gold standards from CSV file to database
+- back-end/www/util/import_gold_standards_from_csv.py
+### Export locations from database, including its done_at information, to a CSV file
+- back-end/www/util/export_locations.py
+### Clear done location flag (done_at) if we want more answers for a location
+- back-end/www/util/clear_location_done.py
+### Export all answers from database to a CSV file
+- back-end/www/util/export_answers.py
\ No newline at end of file
diff --git a/back-end/docker_env/Dockerfile b/back-end/docker_env/Dockerfile
new file mode 100644
index 0000000..719e022
--- /dev/null
+++ b/back-end/docker_env/Dockerfile
@@ -0,0 +1,32 @@
+#modified by sourbiebie for SpotDiff
+# Based on the postgres image (PostgreSQL + Linux); install Miniconda, Flask, and pip
+FROM postgres
+
+# System packages
+RUN apt-get update && apt-get install -y curl
+
+
+# Install miniconda to /miniconda
+RUN curl -LO https://repo.anaconda.com/miniconda/Miniconda3-py38_4.9.2-Linux-x86_64.sh
+RUN bash Miniconda3-py38_4.9.2-Linux-x86_64.sh -p /miniconda -b
+RUN rm Miniconda3-py38_4.9.2-Linux-x86_64.sh
+ENV PATH=/miniconda/bin:${PATH}
+RUN conda update -y conda
+
+# System packages
+RUN apt-get update && apt-get install -y \
+ vim \
+ git \
+ libpq-dev \
+ python-dev \
+ python3-pip
+
+
+ENV PYTHONUNBUFFERED 1
+RUN mkdir /code
+WORKDIR /code
+
+ADD requirements.txt /code/
+RUN pip install -r requirements.txt
+
+ADD . /code/
\ No newline at end of file
diff --git a/back-end/docker_env/requirements.txt b/back-end/docker_env/requirements.txt
new file mode 100644
index 0000000..83c5717
--- /dev/null
+++ b/back-end/docker_env/requirements.txt
@@ -0,0 +1,11 @@
+psycopg2
+flask==1.1.2
+flask-sqlalchemy==2.5.1
+flask-migrate==2.7.0
+flask-marshmallow==0.14.0
+marshmallow-sqlalchemy==0.24.2
+flask-cors==3.0.10
+marshmallow_enum==1.5.1
+flask-testing==0.8.1
+psycopg2-binary==2.8.6
+pyjwt==2.1.0
\ No newline at end of file
diff --git a/back-end/www/application.py b/back-end/www/application.py
index a12d6d2..f5ea702 100644
--- a/back-end/www/application.py
+++ b/back-end/www/application.py
@@ -16,7 +16,7 @@
app.register_blueprint(location_controller.bp, url_prefix="/location")
app.register_blueprint(status_controller.bp, url_prefix="/status")
app.register_blueprint(answer_controller.bp, url_prefix="/answer")
-CORS(app, resources={r"/.*": {"origins": ["https://disfactory-spotdiff.netlify.app"]}})
+CORS(app, resources={r"/.*": {"origins": ["https://disfactory-spotdiff.netlify.app", "https://netlify.app"]}})
# Set database migration
migrate = Migrate(app, db)
diff --git a/back-end/www/models/model.py b/back-end/www/models/model.py
index 5a957fd..d41aa6b 100644
--- a/back-end/www/models/model.py
+++ b/back-end/www/models/model.py
@@ -109,6 +109,7 @@ class Answer(db.Model):
0 meas golden standard provided by admin.
1 means because the user passes the golden test, the answer may be good quality.
2 means because the user failed the golden test, the answer may not be good quality.
+        3 means the gold standard answer was marked obsolete by the admin.
bbox_left_top_lat : float
The latitude of the top-left corner of the bounding box for displaying the focus.
bbox_left_top_lng : float
diff --git a/back-end/www/models/model_operations/answer_operations.py b/back-end/www/models/model_operations/answer_operations.py
index 8076c73..92e87c4 100644
--- a/back-end/www/models/model_operations/answer_operations.py
+++ b/back-end/www/models/model_operations/answer_operations.py
@@ -223,6 +223,44 @@ def remove_answer(answer_id):
db.session.commit()
+def set_answer(answer_id, new_status, land_usage, expansion):
+ """
+ Update an answer.
+
+ Parameters
+ ----------
+ answer_id : int
+ ID of the answer.
+ new_status : int
+        New gold standard status of the answer.
+    land_usage : int
+        New land_usage value for the answer.
+    expansion : int
+        New expansion value for the answer.
+
+    Returns
+    -------
+    answer : Answer, or None if no answer has the given ID.
+
+ Raises
+ ------
+ exception : Exception
+ When new_status is not an integer
+ """
+ if not isinstance(new_status, int):
+ raise Exception("The new_status shall be an integer")
+
+ answer = get_answer_by_id(answer_id)
+
+ if answer is not None:
+ answer.gold_standard_status = new_status
+ answer.land_usage = land_usage
+ answer.expansion = expansion
+ db.session.commit()
+
+ return answer
+
+
def exam_gold_standard(location_id, land_usage, expansion):
"""
Check the quality of the answer in comparison with the gold standard.
@@ -287,11 +325,17 @@ def is_answer_reliable(location_id, land_usage, expansion):
True : Matches another good answer candiate.
False : No other good answer candidates exist or match.
"""
+    # Number of existing matching good answers required before a new matching answer marks the location done.
+    # Currently 1, so a location is marked done once 2 matching reliable answers exist in total.
+    ANSWER_COUNTS_TO_MARK_LOC_DONE = 1
+
# If another user passed the gold standard quality test, and submitted an answer to the same location.
good_answer_candidates = Answer.query.filter_by(gold_standard_status=1, location_id=location_id, land_usage=land_usage, expansion=expansion).all()
# If the good answer candidate doesn't exist
- if len(good_answer_candidates) == 0:
+ #if len(good_answer_candidates) == 0: # 2 are considered as good, need at least 1
+ #if len(good_answer_candidates) < 2: # 3 are considered as good, need at least 2
+ if len(good_answer_candidates) < ANSWER_COUNTS_TO_MARK_LOC_DONE:
return False
else:
return True
diff --git a/back-end/www/tests/answer_tests.py b/back-end/www/tests/answer_tests.py
index 068011b..77cc6c7 100644
--- a/back-end/www/tests/answer_tests.py
+++ b/back-end/www/tests/answer_tests.py
@@ -341,6 +341,33 @@ def test_is_answer_reliable(self):
a4 = answer_operations.create_answer(user4.id, l1.id, 2000, 2010, "", 1, 1, PASS_GOLD_TEST,
0, 0, 0, 0, 0)
+
+ def test_set_answer(self):
+ """
+        Create a gold-standard answer, then change its status to disabled.
+        Fetch it again and check that gold_standard_status was updated.
+ """
+ IS_GOLD_STANDARD = 0
+ IS_DISABLED_GOLD_STANDARD = 3
+ BBOX_LEFT_TOP_LAT = 0.1
+ BBOX_LEFT_TOP_LNG = 0.2
+ BBOX_BOTTOM_RIGHT_LAT = 0.3
+ BBOX_BOTTOM_RIGHT_LNG = 0.4
+ user_admin = user_operations.create_user("ADMIN")
+ l1 = location_operations.create_location("AAA")
+
+ A_gold = answer_operations.create_answer(user_admin.id, l1.id, 2000, 2010, "", 0, 1, IS_GOLD_STANDARD,
+ BBOX_LEFT_TOP_LAT, BBOX_LEFT_TOP_LNG, BBOX_BOTTOM_RIGHT_LAT, BBOX_BOTTOM_RIGHT_LNG, 0)
+
+ assert(A_gold.gold_standard_status==IS_GOLD_STANDARD)
+
+ A_updated = answer_operations.set_answer(A_gold.id, IS_DISABLED_GOLD_STANDARD, 2, 0)
+
+ assert(A_updated.gold_standard_status==IS_DISABLED_GOLD_STANDARD)
+ assert(A_updated.land_usage==2)
+ assert(A_updated.expansion==0)
+
+
def test_batch_process_answers(self):
"""
Location #l1 has gold standard.
diff --git a/back-end/www/tests/location_tests.py b/back-end/www/tests/location_tests.py
index 753d66d..c9a2b7e 100644
--- a/back-end/www/tests/location_tests.py
+++ b/back-end/www/tests/location_tests.py
@@ -72,6 +72,9 @@ def test_set_location_done(self):
location_done = location_operations.set_location_done(location.id, True)
assert(location_done.done_at != None)
+
+ location_done = location_operations.set_location_done(location.id, False)
+ assert(location_done.done_at == None)
def test_get_locations(self):
"""
diff --git a/back-end/www/util/clear_location_done.py b/back-end/www/util/clear_location_done.py
new file mode 100644
index 0000000..c99bad5
--- /dev/null
+++ b/back-end/www/util/clear_location_done.py
@@ -0,0 +1,67 @@
+"""
+The script loads the CSV and clears the done_at of the listed locations in the location table.
+
+Config
+------
+CSV_FILE_NAME : The CSV file to be imported. Ensure the factory IDs are in column 0.
+CFG_NAME : The config name; can be Development or Production
+
+Output
+------
+The number of locations whose done_at flag was cleared.
+
+"""
+CSV_FILE_NAME = "production_20220505.csv"
+CFG_NAME = "config.config.DevelopmentConfig"
+
+import sys
+import os
+sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), "..")))
+import csv
+from models.model import db
+from models.model_operations import location_operations
+from config.config import Config
+from flask import Flask
+from controllers import root
+
+# init db
+app = Flask(__name__)
+app.register_blueprint(root.bp)
+app.config.from_object(CFG_NAME)
+db.init_app(app)
+app.app_context().push()
+
+# If need to re-create the tables
+#db.drop_all()
+#db.create_all()
+
+# open file for reading
+with open(CSV_FILE_NAME) as csvDataFile:
+
+ # read file as csv file
+ csvReader = csv.reader(csvDataFile)
+
+ # Skip the first row of the field name
+ next(csvReader)
+ loc_count = 0;
+
+    # For every row, clear the done_at flag of the location whose factory_id is in column 0
+ for row in csvReader:
+ location = location_operations.get_location_by_factory_id(row[0])
+ #print("location is", row[0])
+ if location is None:
+ #location_operations.create_location(row[0])
+ print("Cannot find the location ", row[0])
+ #loc_count = loc_count + 1
+ else:
+ location_operations.set_location_done(location.id, False)
+ loc_count = loc_count + 1
+
+
+
+print("reset locations done numbers:", loc_count)
+#count = location_operations.get_location_count()
+#print("Location count is ", count)
+
+db.session.remove()
+db.session.close()
diff --git a/back-end/www/util/export_locations.py b/back-end/www/util/export_locations.py
new file mode 100644
index 0000000..f32fc8f
--- /dev/null
+++ b/back-end/www/util/export_locations.py
@@ -0,0 +1,53 @@
+"""
+The script exports the location table to a CSV file.
+
+Config
+------
+CFG_NAME : The config name; can be Development or Production
+
+Output
+------
+The full location table in a CSV file. (location_YYYY_MM_DD_HH_mm_ss.csv)
+
+"""
+#CFG_NAME = "config.config.DevelopmentConfig"
+CFG_NAME = "config.config.ProductionConfig"
+
+import sys
+import os
+sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), "..")))
+import csv
+from models.model import db
+from models.model import Location
+from models.model_operations import location_operations
+from models.model_operations import user_operations
+from config.config import Config
+from flask import Flask
+from controllers import root
+import datetime
+
+# init db
+app = Flask(__name__)
+app.register_blueprint(root.bp)
+app.config.from_object(CFG_NAME)
+db.init_app(app)
+app.app_context().push()
+
+cvs_file_name = "location_" + datetime.datetime.today().strftime("%Y_%m_%d_%H_%M_%S") + ".csv"
+print("Exporting answers to " + cvs_file_name + "...")
+
+# Get all locations, ordered by factory_id
+location_query = Location.query.order_by(Location.factory_id)
+locations = location_query.all()
+
+with open(cvs_file_name, "w", newline="") as csvDataFile:
+ # Write header
+ csvWriter = csv.writer(csvDataFile, delimiter=",", quotechar='|', quoting=csv.QUOTE_MINIMAL)
+ csvWriter.writerow(["factory_id", "id", "done_at", "answer_count"])
+ for location in locations:
+    # Write one row per location record
+ csvWriter.writerow([location.factory_id, location.id, location.done_at, len(location.answers)])
+
+print("{} records reported.".format(len(locations)))
+db.session.remove()
+db.session.close()
diff --git a/back-end/www/util/import_gold_standards_from_csv.py b/back-end/www/util/import_gold_standards_from_csv.py
index 36722f7..f866457 100644
--- a/back-end/www/util/import_gold_standards_from_csv.py
+++ b/back-end/www/util/import_gold_standards_from_csv.py
@@ -11,8 +11,14 @@
The total location numbers after import.
"""
-CSV_FILE_NAME = "gold_answers.csv"
-CFG_NAME = "config.config.DevelopmentConfig"
+CSV_FILE_NAME = "20220427_answer_gold_standard.csv"
+#CSV_FILE_NAME = "50_answer_gold_standard.csv"
+
+# For Staging server
+#CFG_NAME = "config.config.DevelopmentConfig"
+
+# For production server
+CFG_NAME = "config.config.ProductionConfig"
import sys
import os
@@ -39,6 +45,7 @@
admin_id = 0
ans_count = 0
+update_count = 0
u1 = user_operations.get_user_by_client_id("admin")
if u1 is None:
print("Create admin.")
@@ -53,18 +60,31 @@
next(csvReader)
# for every row, insert the id(row 1) into the location table
+
for row in csvReader:
if row[0] is None:
break
location = location_operations.get_location_by_factory_id(row[0])
- if location is not None:
- print("location_id is: {}".format(row[0]))
- answer = answer_operations.create_answer(u1.id, location.id, int(row[2]), int(row[1]), "", int(row[3]), int(row[4]), 0)
- ans_count = ans_count + 1
- else:
- print("Cannot insert {}".format(row[0]))
+ if location is None:
+ location = location_operations.create_location(row[0])
+ print("Create location : {}".format(location.id))
+
+ gold_answer = answer_operations.get_gold_answer_by_location(location.id)
+ # If the gold answer exists, and the new gold_standard_status is specified
+ if gold_answer is not None and len(row) >5 and row[5] is not None:
+ answer_operations.set_answer(gold_answer.id, int(row[5]), int(row[3]), int(row[4]))
+ print("Update answer of location {}".format(row[0]))
+ update_count = update_count + 1
+ elif gold_answer is None:
+        # If no gold standard exists, create one when the row marks it as gold (column 5 == 0) or uses the old format without column 5
+ if (len(row) > 5 and int(row[5]) == 0) or (len(row) <= 5):
+ answer = answer_operations.create_answer(u1.id, location.id, int(row[2]), int(row[1]), "", int(row[3]), int(row[4]), 0)
+ print("create answer")
+ ans_count = ans_count + 1
+
+print("Updated {} gold standards.".format(update_count))
print("Insert {} gold standards. ".format(ans_count))
total_ans_count = answer_operations.get_gold_answer_count()
print("Total gold standard cout is : {} ".format(total_ans_count))
diff --git a/back-end/www/util/import_locations_from_csv.py b/back-end/www/util/import_locations_from_csv.py
index b09de44..466c44c 100644
--- a/back-end/www/util/import_locations_from_csv.py
+++ b/back-end/www/util/import_locations_from_csv.py
@@ -4,14 +4,14 @@
Config
------
CSV_FILE_NAME : The CSV file to be import. Ensure the IDs are in row 1 (index from 0)
-CFG_NAME : The config name can be Develpment, Staging, Testing
+CFG_NAME : The config name can be Develpment, Production
Output
------
The total location numbers after import.
"""
-CSV_FILE_NAME = "api.factory_100.csv"
+CSV_FILE_NAME = "production_20220505.csv"
CFG_NAME = "config.config.DevelopmentConfig"
import sys
@@ -43,12 +43,19 @@
# Skip the first row of the field name
next(csvReader)
+ loc_count = 0;
# for every row, insert the id(row 1) into the location table
for row in csvReader:
- location_operations.create_location(row[1])
-
-
+ location = location_operations.get_location_by_factory_id(row[0])
+ #print("location is", row[0])
+ if location is None:
+ location_operations.create_location(row[0])
+ #print("import location ", row[0])
+ loc_count = loc_count + 1
+
+
+print("Import locations :", loc_count)
count = location_operations.get_location_count()
print("Location count is ", count)