-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathrun.py
More file actions
138 lines (104 loc) · 3.44 KB
/
run.py
File metadata and controls
138 lines (104 loc) · 3.44 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
"""
filename: run.py

@author: Alberto Serrano

purpose: Load cross-slot simulation data, preprocess it, build the
coefficient matrices for the generalized eigenvalue problem, and save
them in MATLAB format for later analysis.
"""
import argparse

import pandas as pd

from src.preprocess import bound, negate, non_dimensionalize, deduplicate
from src.create import build_coefficient_matrix
from src.eigs import remove_inf_eigs
# NOTE(review): wildcard import replaced with the names this script actually
# uses — confirm src.io exposes exactly these helpers.
from src.io import (
    check_dataframe,
    read_dataframe,
    save_dataframe,
    save_matrix_to_ml,
)

# Default input csv of simulation data (cross-slot, De = 0.36).
DATA_FILENAME = "data/OB_crossslot_symmetric_De0.36.csv"
# Output path (MATLAB format) for the saved coefficient matrices.
MTLB_FILENAME = "out/coefficient_matrices"
def init(args):
    """
    Return a preprocessed simulation dataframe.

    If a pickled, preprocessed dataframe already exists for the input file
    and the --use-pickle flag was given, load it. Otherwise read the raw
    csv, run the preprocessing pipeline, and pickle the result for later
    runs.

    args: dict of parsed command-line arguments; reads "file" (csv path)
          and "use_pickle" (bool).
    """
    # Extract the filename of the csv file from the path and remove the
    # file extension (multi-dot names collapse, e.g. "a.b.csv" -> "ab").
    filename = "".join(args["file"].split("/")[-1].split(".")[:-1])

    # Check if a preprocessed data frame exists in the intermediate-data
    # directory and the caller asked to reuse it.
    if check_dataframe(filename) and args["use_pickle"]:
        print("Loading preexisting pickle file: {0}".format(filename))
        df = read_dataframe(filename)

    # If preprocessed data does not exist, load raw data and preprocess.
    else:
        path = args["file"]
        print("Reading in file: {0}".format(path))

        # Create data frame with simulation data.
        df = pd.read_csv(path)

        # Bound data.
        df = bound(df)

        # Fix values in data by negating those on the right side.
        negate(df)

        # Convert dimensional data into non-dimensionalized form.
        non_dimensionalize(df)

        # Remove duplicate values (typically occur on the line of
        # symmetry); updates indices.
        deduplicate(df, up_index=True)

        # Round coordinates so nearly-equal points compare equal later.
        df["Points:0"] = df["Points:0"].round(3)
        df["Points:1"] = df["Points:1"].round(3)

        # Save dataframe for later use.
        save_dataframe(df, filename)

    return df
def coefficient_matrix_setup(df):
    """
    Build the coefficient matrices from *df* and save them for later use.

    Runs the neighbor-based matrix construction, strips the infinite
    eigenvalues, and writes the result in MATLAB format to MTLB_FILENAME.

    df: preprocessed simulation dataframe (see init()).
    """
    # Run with neighbor implementation.
    A, B = build_coefficient_matrix(df)

    # Remove infinite eigenvalues.
    F, G_B = remove_inf_eigs(A, B)

    # Save coefficient matrices.
    save_matrix_to_ml(F, G_B, MTLB_FILENAME)
def main():
    """
    Entry point: parse command-line arguments, load/preprocess the data,
    then build and save the coefficient matrices.
    """
    # Create argument parser.
    ap = argparse.ArgumentParser(
        description="Create sparse matrix from data and more"
    )

    # Input csv of simulation data.
    ap.add_argument(
        "-f",
        "--file",
        type=str,
        help="Name csv file",
        default=DATA_FILENAME
    )
    ap.add_argument(
        "-v",
        "--verbose",
        help="increase output verbosity",
        action="store_true"
    )
    ap.add_argument(
        "-up",
        "--use-pickle",
        # Help text fixed: this flag *enables* reuse of preexisting
        # preprocessed data (init() loads the pickle only when it is set);
        # the old text said the opposite.
        help="Reuse preexisting preprocessed data if available",
        action="store_true"
    )

    # Parse arguments into a plain dict.
    args = vars(ap.parse_args())

    # Load data frame.
    df = init(args)

    # Setup coefficient matrices and save.
    coefficient_matrix_setup(df)


# Guard the entry point so importing this module does not run the pipeline.
if __name__ == "__main__":
    main()