-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathq1_answer.py
More file actions
174 lines (126 loc) · 4.41 KB
/
q1_answer.py
File metadata and controls
174 lines (126 loc) · 4.41 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
# Algorithm Design Foundations
#
# Final Exam, Question 1
#
# Student Name: Hamed Araab
# Student Number: 9925003
from math import log
from random import sample
class KMeansClustering:
    """
    Performs the k-means clustering algorithm on a list of points with the
    specified number of clusters (`k`).

    ### Notes:
    - The algorithm is performed `max(100, int(math.log(len(points))))` times
      and the result with the lowest Within-Cluster Sum of Squares (WCSS) is
      selected as the final output.
    - If any cluster becomes empty at any point, that run of the k-means
      algorithm is aborted and the next run starts.
    - Raises `ValueError` if no run ever converges (every run produced an
      empty cluster), or if `k` exceeds the number of points (from `sample`).
    """

    @staticmethod
    def _get_distance(point1, point2):
        """
        Return the Euclidean distance between two points.

        Raises:
            ValueError: if the points have different numbers of dimensions.
        """
        if len(point1) != len(point2):
            raise ValueError("Both points must have the same number of dimensions.")
        return sum((x - y) ** 2 for x, y in zip(point1, point2)) ** 0.5

    def __init__(self, points, k):
        # Clustering is performed eagerly: after construction, `means` and
        # `clusters` hold the best result found.
        self._points = points
        self._k = k
        self._run()

    @property
    def points(self):
        """The input points, as passed to the constructor."""
        return self._points

    @property
    def k(self):
        """The number of clusters."""
        return self._k

    @property
    def means(self):
        """The final cluster means (centroids), one per cluster."""
        return self._means

    @property
    def clusters(self):
        """The final clusters, each a list of the input points assigned to it."""
        return self._clusters

    @property
    def min_inter_cluster_distance(self):
        """
        The shortest distance between any two points that lie in different
        clusters. Returns `inf` when `k < 2` (no pair of clusters exists).
        """
        min_distance = float("inf")
        for i in range(self._k):
            for j in range(i + 1, self._k):
                for point_i in self._clusters[i]:
                    for point_j in self._clusters[j]:
                        distance = KMeansClustering._get_distance(point_i, point_j)
                        min_distance = min(distance, min_distance)
        return min_distance

    def _get_initial_means(self):
        """
        Return `k` distinct random points as the initial values for means.
        """
        return sample(self._points, k=self._k)

    def _get_clusters(self):
        """
        Assign every point to the cluster of its nearest current mean and
        return the resulting list of `k` clusters (some may be empty).
        """
        clusters = [[] for _ in range(self._k)]
        for point in self._points:
            distances = [
                KMeansClustering._get_distance(point, mean) for mean in self._means
            ]
            nearest_mean_index = distances.index(min(distances))
            clusters[nearest_mean_index].append(point)
        return clusters

    def _get_means(self):
        """
        Return the coordinate-wise means of all clusters.

        Raises:
            ZeroDivisionError: if any cluster is empty — used by `_run` as
            the signal to abort the current k-means run.
        """
        return [
            [sum(coords) / len(cluster) for coords in zip(*cluster)]
            for cluster in self._clusters
        ]

    def _get_wcss(self):
        """
        Return the Within-Cluster Sum of Squares (WCSS) for the current
        clustering result: the sum over all points of the squared
        coordinate differences from their cluster's mean.
        """
        return sum(
            (x - y) ** 2
            for cluster, mean in zip(self._clusters, self._means)
            for point in cluster
            for x, y in zip(point, mean)
        )

    def _run(self):
        """
        Find the best clustering result for this instance by running k-means
        several times from random initializations and keeping the result
        with the lowest WCSS.

        Raises:
            ValueError: if every run aborted due to an empty cluster.
        """
        iterations = max(100, int(log(len(self._points))))
        # Maps each run's WCSS to its clustering result; the minimum key
        # identifies the best run.
        history = {}
        for _ in range(iterations):
            self._prev_means = None
            self._means = self._get_initial_means()
            try:
                # Lloyd iteration: reassign points, recompute means, until
                # the means stop changing.
                while self._means != self._prev_means:
                    self._clusters = self._get_clusters()
                    self._prev_means = self._means
                    self._means = self._get_means()
                wcss = self._get_wcss()
                history[wcss] = self._clusters
            except ZeroDivisionError:
                # An empty cluster made `_get_means` divide by zero: abort
                # this run and try a fresh initialization.
                continue
        if not history:
            # Previously this fell through to `min()` on an empty sequence,
            # raising an opaque error; fail with a clear message instead.
            raise ValueError(
                "k-means failed: every run produced an empty cluster."
            )
        # Adopt the clustering result with the lowest WCSS.
        self._clusters = history[min(history.keys())]
        self._means = self._get_means()
def main():
    """Read points and a cluster count from stdin, cluster them, and print
    the minimum inter-cluster distance with 7 decimal places."""
    # --- read input ---
    n = int(input())
    point_list = []
    for _ in range(n):
        point_list.append([float(value) for value in input().split(" ")])
    k = int(input())
    # --- cluster and report ---
    result = KMeansClustering(point_list, k)
    print("{:.7f}".format(result.min_inter_cluster_distance))


if __name__ == "__main__":
    main()