-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathmain.py
More file actions
84 lines (65 loc) · 2.55 KB
/
main.py
File metadata and controls
84 lines (65 loc) · 2.55 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
import numpy as np
from tkba import TKBA
from visualization import plot_tkba
import matplotlib.pyplot as plt
from scipy.io import loadmat
def main():
    """Run the TKBA clustering demo on the 2-D benchmark dataset.

    Loads ``2D_ClusteringDATASET.mat``, optionally replaces a fraction of the
    samples with uniform noise, min-max normalizes to [0, 1], then fits and
    plots a TKBA network ``m_iter`` times on a shuffled copy of the data.
    """
    # Parameters
    m_iter = 1  # Number of iterations
    nr = 0.1    # Noise Rate [0,1]: fraction of samples replaced by noise

    print("Loading Dataset...")
    mat_data = loadmat('2D_ClusteringDATASET.mat')

    # Extract the data (variable name differs between dataset versions)
    print("Available variables in the .mat file:", list(mat_data.keys()))
    if 'data' in mat_data:
        data = mat_data['data']
    elif 'DATA' in mat_data:
        data = mat_data['DATA']
    else:
        print("Dataset variable not found")
        return

    print(f"Original dataset shape: {data.shape}")
    # Guard clause: clustering below assumes at least 2 feature columns
    if data.shape[1] < 2:
        print("Error: Data must have at least 2 dimensions for clustering")
        return

    # Take first two columns as in the MATLAB code.
    # Cast to float: MAT files may store integers, and the noise assignment
    # below would silently truncate float noise to 0 on an int array.
    data = data[:, :2].astype(np.float64)
    print(f"Using dataset with {data.shape[0]} samples")

    data_min = data.min(axis=0)
    data_max = data.max(axis=0)

    # Add some noise if requested
    if nr > 0:
        n_noise = int(data.shape[0] * nr)
        # Draw uniform noise and scale it to the observed data range so it
        # stays uniformly distributed over the domain after normalization
        # (raw [0,1) noise would collapse into a corner for non-[0,1] data).
        noise_data = np.random.rand(n_noise, data.shape[1])
        noise_data = data_min + noise_data * (data_max - data_min)
        # Replace first portion of data with noise
        data[:n_noise, :] = noise_data

    # Normalize data to [0, 1] range as in original MATLAB code
    # (epsilon avoids division by zero on constant columns)
    data = (data - data_min) / (data_max - data_min + 1e-8)

    # Parameters of TKBA
    cim_sig = 0.05      # Kernel Bandwidth for CIM
    kbr_sig = 1.0       # Kernel Bandwidth for KBR
    max_cim = 0.2       # Vigilance Parameter by CIM [0~1]
    lambda_param = 400  # Interval for Node deletion and topology construction

    for nitr in range(m_iter):
        print(f'Iterations: {nitr+1}/{m_iter}')

        # Randomize data order
        ran = np.random.permutation(data.shape[0])
        data_random = data[ran, :]

        # Create and fit the TKBA model
        tkba_net = TKBA(
            cim_sig=cim_sig,
            kbr_sig=kbr_sig,
            max_cim=max_cim,
            lambda_param=lambda_param
        )
        tkba_net.fit(data_random)

        # Print results
        print(f"Number of clusters found: {tkba_net.num_clusters}")
        print(f"Final edge matrix shape: {tkba_net.edge.shape}")
        print(f"Weight matrix shape: {tkba_net.weight.shape}")

        # Plot the results
        plot_tkba(data_random, tkba_net)
        plt.show()
# Run the demo only when executed as a script, not when imported as a module.
if __name__ == "__main__":
    main()