import numpy as np
import matplotlib.pyplot as plt
from sklearn.datasets import load_iris
from sklearn.preprocessing import MinMaxScaler
from sklearn.decomposition import PCA

def are_neighbors(grid2d, i, j):
    """
    True if grid units i and j are 8-neighbors (Chebyshev distance = 1).
    """
    di = abs(grid2d[i, 0] - grid2d[j, 0])
    dj = abs(grid2d[i, 1] - grid2d[j, 1])
    return max(di, dj) == 1  # True if the units are adjacent on the grid

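# Illustrative note (not part of the original exercise): on the 12x12 grid
# built in som_iris_ex4() below, indices run row by row with the column
# varying fastest, so index 0 is cell (1, 1), index 1 is (2, 1) and index 13
# is (2, 2).  Assuming that layout, are_neighbors() behaves like this:
#     are_neighbors(grid2d, 0, 1)    # True  -- Chebyshev distance 1
#     are_neighbors(grid2d, 0, 13)   # True  -- diagonal 8-neighbor
#     are_neighbors(grid2d, 0, 2)    # False -- Chebyshev distance 2
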
def movmean(data, window):
    """
    Centered moving average; the window shrinks at the array edges.
    """
    result = np.zeros_like(data)
    half = window // 2
    for i in range(len(data)):
        start = max(0, i - half)
        end = min(len(data), i + half + 1)
        result[i] = np.mean(data[start:end])
    return result

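# Illustrative note (not part of the original exercise): movmean() shrinks
# its window at the array edges, e.g. with window = 3:
#     movmean(np.array([0., 1., 0., 1., 0.]), 3)
#     # -> [0.5, 0.333..., 0.667..., 0.333..., 0.5]
# (the first and last entries average only two samples)
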
def som_iris_ex4():
    rng = np.random.default_rng(7)
    R, C = 12, 12  # larger grid
    M = R * C
    iters = 4000  # more iterations for larger grids
    eta0 = 0.12
    sigma0 = max(R, C) / 2  # initial neighborhood radius (global ordering)
    sigma_final_list = [1, 2, 3]  # final neighborhood sizes to test

    iris = load_iris()
    X = iris.data
    scaler = MinMaxScaler()
    X = scaler.fit_transform(X)  # 150x4, each feature scaled to [0, 1]
    S = X.shape[0]

    print(f"loaded iris: {S} samples, {X.shape[1]} features")
    print(f"grid: {R}x{C} = {M} neurons")
    print(f"testing sigma_final: {sigma_final_list}")

    # 2-D grid coordinates of the neurons, 1-based, column index first
    gx, gy = np.meshgrid(np.arange(1, C + 1), np.arange(1, R + 1))
    grid2d = np.column_stack([gx.ravel(), gy.ravel()])

    fig1 = plt.figure(figsize=(10, 6), facecolor='white')
    ax1 = fig1.add_subplot(111)
    ax1.set_xlabel('Iteration')
    ax1.set_ylabel('Topographic Error')
    ax1.set_title(f'R={R}, C={C} — Effect of final σ in {{1,2,3}}')
    ax1.grid(True)
    ax1.set_frame_on(True)  # matplotlib Axes have no .box(); keep the frame on

    results = []

    for k, sigma_final in enumerate(sigma_final_list):
        print(f'\n=== Testing sigma_final = {sigma_final} ===')

        W = rng.random((M, X.shape[1]))  # in [0, 1], same scale as X

        topoErr = np.zeros(iters)

        tau = iters  # a single time constant for the whole run

        for t in range(1, iters + 1):
            # exponential decay of the neighborhood radius and learning rate
            sigma_t = sigma_final + (sigma0 - sigma_final) * np.exp(-t / tau)
            eta_t = eta0 * np.exp(-t / tau)

            # random training sample
            x = X[rng.integers(S), :]

            # best and second-best matching units (squared Euclidean distance)
            dists = np.sum((W - x) ** 2, axis=1)
            idx = np.argsort(dists)
            bmu1 = idx[0]
            bmu2 = idx[1]

            # Gaussian neighborhood around the BMU on the grid
            dgrid2 = np.sum((grid2d - grid2d[bmu1, :]) ** 2, axis=1)
            h = np.exp(-dgrid2 / (2 * sigma_t ** 2))
            h = h / np.max(h)

            W = W + eta_t * (h[:, np.newaxis] * (x - W))

            # topographic error: 1 if the two BMUs are not grid neighbors
            topoErr[t - 1] = 0 if are_neighbors(grid2d, bmu1, bmu2) else 1

            if t % 1000 == 0:
                print(f"  iteration {t}/{iters}, sigma={sigma_t:.3f}, eta={eta_t:.5f}")

        win = 50
        topoErr_s = movmean(topoErr, win)

        results.append({
            'sigma_final': sigma_final,
            'W': W.copy(),
            'topoErr': topoErr.copy(),
            'topoErr_s': topoErr_s.copy()
        })

        # plot the smoothed TE curve
        ax1.plot(range(1, iters + 1), topoErr_s,
                 label=f'σ_final={sigma_final}', linewidth=1.5)

    ax1.legend(loc='upper right')  # matplotlib location string ('northeast' is MATLAB)
    plt.tight_layout()
    plt.show()

    fig2 = plt.figure(figsize=(15, 5), facecolor='white')

    for k, result in enumerate(results):
        W = result['W']

        # project the 4-D codebook vectors onto the first two principal components
        pca = PCA(n_components=2)
        W2 = pca.fit_transform(W)

        ax = fig2.add_subplot(1, len(sigma_final_list), k + 1)
        ax.set_aspect('equal')
        ax.set_title(f'σ_final={result["sigma_final"]}')
        ax.set_frame_on(True)

        WX = W2[:, 0].reshape((R, C))
        WY = W2[:, 1].reshape((R, C))

        # horizontal lattice lines
        for r in range(R):
            ax.plot(WX[r, :], WY[r, :], '-', linewidth=1.0, color='blue', alpha=0.6)

        # vertical lattice lines
        for c in range(C):
            ax.plot(WX[:, c], WY[:, c], '-', linewidth=1.0, color='blue', alpha=0.6)

        # small markers for the neurons
        ax.plot(W2[:, 0], W2[:, 1], 'ko', markersize=2, markerfacecolor='k')

    plt.suptitle('Final Lattices Projected by PCA', fontsize=14)
    plt.tight_layout()
    plt.show()

    print('\n== Summary (lower TE is better) ==')
    for result in results:
        te_mean = np.mean(result['topoErr'][-500:])  # mean over the last 500 iterations
        print(f'sigma_final={result["sigma_final"]} -> mean TE (last 500 iters): {te_mean:.4f}')

if __name__ == '__main__':
    som_iris_ex4()
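# Usage sketch (the file name below is arbitrary; requires numpy, matplotlib
# and scikit-learn):
#     python som_iris_ex4.py
# The script opens two figures (smoothed topographic-error curves and the
# PCA-projected lattices) and prints a mean-TE summary per sigma_final.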