forked from weizushuai/sentinel1ice
-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy path03normalize_tf.py
More file actions
44 lines (37 loc) · 1.47 KB
/
03normalize_tf.py
File metadata and controls
44 lines (37 loc) · 1.47 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
"""Normalize texture features (TFs) and clip outliers.

For each polarisation (HH, HV):
  * load texture features from the presaved ``*_har.npz`` files,
  * normalize them with the presaved normalization file (``norm01<pol>.npy``)
    via ``sar2ice.normalize_texture_features``,
  * set values outside the [percentile, 100 - percentile] range to NaN,
  * spread NaNs onto immediately neighbouring pixels with a small
    gaussian filter,
  * save the result to ``*_har_norm.npz`` next to the input file.
"""
import os
import glob
import numpy as np
import scipy.stats as st
import matplotlib.pyplot as plt
# NOTE(review): scipy.ndimage.filters was deprecated in SciPy 1.8 and removed
# in SciPy 1.10 - gaussian_filter must be imported from scipy.ndimage.
from scipy.ndimage import gaussian_filter
from sar2ice import normalize_texture_features
# NOTE(review): st and plt appear unused in this script - kept to avoid
# changing module-level side effects; confirm before removing.

# directory with input TF files and presaved normalization files
idir = '/files/sentinel1a/odata/'
# prefix of the per-polarisation normalization file (norm01HH.npy, norm01HV.npy)
normFilePrefix = 'norm01'
# fraction (in percent) clipped from each tail of the histogram
percentile = .1
# sigma of the gaussian used to propagate NaNs to neighbours
gaus_size = 0.2 # c.a. 3 pixels
# apply normalization and clipping
# load TFs, load presaved logMeanStd, normalize and save
for pol in ['HH', 'HV']:
    # name of output file to keep normalization
    normFile = os.path.join(idir, normFilePrefix + pol + '.npy')
    # input files with TFs for this pol
    ifiles = sorted(glob.glob(os.path.join(idir, '*%s_har.npz' % pol)))
    for ifile in ifiles:
        # read TFs from HH or HV and keep in hhhvTF
        tfs = np.load(ifile)['tfs']
        tfsNorm = normalize_texture_features(tfs, normFile)
        # get min, max from histogram and clip each feature plane
        for i, tf in enumerate(tfsNorm):
            # hoist the finite-value selection (was computed twice)
            finite = tf[np.isfinite(tf)]
            if finite.size == 0:
                # nothing valid in this plane - leave it untouched
                continue
            tfMin, tfMax = np.percentile(finite,
                                         (percentile, 100 - percentile))
            # clip outliers (tf is a view of tfsNorm[i]; NaN comparisons are
            # False, so the second mask is unaffected by the first assignment)
            tfsNorm[i, tf < tfMin] = np.nan
            tfsNorm[i, tf > tfMax] = np.nan
            # remove 2 NaN neighbours: gaussian filtering spreads NaN onto
            # adjacent pixels, which are then masked as well
            tfGaus = gaussian_filter(tfsNorm[i], gaus_size)
            tfsNorm[i, np.isnan(tfGaus)] = np.nan
        # save to output file
        ofile = ifile.replace('_har', '_har_norm')
        np.savez_compressed(ofile, tfsNorm=tfsNorm)