# step2b_basis.py
"""Compute the POD basis (the dominant left singular vectors) of the lifted,
scaled snapshot training data. Save the basis and information on how the
data was scaled.

Examples
--------
# Use 10,000 snapshots to compute a rank-50 POD basis.
$ python3 step2b_basis.py 10000 50

# Use 20,000 snapshots to compute a rank-100 POD basis.
$ python3 step2b_basis.py 20000 100

Loading Results
---------------
>>> import utils
>>> trainsize = 10000       # Number of snapshots used as training data.
>>> num_modes = 44          # Number of POD modes.
>>> V, scales = utils.load_basis(trainsize, num_modes)

Command Line Arguments
----------------------
"""
import h5py
import logging
import scipy.linalg as la

import rom_operator_inference as opinf

import config
import utils


def compute_and_save_pod_basis(num_modes, training_data, qbar, scales):
    """Compute and save the POD basis via a randomized SVD.

    Parameters
    ----------
    num_modes : int
        Number of POD modes to compute.

    training_data : (NUM_ROMVARS*DOF, trainsize) ndarray
        Training snapshots to take the SVD of.

    qbar : (NUM_ROMVARS*DOF,) ndarray
        Mean snapshot of the scaled training data.

    scales : (NUM_ROMVARS, 2) ndarray
        Info on how the snapshot data was scaled.

    Returns
    -------
    V : (NUM_ROMVARS*DOF, r) ndarray
        POD basis of rank r.
    """
    # Compute the randomized SVD of the training data.
    with utils.timed_block(f"Computing {num_modes}-component randomized SVD"):
        V, svdvals = opinf.pre.pod_basis(training_data, r=num_modes,
                                         mode="randomized",
                                         n_iter=15, random_state=42)

    # Save the POD basis.
    save_path = config.basis_path(training_data.shape[1])
    with utils.timed_block("Saving POD basis"):
        with h5py.File(save_path, 'w') as hf:
            hf.create_dataset("basis", data=V)
            hf.create_dataset("svdvals", data=svdvals)
            hf.create_dataset("mean", data=qbar)
            hf.create_dataset("scales", data=scales)
    logging.info(f"POD basis of rank {num_modes} saved to {save_path}.\n")

    return V
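

# Illustrative sketch (not part of the pipeline): the HDF5 file written above
# can also be inspected directly with h5py instead of utils.load_basis().
# The dataset names match those created in compute_and_save_pod_basis(); the
# trainsize value 10000 is only an example.
#
#   with h5py.File(config.basis_path(10000), 'r') as hf:
#       V = hf["basis"][:]          # (NUM_ROMVARS*DOF, r) POD basis.
#       svdvals = hf["svdvals"][:]  # Leading singular values.
#       qbar = hf["mean"][:]        # Mean snapshot of the scaled data.
#       scales = hf["scales"][:]    # Scaling information.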


def compute_and_save_all_svdvals(training_data):
    """Compute and save the singular values corresponding to the *full* POD
    basis for the training data.

    Parameters
    ----------
    training_data : (NUM_ROMVARS*DOF, trainsize) ndarray
        Training snapshots to take the SVD of.

    Returns
    -------
    svdvals : (trainsize,) ndarray
        Singular values for the full POD basis.
    """
    # Compute the DENSE SVD of the training data to get the singular values.
    with utils.timed_block("Computing *dense* SVD for singular values"):
        svdvals = la.svdvals(training_data,
                             overwrite_a=True, check_finite=False)

    # Save the singular values.
    save_path = config.basis_path(training_data.shape[1])
    save_path = save_path.replace(config.BASIS_FILE, "svdvals.h5")
    with utils.timed_block("Saving singular values"):
        with h5py.File(save_path, 'w') as hf:
            hf.create_dataset("svdvals", data=svdvals)
    logging.info(f"Singular values saved to {save_path}.\n")

    return svdvals
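

# Illustrative sketch (assumes NumPy is imported as np): the full set of
# singular values saved above is typically used to choose a basis size r,
# e.g., via a cumulative energy criterion. The 99.99% threshold below is an
# example, not a value prescribed by this repository.
#
#   import numpy as np
#   energy = np.cumsum(svdvals**2) / np.sum(svdvals**2)
#   r = int(np.searchsorted(energy, 0.9999)) + 1  # Smallest r with >=99.99%.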


def main(trainsize, num_modes):
    """Compute the POD basis (dominant left singular vectors) of a set of
    lifted, scaled snapshot training data and save the basis and the
    corresponding singular values.

    WARNING: This will OVERWRITE any existing basis for this `trainsize`.

    Parameters
    ----------
    trainsize : int
        The number of snapshots to use in the computation. There must
        exist a file of exactly `trainsize` lifted, scaled snapshots
        (see step2a_transform.py).

    num_modes : int
        The number of POD modes (left singular vectors) to retain, or -1
        to compute and save *all* singular values (no basis is saved).
    """
    utils.reset_logger(trainsize)

    # Load the first `trainsize` lifted, scaled snapshots.
    training_data, _, qbar, scales = utils.load_scaled_data(trainsize)

    if num_modes == -1:
        # Secret mode! Compute all singular values (EXPENSIVE).
        return compute_and_save_all_svdvals(training_data)
    else:
        # Compute and save the (randomized) SVD of the training data.
        return compute_and_save_pod_basis(num_modes,
                                          training_data, qbar, scales)


# =============================================================================
if __name__ == "__main__":
    # Set up command line argument parsing.
    import argparse
    parser = argparse.ArgumentParser(description=__doc__,
                   formatter_class=argparse.RawDescriptionHelpFormatter)
    parser.usage = f""" python3 {__file__} --help
        python3 {__file__} TRAINSIZE MODES"""
    parser.add_argument("trainsize", type=int,
                        help="number of snapshots in the training data")
    parser.add_argument("modes", type=int,
                        help="number of left singular vectors/values to save")

    # Do the main routine.
    args = parser.parse_args()
    main(args.trainsize, args.modes)
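

# Usage sketch: with `trainsize` lifted, scaled snapshots already saved by
# step2a_transform.py, the script is run from the command line, e.g.
#
#   $ python3 step2b_basis.py 10000 50    # Rank-50 basis from 10,000 snapshots.
#   $ python3 step2b_basis.py 10000 -1    # Singular values only (expensive).
#
# Passing -1 for MODES triggers the "secret mode" in main(), which computes the
# dense SVD and saves only the singular values.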