main.py
#%%
from pytorch_deep_learning_lib.data_module import TabularDataModule
from pytorch_deep_learning_lib.model import RegressionModel
from pytorch_deep_learning_lib.train import train_model
from pytorch_deep_learning_lib.evaluate import evaluate_model
import pandas as pd
# Select a dataset: 1 = California housing, 2 = energy efficiency (ENB2012), 3 = concrete compressive strength
data_choice = 3

# Load the chosen dataset
if data_choice == 1:
    # California housing prices (built into scikit-learn)
    from sklearn.datasets import fetch_california_housing
    california = fetch_california_housing()
    df = pd.DataFrame(california.data, columns=california.feature_names)
    df['MedHousVal'] = california.target
    target_column = 'MedHousVal'
    drop_columns = []
elif data_choice == 2:
    # UCI energy efficiency dataset (building heating/cooling loads)
    url = 'https://archive.ics.uci.edu/ml/machine-learning-databases/00242/ENB2012_data.xlsx'
    df = pd.read_excel(url)
    df.columns = ['Relative_Compactness', 'Surface_Area', 'Wall_Area', 'Roof_Area',
                  'Overall_Height', 'Orientation', 'Glazing_Area',
                  'Glazing_Area_Distribution', 'Heating_Load', 'Cooling_Load']
    target_column = 'Heating_Load'
    drop_columns = []
elif data_choice == 3:
    # UCI concrete compressive strength dataset
    url = 'https://archive.ics.uci.edu/ml/machine-learning-databases/concrete/compressive/Concrete_Data.xls'
    df = pd.read_excel(url)
    df.columns = ['Cement', 'Blast_Furnace_Slag', 'Fly_Ash', 'Water',
                  'Superplasticizer', 'Coarse_Aggregate', 'Fine_Aggregate',
                  'Age', 'Concrete_Compressive_Strength']
    target_column = 'Concrete_Compressive_Strength'
    drop_columns = []
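else:
    # Hypothetical guard, not in the original script: fail fast if data_choice
    # is set to a value none of the branches above handle, instead of letting
    # the code below fail on undefined names.
    raise ValueError(f'Unsupported data_choice: {data_choice}')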
# Hyperparameters
max_epochs = 200
batch_size = 16
test_size = 0.2
hidden_dims = [128, 64, 32]
lr = 0.0001
dropout_rate = 0.0
patience = 10
# Wrap the dataframe in the data module; test_size controls the held-out split
data_module = TabularDataModule(df, target_column, drop_columns,
                                batch_size=batch_size, test_size=test_size)
data_module.prepare_data()
data_module.setup()
# Define and train the model
model = RegressionModel(
    input_dim=len(data_module.selected_features),
    hidden_dims=hidden_dims,
    lr=lr,
    dropout_rate=dropout_rate
)
trained_model = train_model(data_module, model, max_epochs=max_epochs, patience=patience)
# Evaluate the model
evaluate_model(trained_model, data_module)
# %%
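# Hypothetical follow-up cell, not part of the original script: inspect a few
# predictions from the trained model. It assumes Lightning-style conventions,
# i.e. that TabularDataModule exposes a test_dataloader() yielding
# (features, targets) batches and that the trained model is a torch.nn.Module
# returning one value per row; adjust the names to the library's actual API.
import torch

trained_model.eval()
with torch.no_grad():
    features, targets = next(iter(data_module.test_dataloader()))
    preds = trained_model(features).squeeze(-1)

comparison = pd.DataFrame({
    'target': targets.numpy().ravel(),
    'prediction': preds.numpy().ravel(),
})
print(comparison.head())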