-
Notifications
You must be signed in to change notification settings - Fork 0
/
Copy path easy_adam.cpp
41 lines (39 loc) · 1.04 KB
/
easy_adam.cpp
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
#include "micrograd.hpp"
using namespace microgradCpp;
/*
* MIT License
*
* Copyright (c) 2024 Sermet Pekin
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files...
*/
/*
g++ -std=c++17 -Iinclude -O2 -o main easy_adam.cpp
*/
// Entry point: trains a small MLP classifier on the Iris dataset
// using the Adam optimizer, then evaluates it on a held-out split.
int main()
{
    // Load Iris and randomize the sample order before splitting.
    DatasetType dataset = get_iris();
    shuffle(dataset);

    // Fraction of the shuffled dataset used for training;
    // the remainder is used for evaluation.
    const double train_ratio{0.8};

    // Network topology: 4 input features -> hidden layers [7, 7] -> 3 classes.
    MLP model(4, {7, 7, 3});

    // Adam needs a handle on every trainable parameter of the model.
    auto trainable = model.parameters();

    // Optimizer hyperparameters.
    const double step_size = 0.001;
    const int n_epochs = 1000;

    // Build the Adam optimizer over the collected parameters.
    AdamOptimizer optimizer(trainable, step_size);

    // Run the combined training + evaluation loop.
    // (The SGD-style overload is kept here for reference:)
    // train_eval(dataset, TRAIN_SIZE, model, learning_rate, epochs);
    train_eval(dataset, train_ratio, model, optimizer, n_epochs);

    return 0;
}
/*
Notes
-----------
g++ -std=c++17 -Iinclude -O2 -o main easy_adam.cpp
// or
make run
*/