3 changes: 2 additions & 1 deletion CMakeLists.txt
@@ -8,7 +8,7 @@ project(bellshade_C
set(CMAKE_C_STANDARD 11)
set(CMAKE_C_STANDARD_REQUIRED YES)
if (MSVC)
-    add_compile_definitions(_CRT_SECURE_NO_WARNIGS)
+    add_compile_definitions(_CRT_SECURE_NO_WARNINGS)
endif (MSVC)

find_library(MATH_LIBRARY m)
@@ -43,6 +43,7 @@ add_subdirectory(hashing)
add_subdirectory(konversi)
add_subdirectory(algoritma/sorting)
add_subdirectory(algoritma/searching)
+add_subdirectory(project/NeuralNetwork)

cmake_policy(SET CMP0054 NEW)
cmake_policy(SET CMP0057 NEW)
16 changes: 16 additions & 0 deletions project/NeuralNetwork/CMakeLists.txt
@@ -0,0 +1,16 @@
file(GLOB NN_SOURCES RELATIVE ${CMAKE_CURRENT_SOURCE_DIR} *.c)
add_executable(neural_network ${NN_SOURCES})

if(OpenMP_C_FOUND)
target_link_libraries(neural_network OpenMP::OpenMP_C)
endif()

if(MATH_LIBRARY)
target_link_libraries(neural_network ${MATH_LIBRARY})
endif()

set_target_properties(neural_network PROPERTIES C_STANDARD 11 C_STANDARD_REQUIRED YES)

install(TARGETS neural_network DESTINATION "bin/project/NeuralNetwork")
install(FILES iris.csv DESTINATION "bin/project/NeuralNetwork")

152 changes: 152 additions & 0 deletions project/NeuralNetwork/iris.csv
@@ -0,0 +1,152 @@
https://archive.ics.uci.edu/ml/datasets/iris
sepal length in cm,sepal width in cm,petal length in cm,petal width in cm,class
5.1,3.5,1.4,.2,0
4.9,3,1.4,.2,0
4.7,3.2,1.3,.2,0
4.6,3.1,1.5,.2,0
5,3.6,1.4,.2,0
5.4,3.9,1.7,.4,0
4.6,3.4,1.4,.3,0
5,3.4,1.5,.2,0
4.4,2.9,1.4,.2,0
4.9,3.1,1.5,.1,0
5.4,3.7,1.5,.2,0
4.8,3.4,1.6,.2,0
4.8,3,1.4,.1,0
4.3,3,1.1,.1,0
5.8,4,1.2,.2,0
5.7,4.4,1.5,.4,0
5.4,3.9,1.3,.4,0
5.1,3.5,1.4,.3,0
5.7,3.8,1.7,.3,0
5.1,3.8,1.5,.3,0
5.4,3.4,1.7,.2,0
5.1,3.7,1.5,.4,0
4.6,3.6,1,.2,0
5.1,3.3,1.7,.5,0
4.8,3.4,1.9,.2,0
5,3,1.6,.2,0
5,3.4,1.6,.4,0
5.2,3.5,1.5,.2,0
5.2,3.4,1.4,.2,0
4.7,3.2,1.6,.2,0
4.8,3.1,1.6,.2,0
5.4,3.4,1.5,.4,0
5.2,4.1,1.5,.1,0
5.5,4.2,1.4,.2,0
4.9,3.1,1.5,.2,0
5,3.2,1.2,.2,0
5.5,3.5,1.3,.2,0
4.9,3.6,1.4,.1,0
4.4,3,1.3,.2,0
5.1,3.4,1.5,.2,0
5,3.5,1.3,.3,0
4.5,2.3,1.3,.3,0
4.4,3.2,1.3,.2,0
5,3.5,1.6,.6,0
5.1,3.8,1.9,.4,0
4.8,3,1.4,.3,0
5.1,3.8,1.6,.2,0
4.6,3.2,1.4,.2,0
5.3,3.7,1.5,.2,0
5,3.3,1.4,.2,0
7,3.2,4.7,1.4,1
6.4,3.2,4.5,1.5,1
6.9,3.1,4.9,1.5,1
5.5,2.3,4,1.3,1
6.5,2.8,4.6,1.5,1
5.7,2.8,4.5,1.3,1
6.3,3.3,4.7,1.6,1
4.9,2.4,3.3,1,1
6.6,2.9,4.6,1.3,1
5.2,2.7,3.9,1.4,1
5,2,3.5,1,1
5.9,3,4.2,1.5,1
6,2.2,4,1,1
6.1,2.9,4.7,1.4,1
5.6,2.9,3.6,1.3,1
6.7,3.1,4.4,1.4,1
5.6,3,4.5,1.5,1
5.8,2.7,4.1,1,1
6.2,2.2,4.5,1.5,1
5.6,2.5,3.9,1.1,1
5.9,3.2,4.8,1.8,1
6.1,2.8,4,1.3,1
6.3,2.5,4.9,1.5,1
6.1,2.8,4.7,1.2,1
6.4,2.9,4.3,1.3,1
6.6,3,4.4,1.4,1
6.8,2.8,4.8,1.4,1
6.7,3,5,1.7,1
6,2.9,4.5,1.5,1
5.7,2.6,3.5,1,1
5.5,2.4,3.8,1.1,1
5.5,2.4,3.7,1,1
5.8,2.7,3.9,1.2,1
6,2.7,5.1,1.6,1
5.4,3,4.5,1.5,1
6,3.4,4.5,1.6,1
6.7,3.1,4.7,1.5,1
6.3,2.3,4.4,1.3,1
5.6,3,4.1,1.3,1
5.5,2.5,4,1.3,1
5.5,2.6,4.4,1.2,1
6.1,3,4.6,1.4,1
5.8,2.6,4,1.2,1
5,2.3,3.3,1,1
5.6,2.7,4.2,1.3,1
5.7,3,4.2,1.2,1
5.7,2.9,4.2,1.3,1
6.2,2.9,4.3,1.3,1
5.1,2.5,3,1.1,1
5.7,2.8,4.1,1.3,1
6.3,3.3,6,2.5,2
5.8,2.7,5.1,1.9,2
7.1,3,5.9,2.1,2
6.3,2.9,5.6,1.8,2
6.5,3,5.8,2.2,2
7.6,3,6.6,2.1,2
4.9,2.5,4.5,1.7,2
7.3,2.9,6.3,1.8,2
6.7,2.5,5.8,1.8,2
7.2,3.6,6.1,2.5,2
6.5,3.2,5.1,2,2
6.4,2.7,5.3,1.9,2
6.8,3,5.5,2.1,2
5.7,2.5,5,2,2
5.8,2.8,5.1,2.4,2
6.4,3.2,5.3,2.3,2
6.5,3,5.5,1.8,2
7.7,3.8,6.7,2.2,2
7.7,2.6,6.9,2.3,2
6,2.2,5,1.5,2
6.9,3.2,5.7,2.3,2
5.6,2.8,4.9,2,2
7.7,2.8,6.7,2,2
6.3,2.7,4.9,1.8,2
6.7,3.3,5.7,2.1,2
7.2,3.2,6,1.8,2
6.2,2.8,4.8,1.8,2
6.1,3,4.9,1.8,2
6.4,2.8,5.6,2.1,2
7.2,3,5.8,1.6,2
7.4,2.8,6.1,1.9,2
7.9,3.8,6.4,2,2
6.4,2.8,5.6,2.2,2
6.3,2.8,5.1,1.5,2
6.1,2.6,5.6,1.4,2
7.7,3,6.1,2.3,2
6.3,3.4,5.6,2.4,2
6.4,3.1,5.5,1.8,2
6,3,4.8,1.8,2
6.9,3.1,5.4,2.1,2
6.7,3.1,5.6,2.4,2
6.9,3.1,5.1,2.3,2
5.8,2.7,5.1,1.9,2
6.8,3.2,5.9,2.3,2
6.7,3.3,5.7,2.5,2
6.7,3,5.2,2.3,2
6.3,2.5,5,1.9,2
6.5,3,5.2,2,2
6.2,3.4,5.4,2.3,2
5.9,3,5.1,1.8,2
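
For reference, each data row in iris.csv holds five comma-separated values: the four measurements in centimetres followed by an integer class label (0 = setosa, 1 = versicolor, 2 = virginica), 50 rows per class. The PR loads the file through dataset_load_from_csv() declared in nn.h, which is not part of this diff; purely as an illustration of the row format, a hypothetical standalone parser could look like the sketch below.

#include <stdio.h>

/* Hypothetical helper, for illustration only (not the PR's loader):
 * parse one iris.csv data row into its 4 features and the class label. */
static int
parse_iris_row(const char *line, float features[4], int *label)
{
    return sscanf(line, "%f,%f,%f,%f,%d",
                  &features[0], &features[1], &features[2], &features[3], label) == 5;
}

int
main(void)
{
    float f[4];
    int label;

    if (parse_iris_row("5.1,3.5,1.4,.2,0", f, &label))
        printf("features: %.1f %.1f %.1f %.1f -> class %d\n", f[0], f[1], f[2], f[3], label);
    return 0;
}
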
146 changes: 146 additions & 0 deletions project/NeuralNetwork/main.c
@@ -0,0 +1,146 @@

#include "nn.h"
#include <stdio.h>
#include <stdlib.h>
#include <time.h>

int
main(void)
{
srand(time(NULL));

printf("=============[ NEURAL NETWORK - IRIS CLASSIFICATION ]============\n");

// Initialize the memory arena
struct MemoryArena arena = arena_create(1024 * 1024 * 10); // 10MB

// Load the iris dataset
printf("・ Loading dataset iris.csv...\n");
struct Matrix dataset = dataset_load_from_csv(&arena, "iris.csv", 1); // Skip header
printf("・ Dataset loaded: %zu samples, %zu features\n", dataset.num_rows, dataset.num_columns);

// Normalize the input data (first 4 columns)
printf("・ Normalizing input features...\n");
matrix_normalize_minmax(dataset, 4, 0.0f, 1.0f);

// Shuffle dataset
printf("・ Shuffling dataset...\n");
matrix_shuffle_rows(dataset);

// Split dataset: 80% training, 20% testing
size_t train_size = (size_t)(dataset.num_rows * 0.8f);
size_t test_size = dataset.num_rows - train_size;

struct Matrix train_data = matrix_create_row_slice(dataset, 0, train_size);
struct Matrix test_data = matrix_create_row_slice(dataset, train_size, test_size);

printf("-- Training set: %zu samples\n", train_data.num_rows);
printf("-- Test set: %zu samples\n", test_data.num_rows);

// Define the neural network architecture
// Input: 4 features (sepal length, sepal width, petal length, petal width)
// Hidden: 8 neurons
// Output: 3 classes (setosa, versicolor, virginica)
size_t arch[] = {4, 8, 3};
size_t arch_count = sizeof(arch) / sizeof(arch[0]);

printf("\n・ Creating neural network with architecture: ");
for (size_t i = 0; i < arch_count; ++i) {
printf("%zu", arch[i]);
if (i < arch_count - 1)
printf(" - ");
}
printf("\n");

// Allocate the neural network
struct NeuralNetwork nn = neural_network_allocate(&arena, arch, arch_count);

// Initialize the weights with random values
printf("・ Initializing random weights...\n");
neural_network_randomize_weights(nn, -1.0f, 1.0f);

// Evaluate before training
printf("\n・ Before training:\n");
printf("-- Training accuracy: %.2f%%\n", 100.0f * neural_network_calculate_accuracy(nn, train_data));
printf("-- Test accuracy: %.2f%%\n", 100.0f * neural_network_calculate_accuracy(nn, test_data));
printf("-- Training cost: %.4f\n", neural_network_calculate_accuracy(nn, train_data));

// Training parameters
size_t epochs = 1000;
size_t batch_size = 32;
float learning_rate = 0.1f;

printf("\n・ Training parameters:\n");
printf("-- Epochs: %zu\n", epochs);
printf("-- Batch size: %zu\n", batch_size);
printf("-- Learning rate: %.3f\n", learning_rate);
printf("\n======================[ STARTING TRAINING ]======================\n");

// Create separate arena for temporary calculations during training
struct MemoryArena temp_arena =
arena_create(1024 * 1024 * 5); // 5MB for temporary calculations

// Training loop
for (size_t epoch = 0; epoch < epochs; ++epoch) {
// Shuffle the training data every epoch
matrix_shuffle_rows(train_data);

struct BatchProcessor batch = {0};
while (!batch.is_epoch_finished) {
batch_process_training_data(&temp_arena, &batch, batch_size, nn, train_data, learning_rate);
arena_reset(&temp_arena); // Only reset temporary arena
}

// Print progress every 100 epochs
if ((epoch + 1) % 100 == 0 || epoch == 0 || epoch == epochs - 1) {
float train_acc = neural_network_calculate_accuracy(nn, train_data);
float test_acc = neural_network_calculate_accuracy(nn, test_data);
float cost = neural_network_calculate_cost(nn, train_data);

printf("Epoch %4zu | Cost: %.4f | Train Acc: %.2f%% | Test Acc: %.2f%%\n",
epoch + 1, cost, 100.0f * train_acc, 100.0f * test_acc);
}
}

printf("======================[ TRAINING COMPLETED ]=====================\n\n");

// Final evaluation
printf("・ Final Results\n");
float final_train_acc = neural_network_calculate_accuracy(nn, train_data);
float final_test_acc = neural_network_calculate_accuracy(nn, test_data);
float final_cost = neural_network_calculate_cost(nn, train_data);

printf("-- Final training accuracy: %.2f%%\n", 100.0f * final_train_acc);
printf("-- Final test accuracy: %.2f%%\n", 100.0f * final_test_acc);
printf("-- Final training cost: %.4f\n", final_cost);

// Demo predictions with a few samples from the test set
printf("\n======================[ SAMPLE PREDICTIONS ]=====================\n");
printf("・ Actual -> Predicted (Confidence)\n");
printf("・ 0 = Setosa | 1 = Versicolor | 2 = Virginica\n");
printf("-----------------------------------------------------------------\n");

for (size_t i = 0; i < 10 && i < test_data.num_rows; ++i) {
struct Row sample = matrix_get_row(test_data, i);
struct Row input = row_create_slice(sample, 0, 4);
struct Row target = row_create_slice(sample, 4, 3);

// Copy the input into the network
row_copy_data(nn.activation_vectors[0], input);
neural_network_forward_pass(nn);

size_t actual = row_find_max_index(target);
size_t predicted = row_find_max_index(nn.activation_vectors[nn.total_layers - 1]);
float confidence = row_at(nn.activation_vectors[nn.total_layers - 1], predicted);

printf(" %zu -> %zu (%.3f) %s\n", actual, predicted, confidence,
(actual == predicted) ? "✓" : "✗");
}

printf("\n・ Classification Complete\n");

return 0;
}

/* vim: set ts=4 sw=4 sts=4 et */
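
As a reading aid: the nn.h API is not included in this diff, but the sample-prediction loop above already shows the pieces an accuracy routine needs. Below is a minimal sketch of what neural_network_calculate_accuracy() could look like, assuming the loaded dataset matrix stores the class label one-hot in columns 4..6 (as the row_create_slice(sample, 4, 3) call in main.c implies) and reusing only helpers that main.c itself calls; the PR's actual implementation may differ.

#include "nn.h" /* assumed to declare struct Matrix, struct Row, struct NeuralNetwork and the helpers used below */

/* Hypothetical sketch, not the PR's implementation. */
static float
sketch_calculate_accuracy(struct NeuralNetwork nn, struct Matrix data)
{
    size_t correct = 0;

    for (size_t i = 0; i < data.num_rows; ++i) {
        struct Row sample = matrix_get_row(data, i);
        struct Row input = row_create_slice(sample, 0, 4);  /* 4 input features */
        struct Row target = row_create_slice(sample, 4, 3); /* 3 one-hot class columns */

        /* Feed the sample through the network, exactly as the demo loop does. */
        row_copy_data(nn.activation_vectors[0], input);
        neural_network_forward_pass(nn);

        size_t predicted = row_find_max_index(nn.activation_vectors[nn.total_layers - 1]);
        if (predicted == row_find_max_index(target))
            ++correct;
    }

    return (float)correct / (float)data.num_rows;
}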
