Skip to content
  • P
    Projects
  • G
    Groups
  • S
    Snippets
  • Help

rufort / IAS0360_lab_excercises_2024

  • This project
    • Loading...
  • Sign in
Go to a project
  • Project
  • Repository
  • Issues 0
  • Merge Requests 0
  • Pipelines
  • Wiki
  • Snippets
  • Members
  • Activity
  • Graph
  • Charts
  • Create a new issue
  • Commits
  • Issue Boards
  • Files
  • Commits
  • Branches
  • Tags
  • Contributors
  • Graph
  • Compare
  • Charts
Switch branch/tag
  • IAS0360_lab_excercises_2024
  • lab_4
  • lab4.cpp
Find file
Blame | History | Permalink
  • Nazrul_being's avatar
    Added lab 4 · a4da20db
    Nazrul_being committed 7 months ago
    a4da20db
lab4.cpp 5.31 KB
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100 101 102 103 104 105 106 107 108 109 110 111 112 113 114 115 116 117 118 119 120
#include <iostream>
#include <vector>
#include "../lib/includes/NeuralNetwork.h"

#define NUM_OF_FEATURES    3   // Number of input features (e.g., temperature, humidity, air quality)
#define NUM_OF_HIDDEN_NODES 3  // Number of neurons in the hidden layer
#define NUM_OF_OUTPUT_NODES 1  // Number of output nodes (e.g., predicted class)

double learning_rate = 0.01;  // Learning rate for weight updates — NOTE(review): declared but never read in this file (no backprop step is implemented here)

// Intermediate outputs and storage for the hidden layer
std::vector<double> hiddenLayerOutput(NUM_OF_HIDDEN_NODES);  // a1: hidden-layer activations (ReLU applied to the weighted sum in task1)
std::vector<double> hiddenLayerBias = {0, 0, 0};  // Biases for the 3 hidden neurons, initialized to zero
std::vector<double> hiddenLayerWeightedSum(NUM_OF_HIDDEN_NODES);  // z1: hidden-layer weighted sum, before the activation function

// Weights from input layer to hidden layer (row i = incoming weights of hidden neuron i)
std::vector<std::vector<double>> inputToHiddenWeights = {
    {0.25, 0.5, 0.05},  // Weights for hidden neuron 1
    {0.8, 0.82, 0.3},   // Weights for hidden neuron 2
    {0.5, 0.45, 0.19}   // Weights for hidden neuron 3
};

// Intermediate outputs and storage for the output layer
std::vector<double> outputLayerBias = {0};  // Bias for the single output neuron, initialized to zero
std::vector<double> outputLayerWeightedSum(NUM_OF_OUTPUT_NODES);  // z2: output-layer weighted sum, before the activation function

// Weights from hidden layer to output layer (one row: incoming weights of the single output neuron)
std::vector<std::vector<double>> hiddenToOutputWeights = {
    {0.48, 0.73, 0.03}  // Weights for the output neuron
};

// Predicted values after applying the sigmoid activation function
std::vector<double> predictedOutput(NUM_OF_OUTPUT_NODES);  // yhat (predicted values)

// Training data (normalized input features and expected output).
// NOTE(review): normalizedInput holds 2 examples, but expectedOutput holds only
// one label — task1 forward-passes only normalizedInput[0], so they are used
// consistently; verify against the lab spec if the second example should be scored.
std::vector<std::vector<double>> normalizedInput(2, std::vector<double>(NUM_OF_FEATURES));  // Normalized input features for training
std::vector<std::vector<double>> expectedOutput = {{1}};  // Expected output (labels) for each training example

// Task 1: Perform a forward pass through the network
// Flatten a row-major weight matrix into one contiguous vector, in the layout
// expected by NeuralNetwork::multipleInputMultipleOutput. Extracted because
// task1 previously duplicated this loop verbatim for both weight matrices.
static std::vector<double> flattenWeights(const std::vector<std::vector<double>>& matrix) {
    std::vector<double> flat;
    for (const auto& row : matrix) {
        flat.insert(flat.end(), row.begin(), row.end());
    }
    return flat;
}

// Task 1: Perform a single forward pass through the 3-3-1 network using the
// first training example, then print z1, z2, yhat and the logistic cost.
// Reads the file-scope weights/biases; writes the file-scope intermediate
// buffers (hiddenLayerWeightedSum, hiddenLayerOutput, outputLayerWeightedSum,
// predictedOutput) as a side effect.
void task1() {
    NeuralNetwork nn;

    // Raw input features before normalization
    std::vector<std::vector<double>> rawInput = {
        {23.0, 40.0, 100.0},  // Example 1: temp, hum, air_q
        {22.0, 39.0, 101.0}   // Example 2
    };

    // Normalize the raw input data (both examples; only example 0 is forward-passed below)
    nn.normalizeData2D(rawInput, normalizedInput);
    std::cout << "Normalized training input:\n";
    nn.printMatrix(normalizedInput.size(), NUM_OF_FEATURES, normalizedInput);

    // Step 1: Calculate the weighted sum (z1) for the hidden layer
    const std::vector<double> flattenedInputToHiddenWeights = flattenWeights(inputToHiddenWeights);
    nn.multipleInputMultipleOutput(normalizedInput[0], flattenedInputToHiddenWeights, hiddenLayerBias, hiddenLayerWeightedSum, NUM_OF_FEATURES, NUM_OF_HIDDEN_NODES);
    std::cout << "Output vector (z1) for hidden layer:\n";
    for (double val : hiddenLayerWeightedSum) {
        std::cout << val << " ";
    }
    std::cout << "\n";

    // Step 2: Apply ReLU activation to the hidden layer's weighted sum
    nn.vectorReLU(hiddenLayerWeightedSum, hiddenLayerOutput);

    // Step 3: Calculate the weighted sum (z2) for the output layer
    const std::vector<double> flattenedHiddenToOutputWeights = flattenWeights(hiddenToOutputWeights);
    nn.multipleInputMultipleOutput(hiddenLayerOutput, flattenedHiddenToOutputWeights, outputLayerBias, outputLayerWeightedSum, NUM_OF_HIDDEN_NODES, NUM_OF_OUTPUT_NODES);
    std::cout << "Output vector (z2) for output layer:\n";
    std::cout << outputLayerWeightedSum[0] << "\n";

    // Step 4: Apply Sigmoid activation to the output layer's weighted sum
    nn.vectorSigmoid(outputLayerWeightedSum, predictedOutput);
    std::cout << "Predicted output (yhat) after Sigmoid activation:\n";
    std::cout << predictedOutput[0] << "\n";

    // Step 5: Compute the cost (logistic regression cost function) for this one example
    double cost = nn.computeCost(1, {predictedOutput}, expectedOutput);
    std::cout << "Cost: " << cost << "\n";
}

// Task 2: Save and load the network's state
// Task 2: Demonstrate persistence — save the network to disk, wipe every
// parameter in memory, reload from the file, and rerun the forward pass
// (task1) to show the restored state produces the same results.
void task2() {
    NeuralNetwork nn;
    const std::string filename = "network_save.txt";

    // Persist the current weights and biases.
    nn.saveNetwork(filename, NUM_OF_FEATURES, NUM_OF_HIDDEN_NODES, NUM_OF_OUTPUT_NODES,
                   inputToHiddenWeights, hiddenLayerBias, hiddenToOutputWeights, outputLayerBias);

    // Zero out every parameter so a successful reload is observable.
    auto zeroMatrix = [](std::vector<std::vector<double>>& matrix) {
        for (auto& row : matrix) {
            std::fill(row.begin(), row.end(), 0.0);
        }
    };
    zeroMatrix(inputToHiddenWeights);
    zeroMatrix(hiddenToOutputWeights);
    std::fill(hiddenLayerBias.begin(), hiddenLayerBias.end(), 0.0);
    std::fill(outputLayerBias.begin(), outputLayerBias.end(), 0.0);

    std::cout << "Network weights and biases cleared to zero.\n";

    // Restore all parameters from the file...
    nn.loadNetwork(filename, NUM_OF_FEATURES, NUM_OF_HIDDEN_NODES, NUM_OF_OUTPUT_NODES,
                   inputToHiddenWeights, hiddenLayerBias, hiddenToOutputWeights, outputLayerBias);

    // ...and rerun the forward pass on the reloaded network.
    task1();
}

// Entry point: run the forward-pass demo, then the save/load demo
// (which reruns the forward pass on the reloaded network).
int main() {
    task1();  // Task 1: forward pass + cost
    task2();  // Task 2: save, clear, reload, then forward pass again
    // main implicitly returns 0 when control reaches the end (C++ standard).
}