
A Simple Neural Network



A simple feed-forward neural network: 2 input neurons, 4 hidden neurons, and 1 output neuron, with sigmoid activations and full-batch gradient descent. On the training set below it learns to output 1 when its two inputs differ and 0 when they are equal. The complete program:
#include "pch.h"
#include <iostream>
#include <cmath>
#include <vector>
#include <fstream>
#include <random>



#define INNODE 2    // number of input neurons
#define HIDENODE 4  // number of hidden neurons
#define OUTNODE 1   // number of output neurons

double rate = 0.8;        // learning rate
double threshold = 1e-4;  // stop once the worst per-sample error is below this
size_t mosttimes = 1e6;   // maximum number of training epochs

// One sample: an input vector and (for training data) an expected output vector.
struct Sample {
    std::vector<double> in, out;
};

// One neuron: activation value, bias, and outgoing weights,
// plus accumulators for their batch gradients.
struct Node {
    double value{}, bias{}, bias_delta{};
    std::vector<double> weight, weight_delta;
};

namespace utils {

    // Logistic sigmoid: maps any real number into (0, 1).
    inline double sigmoid(double x) {
        return 1.0 / (1.0 + std::exp(-x));
    }
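
    // Note: sigmoid'(x) = sigmoid(x) * (1 - sigmoid(x)); the training loop
    // below uses this identity in the form value * (1 - value).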

    // Read every whitespace-separated number from a file into a vector.
    std::vector<double> getFileData(const std::string &filename) {
        std::vector<double> res;

        std::ifstream in(filename);
        if (in.is_open()) {
            double buffer;
            while (in >> buffer) {   // stops cleanly at EOF or on bad input
                res.push_back(buffer);
            }
            in.close();
        }
        else {
            std::cout << "Error in reading " << filename << std::endl;
        }

        return res;
    }

    // Parse a training file: each sample is INNODE inputs followed by
    // OUTNODE target values, all whitespace-separated.
    std::vector<Sample> getTrainData(const std::string &filename) {
        std::vector<Sample> res;

        std::vector<double> buffer = getFileData(filename);

        for (size_t i = 0; i < buffer.size(); i += INNODE + OUTNODE) {
            Sample tmp;
            for (size_t t = 0; t < INNODE; t++) {
                tmp.in.push_back(buffer[i + t]);
            }
            for (size_t t = 0; t < OUTNODE; t++) {
                tmp.out.push_back(buffer[i + INNODE + t]);
            }
            res.push_back(tmp);
        }

        return res;
    }

    // Parse a test file: each sample is just INNODE inputs.
    std::vector<Sample> getTestData(const std::string &filename) {
        std::vector<Sample> res;

        std::vector<double> buffer = getFileData(filename);

        for (size_t i = 0; i < buffer.size(); i += INNODE) {
            Sample tmp;
            for (size_t t = 0; t < INNODE; t++) {
                tmp.in.push_back(buffer[i + t]);
            }
            res.push_back(tmp);
        }

        return res;
    }

}

// The three layers, as arrays of heap-allocated nodes.
Node *inputLayer[INNODE], *hideLayer[HIDENODE], *outLayer[OUTNODE];

// Allocate every node and draw all initial weights and biases
// uniformly at random from [-1, 1).
inline void init() {
    std::mt19937 rd;
    rd.seed(std::random_device()());

    std::uniform_real_distribution<double> distribution(-1, 1);

    for (size_t i = 0; i < INNODE; i++) {
        ::inputLayer[i] = new Node();
        for (size_t j = 0; j < HIDENODE; j++) {
            ::inputLayer[i]->weight.push_back(distribution(rd));
            ::inputLayer[i]->weight_delta.push_back(0.f);
        }
    }

    for (size_t i = 0; i < HIDENODE; i++) {
        ::hideLayer[i] = new Node();
        ::hideLayer[i]->bias = distribution(rd);
        for (size_t j = 0; j < OUTNODE; j++) {
            ::hideLayer[i]->weight.push_back(distribution(rd));
            ::hideLayer[i]->weight_delta.push_back(0.f);
        }
    }

    for (size_t i = 0; i < OUTNODE; i++) {
        ::outLayer[i] = new Node();
        ::outLayer[i]->bias = distribution(rd);
    }
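
    // Note: input nodes get no bias; they simply hold the raw input values.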

}

// Zero all accumulated gradients at the start of each epoch.
inline void reset_delta() {

    for (size_t i = 0; i < INNODE; i++) {
        ::inputLayer[i]->weight_delta.assign(::inputLayer[i]->weight_delta.size(), 0.f);
    }

    for (size_t i = 0; i < HIDENODE; i++) {
        ::hideLayer[i]->bias_delta = 0.f;
        ::hideLayer[i]->weight_delta.assign(::hideLayer[i]->weight_delta.size(), 0.f);
    }

    for (size_t i = 0; i < OUTNODE; i++) {
        ::outLayer[i]->bias_delta = 0.f;
    }

}

int main() {

    init();

    std::vector<Sample> train_data = utils::getTrainData("D:\\traindata.txt");


    // training: full-batch gradient descent for at most mosttimes epochs
    for (size_t times = 0; times < mosttimes; times++) {

        reset_delta();

        double error_max = 0.f;

        for (auto &idx : train_data) {

            for (size_t i = 0; i < INNODE; i++) {
                ::inputLayer[i]->value = idx.in[i];
            }

            // forward propagation (activation = sigmoid(weighted sum - bias))
            for (size_t j = 0; j < HIDENODE; j++) {
                double sum = 0;
                for (size_t i = 0; i < INNODE; i++) {
                    sum += ::inputLayer[i]->value * ::inputLayer[i]->weight[j];
                }
                sum -= ::hideLayer[j]->bias;

                ::hideLayer[j]->value = utils::sigmoid(sum);
            }

            for (size_t j = 0; j < OUTNODE; j++) {
                double sum = 0;
                for (size_t i = 0; i < HIDENODE; i++) {
                    sum += ::hideLayer[i]->value * ::hideLayer[i]->weight[j];
                }
                sum -= ::outLayer[j]->bias;

                ::outLayer[j]->value = utils::sigmoid(sum);
            }

            // squared error for this sample
            double error = 0.f;
            for (size_t i = 0; i < OUTNODE; i++) {
                double tmp = std::fabs(::outLayer[i]->value - idx.out[i]);
                error += tmp * tmp / 2;
            }

            error_max = std::max(error_max, error);
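            // error_max is the worst per-sample error this epoch; it drives
            // the stopping test below.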

            // backpropagation
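            // Gradient bookkeeping: with per-sample error E = (o - t)^2 / 2
            // and o = sigmoid(net - bias),
            //   dE/d(net)  =  (o - t) * o * (1 - o)
            //   dE/d(bias) = -(o - t) * o * (1 - o)   (the bias is subtracted)
            // Each *_delta below accumulates the NEGATIVE gradient over the
            // batch, so the later update "param += rate * delta / N" is
            // ordinary batch gradient descent.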

            for (size_t i = 0; i < OUTNODE; i++) {
                double bias_delta = -(idx.out[i] - ::outLayer[i]->value) *
                    ::outLayer[i]->value * (1.0 - ::outLayer[i]->value);
                ::outLayer[i]->bias_delta += bias_delta;
            }

            for (size_t i = 0; i < HIDENODE; i++) {
                for (size_t j = 0; j < OUTNODE; j++) {
                    double weight_delta = (idx.out[j] - ::outLayer[j]->value) *
                        ::outLayer[j]->value * (1.0 - ::outLayer[j]->value) *
                        ::hideLayer[i]->value;
                    ::hideLayer[i]->weight_delta[j] += weight_delta;
                }
            }

            for (size_t i = 0; i < HIDENODE; i++) {
                double sum = 0;
                for (size_t j = 0; j < OUTNODE; j++) {
                    sum += -(idx.out[j] - ::outLayer[j]->value) *
                        ::outLayer[j]->value * (1.0 - ::outLayer[j]->value) *
                        ::hideLayer[i]->weight[j];
                }
                ::hideLayer[i]->bias_delta +=
                    sum * ::hideLayer[i]->value * (1.0 - ::hideLayer[i]->value);
            }

            for (size_t i = 0; i < INNODE; i++) {
                for (size_t j = 0; j < HIDENODE; j++) {
                    double sum = 0.f;
                    for (size_t k = 0; k < OUTNODE; k++) {
                        sum += (idx.out[k] - ::outLayer[k]->value) *
                            ::outLayer[k]->value * (1.0 - ::outLayer[k]->value) *
                            ::hideLayer[j]->weight[k];
                    }
                    ::inputLayer[i]->weight_delta[j] +=
                        sum *
                        ::hideLayer[j]->value * (1.0 - ::hideLayer[j]->value) *
                        ::inputLayer[i]->value;
                }
            }

        }

        if (error_max < ::threshold) {
            std::cout << "Success with " << times + 1 << " times training." << std::endl;
            std::cout << "Maximum error: " << error_max << std::endl;
            break;
        }

        auto train_data_size = double(train_data.size());
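        // Apply the updates accumulated over the whole epoch, averaged over
        // the number of training samples.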

        for (size_t i = 0; i < INNODE; i++) {
            for (size_t j = 0; j < HIDENODE; j++) {
                ::inputLayer[i]->weight[j] +=
                    rate * ::inputLayer[i]->weight_delta[j] / train_data_size;
            }
        }

        for (size_t i = 0; i < HIDENODE; i++) {
            ::hideLayer[i]->bias +=
                rate * ::hideLayer[i]->bias_delta / train_data_size;
            for (size_t j = 0; j < OUTNODE; j++) {
                ::hideLayer[i]->weight[j] +=
                    rate * ::hideLayer[i]->weight_delta[j] / train_data_size;
            }
        }

        for (size_t i = 0; i < OUTNODE; i++) {
            ::outLayer[i]->bias +=
                rate * ::outLayer[i]->bias_delta / train_data_size;
        }

    }
    
    std::vector<Sample> test_data = utils::getTestData("D:\\testdata.txt");

    // predict
    for (auto &idx : test_data) {

        for (size_t i = 0; i < INNODE; i++) {
            ::inputLayer[i]->value = idx.in[i];
        }

        for (size_t j = 0; j < HIDENODE; j++) {
            double sum = 0;
            for (size_t i = 0; i < INNODE; i++) {
                sum += ::inputLayer[i]->value * ::inputLayer[i]->weight[j];
            }
            sum -= ::hideLayer[j]->bias;

            ::hideLayer[j]->value = utils::sigmoid(sum);
        }

        for (size_t j = 0; j < OUTNODE; j++) {
            double sum = 0;
            for (size_t i = 0; i < HIDENODE; i++) {
                sum += ::hideLayer[i]->value * ::hideLayer[i]->weight[j];
            }
            sum -= ::outLayer[j]->bias;

            ::outLayer[j]->value = utils::sigmoid(sum);

            idx.out.push_back(::outLayer[j]->value);
        }

        // print the inputs followed by the prediction, once per sample
        for (auto &tmp : idx.in) {
            std::cout << tmp << " ";
        }
        for (auto &tmp : idx.out) {
            std::cout << tmp << " ";
        }
        std::cout << std::endl;

    }
    
    return 0;
}
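
Once training converges, the program prints the number of epochs it needed and the largest remaining per-sample error, then one line per test case: the two inputs followed by the network's prediction.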

Training set (each line: input1 input2 target; the target is 1 when the inputs differ):

0 0 0
0 1 1
1 0 1
1 1 0
0.8 0.8 0
0.6 0.6 0
0.4 0.4 0
0.2 0.2 0
1.0 0.8 1
1.0 0.6 1
1.0 0.4 1
1.0 0.2 1
0.8 0.6 1
0.6 0.4 1
0.4 0.2 1
0.2 0 1
0.999 0.666 1
0.666 0.333 1
0.333 0 1
0.8 0.4 1
0.4 0 1
0 0.123 1
0.12 0.23 1
0.23 0.34 1
0.34 0.45 1
0.45 0.56 1
0.56 0.67 1
0.67 0.78 1
0.78 0.89 1
0.89 0.99 1

Test cases (input pairs, one pair per line):

0.111 0.112
0.001 0.999
0.123 0.345
0.123 0.456
0.123 0.789
0.234 0.567
0.234 0.678
0.387 0.401
0.616 0.717
0.701 0.919
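
The program expects these files at the hard-coded paths in main. Purely as a hypothetical convenience (not part of the original post), a separate helper that writes them could look like the minimal sketch below; only the first few samples are shown, so extend it with the full sets above before a real run.

#include <fstream>

int main() {
    // Write a few training triples (input1 input2 target) to the path
    // the network's main() reads from; extend with the full set above.
    std::ofstream train("D:\\traindata.txt");
    train << "0 0 0\n" << "0 1 1\n" << "1 0 1\n" << "1 1 0\n";

    // Write a few test pairs (input1 input2).
    std::ofstream test("D:\\testdata.txt");
    test << "0.111 0.112\n" << "0.001 0.999\n" << "0.123 0.345\n";
    return 0;
}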

Source: https://www.cnblogs.com/xusi/p/16290824.html
