- //
- // Created by Austin on 2023/10/10.
- //
- #include "EntropyWeights.h"
- #include <QDebug>
- #include <numeric>
- #include <cmath>
- EntropyWeights::EntropyWeights(const EntropyMat& mat,
- const QVector<bool>& direction)
- : mat_(mat), direction_(direction), ymin_(0.002), ymax_(0.996)
- {
- index_num_ = mat_.count();
- if (index_num_ == 0)
- {
- sample_num_ = 0;
- }
- else
- {
- sample_num_ = mat_.at(0).count();
- }
- }
- void EntropyWeights::setYMin(double ymin)
- {
- if (ymin < 0.002)
- {
- ymin_ = 0.002;
- }
- else if (ymin > ymax_)
- {
- return;
- }
- ymin_ = ymin;
- }
- void EntropyWeights::setYMax(double ymax)
- {
- if (ymax > 0.996)
- {
- ymax_ = 0.996;
- }
- else if (ymax < ymin_)
- {
- return;
- }
- ymax_ = ymax;
- }
- void EntropyWeights::getWeights(QVector<double>& weights, QVector<double>& score)
- {
- // 计算第j个指标下,第i个样本占该指标的比重p
- EntropyMat pMat;
- for (int i = 0; i < mat_.count(); i++)
- {
- pMat.append(QVector<double>(mat_.at(i).count(), 0));
- double sum = std::accumulate(mat_.at(i).begin(), mat_.at(i).end(), 0.0);
- for (int j = 0; j < mat_.at(i).count(); j++)
- {
- pMat[i][j] = mat_[i][j] / sum;
- }
- }
- double k = 1 / std::log(sample_num_);
- // 计算第j个指标熵值
- QVector<double> e(mat_.count(), 0);
- for (int i = 0; i < mat_.count(); i++)
- {
- // QVector<double> f(mat_.at(i).count(), 0.0);
- double fSum = 0;
- for (int j = 0; j < mat_.at(i).count(); j++)
- {
- fSum += pMat[i][j] * std::log(pMat[i][j]);
- }
- e[i] = -k * fSum;
- }
- //qDebug() << e;
- // 计算信息熵冗余度
- QVector<double> d(mat_.count(), 0);
- for (int i = 0; i < e.count(); i++)
- {
- d[i] = 1 - e[i];
- }
- // 求权值
- double dSum = std::accumulate(d.begin(), d.end(), 0.0);
- for (auto v : d)
- {
- weights.append(v / dSum);
- }
- for (int i = 0; i < sample_num_; ++i)
- {
- double s = 0;
- for (int j = 0; j < index_num_; j++)
- {
- s += pMat[j][i] * weights[j];
- }
- score.append(s * 100);
- }
- }
- /**
- * 实现正向或负向指标归一化,返回归一化后的数据矩阵
- */
- void EntropyWeights::normalization()
- {
- for (int i = 0; i < mat_.count(); i++)
- {
- double minValue = 0;
- double maxValue = 0;
- getMinMax(mat_.at(i), minValue, maxValue);
- if (direction_.at(i))
- {
- for (int j = 0; j < mat_.at(i).count(); j++)
- {
- mat_[i][j] = (ymax_ - ymin_) * (mat_.at(i).at(j) - minValue) / (maxValue - minValue) + ymin_;
- }
- }
- else
- {
- for (int j = 0; j < mat_.at(i).count(); j++)
- {
- mat_[i][j] = (ymax_ - ymin_) * (maxValue - mat_.at(i).at(j)) / (maxValue - minValue) + ymin_;
- }
- }
- }
- }
- void EntropyWeights::getMinMax(const QVector<double>& in, double& min, double& max)
- {
- if (in.count() > 0)
- {
- min = max = in.at(0);
- for (int i = 1; i < in.count(); i++)
- {
- if (in.at(i) < min)
- {
- min = in.at(i);
- }
- else if (in.at(i) > max)
- {
- max = in.at(i);
- }
- }
- }
- }
- void EntropyWeights::compute(QVector<double>& weights, QVector<double>& score)
- {
- normalization();
- qDebug() << mat_;
- getWeights(weights, score);
- // qDebug() << "mat_" << mat_;
- }
|