EntropyWeights.cpp

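The listing includes "EntropyWeights.h", which is not shown here. Below is a minimal sketch of what that header presumably declares, inferred from how the members and the EntropyMat type are used in the .cpp; the alias, access specifiers, and layout are assumptions.

#pragma once
#include <QVector>

// Assumed alias: one row per indicator, one column per sample.
using EntropyMat = QVector<QVector<double>>;

class EntropyWeights
{
public:
    EntropyWeights(const EntropyMat &mat, const QVector<bool> &direction);
    void setYMin(double ymin);
    void setYMax(double ymax);
    void compute(QVector<double> &weights, QVector<double> &score);

private:
    void normalization();
    void getWeights(QVector<double> &weights, QVector<double> &score);
    void getMinMax(const QVector<double> &in, double &min, double &max);

    double ymin_;
    double ymax_;
    EntropyMat mat_;
    QVector<bool> direction_;
    int index_num_ = 0;
    int sample_num_ = 0;
};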
//
// Created by Austin on 2023/10/10.
//
#include "EntropyWeights.h"
#include <QDebug>
#include <numeric>
#include <cmath>

EntropyWeights::EntropyWeights(const EntropyMat &mat, const QVector<bool> &direction)
    : ymin_(0.002), ymax_(0.996), mat_(mat), direction_(direction)
{
    index_num_ = mat_.count();
    if (index_num_ == 0) {
        sample_num_ = 0;
    } else {
        sample_num_ = mat_.at(0).count();
    }
}
void EntropyWeights::setYMin(double ymin)
{
    if (ymin < 0.002) {
        ymin_ = 0.002;   // clamp to the lower bound
        return;
    }
    if (ymin > ymax_) {
        return;          // keep ymin below ymax
    }
    ymin_ = ymin;
}

void EntropyWeights::setYMax(double ymax)
{
    if (ymax > 0.996) {
        ymax_ = 0.996;   // clamp to the upper bound
        return;
    }
    if (ymax < ymin_) {
        return;          // keep ymax above ymin
    }
    ymax_ = ymax;
}
void EntropyWeights::getWeights(QVector<double> &weights, QVector<double> &score)
{
    // Proportion p[i][j]: share of sample j within indicator i
    EntropyMat pMat;
    for (int i = 0; i < mat_.count(); i++) {
        pMat.append(QVector<double>(mat_.at(i).count(), 0));
        double sum = std::accumulate(mat_.at(i).begin(), mat_.at(i).end(), 0.0);
        for (int j = 0; j < mat_.at(i).count(); j++) {
            pMat[i][j] = mat_[i][j] / sum;
        }
    }

    // Entropy of each indicator; assumes sample_num_ > 1 so std::log() is non-zero
    double k = 1 / std::log(sample_num_);
    QVector<double> e(mat_.count(), 0);
    for (int i = 0; i < mat_.count(); i++) {
        double fSum = 0;
        for (int j = 0; j < mat_.at(i).count(); j++) {
            fSum += pMat[i][j] * std::log(pMat[i][j]);
        }
        e[i] = -k * fSum;
    }

    // Information entropy redundancy (degree of divergence)
    QVector<double> d(mat_.count(), 0);
    for (int i = 0; i < e.count(); i++) {
        d[i] = 1 - e[i];
    }

    // Weight of each indicator: its redundancy over the total redundancy
    double dSum = std::accumulate(d.begin(), d.end(), 0.0);
    for (auto v : d) {
        weights.append(v / dSum);
    }

    // Score of each sample: weighted sum of its proportions, scaled to 0..100
    for (int i = 0; i < sample_num_; ++i) {
        double s = 0;
        for (int j = 0; j < index_num_; j++) {
            s += pMat[j][i] * weights[j];
        }
        score.append(s * 100);
    }
}
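// For reference, getWeights() above implements the standard entropy weight
// method: with proportions p_ij, the entropy of indicator i is
//   e_i = -k * sum_j( p_ij * ln(p_ij) ),  with k = 1 / ln(n),
// where n is the number of samples; the redundancy is d_i = 1 - e_i and the
// weight is w_i = d_i / sum(d). Each sample's score is the weighted sum of
// its proportions, multiplied by 100.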
/**
 * Normalize each indicator in place (positive or negative direction)
 * to the range [ymin_, ymax_].
 */
void EntropyWeights::normalization()
{
    for (int i = 0; i < mat_.count(); i++) {
        double minValue = 0;
        double maxValue = 0;
        getMinMax(mat_.at(i), minValue, maxValue);
        // Default to a positive (benefit) indicator when no direction is given
        bool dir = true;
        if (!direction_.isEmpty()) {
            dir = direction_.at(i);
        }
        // Assumes maxValue > minValue for every indicator row
        if (dir) {
            for (int j = 0; j < mat_.at(i).count(); j++) {
                mat_[i][j] = (ymax_ - ymin_) * (mat_.at(i).at(j) - minValue) / (maxValue - minValue) + ymin_;
            }
        } else {
            for (int j = 0; j < mat_.at(i).count(); j++) {
                mat_[i][j] = (ymax_ - ymin_) * (maxValue - mat_.at(i).at(j)) / (maxValue - minValue) + ymin_;
            }
        }
    }
}
void EntropyWeights::getMinMax(const QVector<double> &in, double &min, double &max)
{
    if (in.count() > 0) {
        min = max = in.at(0);
        for (int i = 1; i < in.count(); i++) {
            if (in.at(i) < min) {
                min = in.at(i);
            } else if (in.at(i) > max) {
                max = in.at(i);
            }
        }
    }
}
void EntropyWeights::compute(QVector<double> &weights, QVector<double> &score)
{
    normalization();
    qDebug() << mat_;
    getWeights(weights, score);
}
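A minimal usage sketch, assuming EntropyMat is QVector<QVector<double>> with one row per indicator and one column per sample; the data values and the main() wrapper are illustrative only.

#include "EntropyWeights.h"
#include <QDebug>

int main()
{
    // Three indicators (rows) observed on four samples (columns); made-up data.
    EntropyMat mat = {
        {12.0, 18.5, 9.3, 15.1},   // indicator 0, positive (larger is better)
        {0.42, 0.37, 0.51, 0.44},  // indicator 1, positive
        {7.1,  3.4,  5.8,  6.2}    // indicator 2, negative (smaller is better)
    };
    QVector<bool> direction = {true, true, false};

    EntropyWeights ew(mat, direction);
    QVector<double> weights;
    QVector<double> scores;
    ew.compute(weights, scores);

    qDebug() << "weights:" << weights;  // one weight per indicator, sums to 1
    qDebug() << "scores:" << scores;    // one 0..100 score per sample
    return 0;
}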