// EntropyWeights.cpp
//
// Created by Austin on 2023/10/10.
//
#include "EntropyWeights.h"
#include <QDebug>
#include <numeric>
#include <cmath>
  8. EntropyWeights::EntropyWeights(const EntropyMat& mat,
  9. const QVector<bool>& direction)
  10. : mat_(mat), direction_(direction), ymin_(0.002), ymax_(0.996)
  11. {
  12. index_num_ = mat_.count();
  13. if (index_num_ == 0)
  14. {
  15. sample_num_ = 0;
  16. }
  17. else
  18. {
  19. sample_num_ = mat_.at(0).count();
  20. }
  21. }
  22. void EntropyWeights::setYMin(double ymin)
  23. {
  24. if (ymin < 0.002)
  25. {
  26. ymin_ = 0.002;
  27. }
  28. else if (ymin > ymax_)
  29. {
  30. return;
  31. }
  32. ymin_ = ymin;
  33. }
  34. void EntropyWeights::setYMax(double ymax)
  35. {
  36. if (ymax > 0.996)
  37. {
  38. ymax_ = 0.996;
  39. }
  40. else if (ymax < ymin_)
  41. {
  42. return;
  43. }
  44. ymax_ = ymax;
  45. }
  46. void EntropyWeights::getWeights(QVector<double>& weights, QVector<double>& score)
  47. {
  48. // 计算第j个指标下,第i个样本占该指标的比重p
  49. EntropyMat pMat;
  50. for (int i = 0; i < mat_.count(); i++)
  51. {
  52. pMat.append(QVector<double>(mat_.at(i).count(), 0));
  53. double sum = std::accumulate(mat_.at(i).begin(), mat_.at(i).end(), 0.0);
  54. for (int j = 0; j < mat_.at(i).count(); j++)
  55. {
  56. pMat[i][j] = mat_[i][j] / sum;
  57. }
  58. }
  59. double k = 1 / std::log(sample_num_);
  60. // 计算第j个指标熵值
  61. QVector<double> e(mat_.count(), 0);
  62. for (int i = 0; i < mat_.count(); i++)
  63. {
  64. // QVector<double> f(mat_.at(i).count(), 0.0);
  65. double fSum = 0;
  66. for (int j = 0; j < mat_.at(i).count(); j++)
  67. {
  68. fSum += pMat[i][j] * std::log(pMat[i][j]);
  69. }
  70. e[i] = -k * fSum;
  71. }
  72. //qDebug() << e;
  73. // 计算信息熵冗余度
  74. QVector<double> d(mat_.count(), 0);
  75. for (int i = 0; i < e.count(); i++)
  76. {
  77. d[i] = 1 - e[i];
  78. }
  79. // 求权值
  80. double dSum = std::accumulate(d.begin(), d.end(), 0.0);
  81. for (auto v : d)
  82. {
  83. weights.append(v / dSum);
  84. }
  85. for (int i = 0; i < sample_num_; ++i)
  86. {
  87. double s = 0;
  88. for (int j = 0; j < index_num_; j++)
  89. {
  90. s += pMat[j][i] * weights[j];
  91. }
  92. score.append(s * 100);
  93. }
  94. }
  95. /**
  96. * 实现正向或负向指标归一化,返回归一化后的数据矩阵
  97. */
  98. void EntropyWeights::normalization()
  99. {
  100. for (int i = 0; i < mat_.count(); i++)
  101. {
  102. double minValue = 0;
  103. double maxValue = 0;
  104. getMinMax(mat_.at(i), minValue, maxValue);
  105. if (direction_.at(i))
  106. {
  107. for (int j = 0; j < mat_.at(i).count(); j++)
  108. {
  109. mat_[i][j] = (ymax_ - ymin_) * (mat_.at(i).at(j) - minValue) / (maxValue - minValue) + ymin_;
  110. }
  111. }
  112. else
  113. {
  114. for (int j = 0; j < mat_.at(i).count(); j++)
  115. {
  116. mat_[i][j] = (ymax_ - ymin_) * (maxValue - mat_.at(i).at(j)) / (maxValue - minValue) + ymin_;
  117. }
  118. }
  119. }
  120. }
  121. void EntropyWeights::getMinMax(const QVector<double>& in, double& min, double& max)
  122. {
  123. if (in.count() > 0)
  124. {
  125. min = max = in.at(0);
  126. for (int i = 1; i < in.count(); i++)
  127. {
  128. if (in.at(i) < min)
  129. {
  130. min = in.at(i);
  131. }
  132. else if (in.at(i) > max)
  133. {
  134. max = in.at(i);
  135. }
  136. }
  137. }
  138. }
  139. void EntropyWeights::compute(QVector<double>& weights, QVector<double>& score)
  140. {
  141. normalization();
  142. qDebug() << mat_;
  143. getWeights(weights, score);
  144. // qDebug() << "mat_" << mat_;
  145. }