root 10 місяців тому
батько
коміт
e14b2388a8
100 змінених файлів з 8565 додано та 271 видалено
  1. 16 0
      .hbuilderx/launch.json
  2. 11 4
      App.vue
  3. 3 3
      api/index.js
  4. 5 5
      common/config.js
  5. 105 0
      components/ProgressPlus/ProgressPlus.vue
  6. 1 1
      components/echarts-uniapp/echarts-uniapp.vue
  7. 159 110
      components/mpvueEcharts/leiDa.vue
  8. 108 23
      examWidght/components/Li-ExamWidght/Li-ExamWidght.vue
  9. 11 1
      examWidght/components/lineProgress/index.vue
  10. 77 88
      examWidght/examWidght/index.vue
  11. 33 31
      index.html
  12. 23 5
      manifest.json
  13. 560 0
      newScale/EQtest/index.vue
  14. 204 0
      newScale/EQtest/paySuccess.vue
  15. 187 0
      newScale/EQtest/paymentPage.vue
  16. 386 0
      newScale/EQtest/testPage.vue
  17. 356 0
      newScale/EQtest/testResult.vue
  18. 15 0
      node_modules/.bin/mkdirp
  19. 17 0
      node_modules/.bin/mkdirp.cmd
  20. 18 0
      node_modules/.bin/mkdirp.ps1
  21. 15 0
      node_modules/.bin/rimraf
  22. 17 0
      node_modules/.bin/rimraf.cmd
  23. 18 0
      node_modules/.bin/rimraf.ps1
  24. 10 0
      node_modules/@gar/promisify/LICENSE.md
  25. 65 0
      node_modules/@gar/promisify/README.md
  26. 36 0
      node_modules/@gar/promisify/index.js
  27. 65 0
      node_modules/@gar/promisify/package.json
  28. 20 0
      node_modules/@npmcli/fs/LICENSE.md
  29. 60 0
      node_modules/@npmcli/fs/README.md
  30. 17 0
      node_modules/@npmcli/fs/lib/common/file-url-to-path/index.js
  31. 121 0
      node_modules/@npmcli/fs/lib/common/file-url-to-path/polyfill.js
  32. 20 0
      node_modules/@npmcli/fs/lib/common/get-options.js
  33. 9 0
      node_modules/@npmcli/fs/lib/common/node.js
  34. 92 0
      node_modules/@npmcli/fs/lib/common/owner.js
  35. 22 0
      node_modules/@npmcli/fs/lib/copy-file.js
  36. 15 0
      node_modules/@npmcli/fs/lib/cp/LICENSE
  37. 22 0
      node_modules/@npmcli/fs/lib/cp/index.js
  38. 428 0
      node_modules/@npmcli/fs/lib/cp/polyfill.js
  39. 129 0
      node_modules/@npmcli/fs/lib/errors.js
  40. 8 0
      node_modules/@npmcli/fs/lib/fs.js
  41. 10 0
      node_modules/@npmcli/fs/lib/index.js
  42. 32 0
      node_modules/@npmcli/fs/lib/mkdir/index.js
  43. 81 0
      node_modules/@npmcli/fs/lib/mkdir/polyfill.js
  44. 28 0
      node_modules/@npmcli/fs/lib/mkdtemp.js
  45. 22 0
      node_modules/@npmcli/fs/lib/rm/index.js
  46. 239 0
      node_modules/@npmcli/fs/lib/rm/polyfill.js
  47. 39 0
      node_modules/@npmcli/fs/lib/with-temp-dir.js
  48. 19 0
      node_modules/@npmcli/fs/lib/write-file.js
  49. 66 0
      node_modules/@npmcli/fs/package.json
  50. 22 0
      node_modules/@npmcli/move-file/LICENSE.md
  51. 69 0
      node_modules/@npmcli/move-file/README.md
  52. 162 0
      node_modules/@npmcli/move-file/index.js
  53. 64 0
      node_modules/@npmcli/move-file/package.json
  54. 51 0
      node_modules/aggregate-error/index.d.ts
  55. 47 0
      node_modules/aggregate-error/index.js
  56. 9 0
      node_modules/aggregate-error/license
  57. 73 0
      node_modules/aggregate-error/package.json
  58. 61 0
      node_modules/aggregate-error/readme.md
  59. 2 0
      node_modules/balanced-match/.github/FUNDING.yml
  60. 21 0
      node_modules/balanced-match/LICENSE.md
  61. 97 0
      node_modules/balanced-match/README.md
  62. 62 0
      node_modules/balanced-match/index.js
  63. 76 0
      node_modules/balanced-match/package.json
  64. 21 0
      node_modules/brace-expansion/LICENSE
  65. 129 0
      node_modules/brace-expansion/README.md
  66. 201 0
      node_modules/brace-expansion/index.js
  67. 75 0
      node_modules/brace-expansion/package.json
  68. 16 0
      node_modules/cacache/LICENSE.md
  69. 703 0
      node_modules/cacache/README.md
  70. 237 0
      node_modules/cacache/get.js
  71. 46 0
      node_modules/cacache/index.js
  72. 29 0
      node_modules/cacache/lib/content/path.js
  73. 244 0
      node_modules/cacache/lib/content/read.js
  74. 19 0
      node_modules/cacache/lib/content/rm.js
  75. 189 0
      node_modules/cacache/lib/content/write.js
  76. 394 0
      node_modules/cacache/lib/entry-index.js
  77. 73 0
      node_modules/cacache/lib/memoization.js
  78. 30 0
      node_modules/cacache/lib/util/disposer.js
  79. 142 0
      node_modules/cacache/lib/util/fix-owner.js
  80. 7 0
      node_modules/cacache/lib/util/hash-to-segments.js
  81. 67 0
      node_modules/cacache/lib/util/move-file.js
  82. 35 0
      node_modules/cacache/lib/util/tmp.js
  83. 287 0
      node_modules/cacache/lib/verify.js
  84. 6 0
      node_modules/cacache/ls.js
  85. 112 0
      node_modules/cacache/package.json
  86. 83 0
      node_modules/cacache/put.js
  87. 31 0
      node_modules/cacache/rm.js
  88. 3 0
      node_modules/cacache/verify.js
  89. 15 0
      node_modules/chownr/LICENSE
  90. 3 0
      node_modules/chownr/README.md
  91. 167 0
      node_modules/chownr/chownr.js
  92. 66 0
      node_modules/chownr/package.json
  93. 47 0
      node_modules/clean-stack/index.d.ts
  94. 40 0
      node_modules/clean-stack/index.js
  95. 9 0
      node_modules/clean-stack/license
  96. 71 0
      node_modules/clean-stack/package.json
  97. 76 0
      node_modules/clean-stack/readme.md
  98. 24 0
      node_modules/commondir/LICENSE
  99. 3 0
      node_modules/commondir/example/dir.js
  100. 29 0
      node_modules/commondir/index.js

+ 16 - 0
.hbuilderx/launch.json

@@ -0,0 +1,16 @@
+{
+    "version" : "1.0",
+    "configurations" : [
+        {
+            "openVueDevtools" : true,
+            "type" : "uni-app:h5"
+        },
+	{
+		"mp-toutiao" : 
+		{
+			"launchtype" : "local"
+		},
+		"type" : "uniCloud"
+	}
+    ]
+}

+ 11 - 4
App.vue

@@ -1,8 +1,6 @@
 <script>
 	export default {
-		onLaunch: function() {
-			console.log('App Launch')
-		},
+		onLaunch: function() {},
 		onShow: function() {
 			console.log('App Show')
 		},
@@ -14,4 +12,13 @@
 
 <style>
 	/*每个页面公共css */
-</style>
+	@font-face {
+		font-family: 'Alibaba PuHuiTi 2.0';
+		src: url('../../static/images/EQtest/font/AlibabaPuHuiTi-2-55-Regular/AlibabaPuHuiTi-2-55-Regular.woff2') format('woff2'),
+			url('../../static/images/EQtest/font/AlibabaPuHuiTi-2-55-Regular/AlibabaPuHuiTi-2-55-Regular.woff') format('woff'),
+			url('../../static/images/EQtest/font/AlibabaPuHuiTi-2-55-Regular/AlibabaPuHuiTi-2-55-Regular.ttf') format('truetype');
+		font-weight: normal;
+		font-style: normal;
+		font-display: swap;
+	}
+</style>

+ 3 - 3
api/index.js

@@ -1,12 +1,12 @@
 // 答题列表
 export const getScaleInfo = "scaleInfo"
 //首页查询量表信息
-export const getScaleTitle= 'subjectInfo/getSubjectByFlagV2'
+export const getScaleTitle = 'subjectInfo/getSubjectByFlagV2'
 //点击立即测试查询或生成用户
-export const getUserInfo="scaleExternalSource/save"
+export const getUserInfo = "scaleExternalSource/save"
 //提交量表答案进行提交
 export const getResult = "result"
 //获取金额
 export const queryPromotionBySubjectId = "api/promotionInfo/queryPromotionBySubjectId"
 //获取测试记录
-export const getRecordById='getRecordById'
+export const getRecordById = 'record/getWeiboRecordById'

+ 5 - 5
common/config.js

@@ -1,17 +1,17 @@
 // const H5 = ''
 export const urlphoto = "https://test.jue-ming.com";
 // export const urlphoto =  'https://152.136.24.101'
-export const url = "https://test.jue-ming.com";
+export const url = "https://hnhong-duo.com";
 // export const url ="http://118.195.171.239";
-// export const url ="http://10.113.248.4";
-export const baseUrl = `${url}:8444/`;
+// export const url = "http://10.113.248.4";
+export const baseUrl = `${url}:8445/`;
 // export const baseUrl = `${url}:8090/`;
 export const photoUrl = `${urlphoto}:8849/`
-export const scoketUrl= "wss://cognitive.wistcm.com:8090/websocket/"
+export const scoketUrl = "wss://cognitive.wistcm.com:8090/websocket/"
 // let institution = H5 === "H5" ? "PUB001" : "PUB001"; 
 export const textContent = {
 	institutionNo: "PUB001",
 	roleType: 1
 }
 
-export const VERSION_201= "2.0.1";
+export const VERSION_201 = "2.0.1";

+ 105 - 0
components/ProgressPlus/ProgressPlus.vue

@@ -0,0 +1,105 @@
+<template>
+	<view class="progress-plus" style="width: 100%">
+		<view class="progress-box">
+			<view class="gray"></view>
+			<view class="colors" :style="setColor()"></view>
+			<view class="icon icon1"></view>
+			<view class="icon icon2"></view>
+			<view class="icon icon3"></view>
+			<view class="icon icon4"></view>
+		</view>
+		<span style="display: inline-block;padding-left: 5px" :style="{color:color}">{{ score }}</span>
+	</view>
+</template>
+
+<script>
+	export default {
+		name: 'ProgressPlus',
+		props: {
+			color: {
+				type: String,
+				default: 'green'
+			},
+			percent: {
+				type: Number,
+				default: 0
+			},
+			score: {
+				type: Number,
+				default: 0
+			},
+			width: {
+				type: Number,
+				default: 200
+			}
+		},
+		methods: {
+			setColor() {
+				return {
+					'background-color': this.color,
+					'left': (this.percent - 100) + '%'
+				}
+			}
+		}
+	}
+</script>
+
+<style scoped lang="scss">
+	.progress-plus {
+		display: flex;
+		align-items: center;
+
+		&>span {
+			width: 48rpx;
+			font-size: 24rpx;
+		}
+
+		.progress-box {
+			display: inline-block;
+			position: relative;
+			overflow: hidden;
+			flex: 1;
+			border-radius: 8rpx;
+
+			&>view {
+				width: 100%;
+				height: 12rpx;
+			}
+
+			.gray {
+				background-color: #E4EAF0;
+			}
+
+			.colors {
+				/*background-color: green;*/
+				position: absolute;
+				top: 0;
+				transition: left 1s;
+			}
+
+			.icon {
+				position: absolute;
+				width: 2px;
+				bottom: 0;
+				top: 0;
+				background-color: #ffffff;
+
+				&.icon1 {
+					left: 20%;
+				}
+
+				&.icon2 {
+					left: 40%;
+				}
+
+				&.icon3 {
+					left: 60%;
+				}
+
+				&.icon4 {
+					left: 80%;
+				}
+			}
+		}
+	}
+</style>

+ 1 - 1
components/echarts-uniapp/echarts-uniapp.vue

@@ -19,7 +19,7 @@
 	 */
 	// import WxCanvas from './wx-canvas.js';
 	// import * as echarts from './echarts.min.js';
-	import * as echarts from 'echarts'
+	// import * as echarts from 'echarts'
 
 	var chartList = {}
 	export default {

+ 159 - 110
components/mpvueEcharts/leiDa.vue

@@ -20,137 +20,185 @@
 			}
 		},
 		watch: {
-			indicator:{
+			indicator: {
 				immediate: true,
 				handler(newValue, oldValue) {
 					this.drawLine();
 				}
 			}
 		},
-		mounted(){
-		},
+		mounted() {},
 
 		methods: {
 			drawLine() {
 				let _this = this;
-				 
-				let legendData =
-					_this.reference?.length > 0 ? ["你的分数", "参考值"] : ["你的分数"];
 
-				_this.option = {
-					color: ["#67F9D8", "#FFE434", "#56A3F1", "#FF917C"],
+				// let legendData =
+				// 	_this.reference?.length > 0 ? ["你的分数", "参考值"] : ["你的分数"];
+				// debugger
+				// _this.option = {
+				// 	color: ["#67F9D8", "#FFE434", "#56A3F1", "#FF917C"],
+				// 	title: {
+				// 		text: "",
+				// 		left: "left",
+				// 	},
+				// 	legend: {
+				// 		data: legendData,
+				// 		orient: "vertical",
+				// 		left: "left",
+				// 	},
+				// 	radar: [{
+				// 			center: ["25%", "50%"],
+				// 			radius: 20,
+				// 			startAngle: 2,
+				// 			splitNumber: 1,
+				// 			shape: "circle",
+				// 			axisName: {
+				// 				formatter: "【{value}】",
+				// 				color: "#428BD4",
+				// 			},
+				// 			splitArea: {
+				// 				areaStyle: {
+				// 					color: ["#77EADF", "#26C3BE", "#64AFE9", "#428BD4"],
+				// 					shadowColor: "rgba(0, 0, 0, 0.2)",
+				// 					shadowBlur: 10,
+				// 				},
+				// 			},
+				// 			axisLine: {
+				// 				lineStyle: {
+				// 					color: "rgba(211, 253, 250, 0.8)",
+				// 				},
+				// 			},
+				// 			splitLine: {
+				// 				lineStyle: {
+				// 					color: "rgba(211, 253, 250, 0.8)",
+				// 				},
+				// 			},
+				// 		},
+				// 		{
+				// 			indicator: _this.indicator,
+
+				// 			center: ["50%", "50%"],
+				// 			radius: 80,
+				// 			axisName: {
+				// 				color: "#fff",
+				// 				backgroundColor: "#666",
+				// 				borderRadius: 3,
+				// 				padding: [3, 5],
+				// 			},
+				// 		},
+				// 	],
+				// 	series: [{
+				// 		type: "radar",
+				// 		radarIndex: 1,
+				// 		data: [{
+				// 				value: _this.scoreList,
+				// 				name: "你的分数",
+				// 				symbol: "rect",
+				// 				symbolSize: 12,
+				// 				lineStyle: {
+				// 					type: "dashed",
+				// 				},
+				// 				label: {
+				// 					show: true,
+				// 					formatter: function(params) {
+				// 						return params.value;
+				// 					},
+				// 				},
+				// 			},
+				// 			{
+				// 				value: _this.reference,
+				// 				name: "参考值",
+				// 				label: {
+				// 					show: true,
+				// 					formatter: function(params) {
+				// 						return params.value;
+				// 					},
+				// 				},
+				// 				areaStyle: {
+				// 					color: {
+				// 						type: 'linear',
+				// 						x: 0,
+				// 						y: 0,
+				// 						x2: 0,
+				// 						y2: 1,
+				// 						colorStops: [{
+				// 							offset: 0,
+				// 							color: 'rgba(58,132,255, 0.5)' // 0% 处的颜色
+				// 						}, {
+				// 							offset: 1,
+				// 							color: 'rgba(58,132,255, 0)' // 100% 处的颜色
+				// 						}],
+				// 						global: false // 缺省为 false
+				// 					}
+				// 				},
+				// 				// areaStyle: {
+				// 				//   color: new echarts.graphic.RadialGradient(0.1, 0.6, 1, [
+				// 				//     {
+				// 				//       color: "rgba(255, 145, 124, 0.1)",
+				// 				//       offset: 0,
+				// 				//     },
+				// 				//     {
+				// 				//       color: "rgba(255, 145, 124, 0.9)",
+				// 				//       offset: 1,
+				// 				//     },
+				// 				//   ]),
+				// 				// },
+				// 			},
+				// 		],
+				// 	}, ],
+				// }
+				// console.log("==66666=====>", _this.scoreList)
+				// console.log(_this.option)
+				var option = {
 					title: {
-						text: "",
-						left: "left",
+						text: 'Basic Radar Chart'
 					},
 					legend: {
-						data: legendData,
-						orient: "vertical",
-						left: "left",
+						data: ['Allocated Budget', 'Actual Spending']
 					},
-					radar: [{
-							center: ["25%", "50%"],
-							radius: 20,
-							startAngle: 2,
-							splitNumber: 1,
-							shape: "circle",
-							axisName: {
-								formatter: "【{value}】",
-								color: "#428BD4",
+					radar: {
+						// shape: 'circle',
+						indicator: [{
+								name: 'Sales',
+								max: 6500
 							},
-							splitArea: {
-								areaStyle: {
-									color: ["#77EADF", "#26C3BE", "#64AFE9", "#428BD4"],
-									shadowColor: "rgba(0, 0, 0, 0.2)",
-									shadowBlur: 10,
-								},
+							{
+								name: 'Administration',
+								max: 16000
 							},
-							axisLine: {
-								lineStyle: {
-									color: "rgba(211, 253, 250, 0.8)",
-								},
+							{
+								name: 'Information Technology',
+								max: 30000
 							},
-							splitLine: {
-								lineStyle: {
-									color: "rgba(211, 253, 250, 0.8)",
-								},
+							{
+								name: 'Customer Support',
+								max: 38000
 							},
-						},
-						{
-							indicator: _this.indicator,
-
-							center: ["50%", "50%"],
-							radius: 80,
-							axisName: {
-								color: "#fff",
-								backgroundColor: "#666",
-								borderRadius: 3,
-								padding: [3, 5],
+							{
+								name: 'Development',
+								max: 52000
 							},
-						},
-					],
+							{
+								name: 'Marketing',
+								max: 25000
+							}
+						]
+					},
 					series: [{
-						type: "radar",
-						radarIndex: 1,
+						name: 'Budget vs spending',
+						type: 'radar',
 						data: [{
-								value: _this.scoreList,
-								name: "你的分数",
-								symbol: "rect",
-								symbolSize: 12,
-								lineStyle: {
-									type: "dashed",
-								},
-								label: {
-									show: true,
-									formatter: function(params) {
-										return params.value;
-									},
-								},
+								value: [4200, 3000, 20000, 35000, 50000, 18000],
+								name: 'Allocated Budget'
 							},
 							{
-								value: _this.reference,
-								name: "参考值",
-								label: {
-									show: true,
-									formatter: function(params) {
-										return params.value;
-									},
-								},
-								areaStyle: {
-									color: {
-										type: 'linear',
-										x: 0,
-										y: 0,
-										x2: 0,
-										y2: 1,
-										colorStops: [{
-											offset: 0,
-											color: 'rgba(58,132,255, 0.5)' // 0% 处的颜色
-										}, {
-											offset: 1,
-											color: 'rgba(58,132,255, 0)' // 100% 处的颜色
-										}],
-										global: false // 缺省为 false
-									}
-								},
-								// areaStyle: {
-								//   color: new echarts.graphic.RadialGradient(0.1, 0.6, 1, [
-								//     {
-								//       color: "rgba(255, 145, 124, 0.1)",
-								//       offset: 0,
-								//     },
-								//     {
-								//       color: "rgba(255, 145, 124, 0.9)",
-								//       offset: 1,
-								//     },
-								//   ]),
-								// },
-							},
-						],
-					}, ],
-				}
-				console.log("==66666=====>", _this.scoreList)
-				console.log(_this.option)
+								value: [5000, 14000, 28000, 26000, 42000, 21000],
+								name: 'Actual Spending'
+							}
+						]
+					}]
+				};
 			}
 		}
 	}
@@ -160,10 +208,11 @@
 	.container {
 		/* width: 750rpx; */
 		height: 320px;
-		 
+
 	}
-	.container100{
+
+	.container100 {
 		width: 100%;
 		height: 100%;
 	}
-</style>
+</style>

+ 108 - 23
examWidght/components/Li-ExamWidght/Li-ExamWidght.vue

@@ -9,7 +9,7 @@
 			</view>
 			<view class="exam-list-content fontSize16">
 				<!-- <scroll-view> -->
-				<templete v-if="QuestionList.questionType == 0">
+				<view v-if="QuestionList.questionType == 0">
 					<view class='question_select'>
 						<view v-for="(item, i) in QuestionList.QuestionOptionList" :key="i" class="disRowAroundStart"
 							:class="
@@ -21,26 +21,58 @@
 								<view class='question_circle'>
 
 								</view>
-								<view :style="{color:QuestionList.fldAnswer == item.fldOptionText?'#ffffff':'#656C74'}">
+								<view class="question_txt"
+									:style="{color:QuestionList.fldAnswer == item.fldOptionText?'#ffffff':'#656C74'}">
 									{{ item.fldOptionText }}
 								</view>
 							</view>
 						</view>
 					</view>
+				</view>
+				<view class="question_select" v-if="QuestionList.questionType == 2"><input class="ans_input"
+						@input="onKeyInput" placeholder="请输入答案" /></view>
+				<view class="question_select" v-if="QuestionList.questionType == 5">
+					<picker-view :indicator-style="indicatorStyle" :value="selectVal" @change="bindChange"
+						class="picker-view">
+						<picker-view-column>
+							<view class="item" v-for="(item,index) in selectArr" :key="index">{{item.name}}</view>
+						</picker-view-column>
+					</picker-view>
+				</view>
+				<view v-if="QuestionList.questionType == 8">
+					<view class="image_wrap">
+						<image style="width: 46%;" v-for="(item,index) in getAnswerImg(QuestionList.questionParam)"
+							:src="item" :key="index" alt="" mode="widthFix"></image>
+					</view>
+					<view class='question_select'>
+						<view v-for="(item, i) in QuestionList.QuestionOptionList" :key="i" class="disRowAroundStart"
+							:class="
+							  QuestionList.fldAnswer == item.fldOptionText
+							    ? 'checked_user'
+							    : 'checked_'
+							" @click.stop="onListItem(item)">
+							<view style='display: flex;align-items: center;'>
+								<view class='question_circle'>
 
-				</templete>
-				<view v-if="QuestionList.questionType == 2"><input class="ans_input" @input="onKeyInput"
-						placeholder="请输入答案" /></view>
+								</view>
+								<view class="question_txt"
+									:style="{color:QuestionList.fldAnswer == item.fldOptionText?'#ffffff':'#656C74'}">
+									{{ item.fldOptionText }}
+								</view>
+							</view>
+						</view>
+					</view>
+				</view>
 				<view class="exam-list-button">
 					<button v-if="currentIndex > 0" @click="goPrev" class="answer_button">
 						上一题
 					</button>
-					<!-- <button v-if="currentIndex < total && currentIndex != total - 1" @click="goNext"
+					<button v-if="currentIndex < total - 1 && QuestionList.questionType == 5" @click="goNext"
 						class="answer_button">
 						下一题
-					</button> -->
+					</button>
 
-					<button v-if="currentIndex + 1 == total" @click="subMit" style='margin-left:20px' class="answer_button">
+					<button v-if="currentIndex + 1 == total" @click="subMit" class="answer_button">
 						提交
 					</button>
 				</view>
@@ -62,8 +94,19 @@
 				// checked_: this.$imageUrl.urls.checked_,
 				isShowBtn: true,
 				blankAnswer: "",
+				indicatorStyle: `height: 50px;`,
+				selectVal: [0],
 			};
 		},
+		computed: {
+			selectArr() {
+				let arr = [];
+				if (this.QuestionList.questionType == 5) {
+					arr = JSON.parse(this.QuestionList.questionParam)
+				}
+				return arr;
+			}
+		},
 		mounted() {
 			this.isShowBtn = true;
 		},
@@ -72,8 +115,13 @@
 				this.$emit("goPrevAnswer");
 			},
 			goNext() {
-				this.$emit("goNextAnswer", this.blankAnswer);
-				this.this.blankAnswer = "";
+				if (this.QuestionList.questionType == '5') {
+					this.$emit("goNextAnswer", this.selectVal[0]);
+					this.selectVal = [0];
+				} else {
+					this.$emit("goNextAnswer", this.blankAnswer);
+					this.this.blankAnswer = "";
+				}
 			},
 			onItem(item) {
 				this.$emit("onItemAnswer", item);
@@ -95,6 +143,18 @@
 			onKeyInput: function(event) {
 				this.blankAnswer = event.target.value;
 			},
+			bindChange(e) {
+				this.selectVal = e.detail.value;
+			},
+			// 对应返回备选答案图片地址
+			getAnswerImg(param) {
+				param = JSON.parse(param);
+				let imgArr = [];
+				param.forEach(item => {
+					imgArr.push(`/static/scaleImages/${item.imgUrl}`)
+				})
+				return imgArr;
+			},
 		},
 	};
 </script>
@@ -114,12 +174,13 @@
 	}
 
 	.checked_ {
-		height: 40px;
+		display: flex;
+		align-items: center;
 		background: #E3FDFF;
 		border-radius: 10px 10px 10px 10px;
 		opacity: 1;
-		line-height: 40px;
 		margin-bottom: 18px;
+		min-height: 40px;
 	}
 
 	.checked_user {
@@ -127,8 +188,6 @@
 		color: #ffffff;
 		border-radius: 10px 10px 10px 10px;
 		opacity: 1;
-		height: 40px;
-		line-height: 40px;
 		margin-bottom: 18px;
 		/* color: #3fb4c9;
 		padding: 15px 0;
@@ -137,6 +196,7 @@
 		border-radius: 50px;
 		margin-bottom: 10px;
 		font-weight: bold; */
+		min-height: 40px;
 	}
 
 	.exam-list-button {
@@ -165,33 +225,58 @@
 		line-height: 26px;
 		background-color: #ffffff;
 		padding-top: 10px;
+		padding: 20rpx 40rpx 0 40rpx;
 
 	}
 
 	.answer_button {
-		width:100%;
+		flex: 1;
 		background-color: #03A2AD;
 		color: #ffffff;
 		font-size: 16px;
+		margin: 0 20rpx;
 	}
 
 	.question_circle {
-		width: 14px;
-		height: 14px;
+		width: 28rpx;
+		height: 28rpx;
 		border-radius: 50%;
 		border: 2px solid #03A2AD;
 		background: #ffffff;
-		margin-left: 14px;
-		margin-right: 20px
+		margin: 0 28rpx;
 	}
 
 	.question_select {
 		background-color: #ffffff;
 		padding-left: 10px;
 		padding-right: 10px;
-		padding-top: 30px;
-		padding-bottom: 40px;
-		border-bottom-left-radius:10px;
-		border-bottom-right-radius: 10px;
+		padding: 60rpx 40rpx 80rpx 40rpx;
+	}
+
+	.question_txt {
+		box-sizing: border-box;
+		padding: 10rpx 10rpx 10rpx 0;
+		flex: 1;
+		line-height: 1.8;
+		letter-spacing: 2px;
+	}
+
+	.picker-view {
+		width: 100%;
+		height: 500rpx;
+		margin-top: 20rpx;
+	}
+
+	.item {
+		line-height: 100rpx;
+		text-align: center;
+	}
+
+	.image_wrap {
+		width: 100%;
+		display: flex;
+		justify-content: space-around;
+		background: #ffffff;
+		padding-top: 20rpx;
 	}
 </style>

+ 11 - 1
examWidght/components/lineProgress/index.vue

@@ -3,6 +3,7 @@
 		<text v-show='total>0' style='color:black' space="ensp" decode="true"><text
 				class='pro-chu'>出题{{currentIndex+1}}</text><text class='pro-fen'>/{{total}}</text>
 			{{' '}}</text>
+		<view class="title_tips">选择你的答案进入下一题</view>
 		<progress style='width:100%;margin-top:8px' :percent="percentage" border-radius="20" backgroundColor='#E3FDFF'
 			activeColor="#3FB4C9" stroke-width="8" />
 	</view>
@@ -30,7 +31,6 @@
 		},
 		methods: {
 			percentageLineProgress(num, total) {
-				// debugger;
 				if (num == 0 || total == 0) {
 					return 0;
 				}
@@ -79,4 +79,14 @@
 		line-height: 20px;
 		margin-left: 3px;
 	}
+
+	.title_tips {
+		font-size: 24rpx;
+		font-family: Source Han Sans-Regular, Source Han Sans;
+		font-weight: 400;
+		color: #656C74;
+		line-height: 34rpx;
+		float: right;
+		margin: 16rpx 34rpx 0 0;
+	}
 </style>

+ 77 - 88
examWidght/examWidght/index.vue

@@ -1,8 +1,8 @@
 <template>
-	<view class='question_class'>
-		<view class=''>
-			<LineProgress class='pro-bg' :currentIndex="currentIndex" :total="total" />
-			<swiper :current="currentIndex" style="height: 98vh">
+	<view class="question_class">
+		<view class="">
+			<LineProgress class="pro-bg" :currentIndex="currentIndex" :total="total" />
+			<swiper :current="currentIndex" style="height: 98vh" :disable-touch="true">
 				<swiper-item style="overflow-y: scroll" v-for="(item, indexa) in QuestionListAll" :key="indexa" m
 					catchtouchmove="catchTouchMove">
 					<scroll-view scroll-y @scrolltolower="" style="height: 90vh">
@@ -13,12 +13,9 @@
 				</swiper-item>
 			</swiper>
 		</view>
-
 	</view>
-
 </template>
 
-
 <script>
 	import examWidght from "@/examWidght/components/Li-ExamWidght/Li-ExamWidght.vue";
 	import LineProgress from "@/examWidght/components/lineProgress";
@@ -26,13 +23,13 @@
 		getScaleInfo,
 		getUserInfo,
 		getResult,
-		queryPromotionBySubjectId
-	} from '@/api/index.js'
+		queryPromotionBySubjectId,
+	} from "@/api/index.js";
 	var _this;
 	export default {
 		components: {
 			examWidght,
-			LineProgress
+			LineProgress,
 		},
 		data() {
 			return {
@@ -59,25 +56,27 @@
 				// 当前索引
 				scale_index: 0,
 				//量表flag
-				flag: '',
+				flag: "",
 				//分享人的uid
-				uid: '',
+				uid: "",
 				//来源
-				source: '',
+				source: "",
 				//是否重复点击标志
 				loading: false,
 				//支付金额
-				price: '',
+				price: "",
 				//量表结果保存的唯一id
-				resultId: ''
-
-			}
-		},
-		destroyed() {
-
+				resultId: "",
+				userInfo: null,
+			};
 		},
+		destroyed() {},
 		onLoad(options) {
 			_this = this;
+			uni.pageScrollTo({
+				scrollTop: 0,
+				duration: 50
+			})
 			//查看是否传进来flag
 			_this.uid = options.uid;
 			_this.source = options.source;
@@ -90,19 +89,16 @@
 			}
 			//调用接口
 
-
 			//根据传过来的参数进行渲染题目
 			// console.log(_this.$request)
 			// this.queryTopic()
 		},
 
 		onHide() {
-
 			// clearInterval(this.timer);
 			// https://blog.csdn.net/weixin_42738504/article/details/125521004
 		},
 		methods: {
-
 			//根据flag查询渲染的结果
 			// queryTopic() {
 			// 	this.$request.get({
@@ -115,20 +111,16 @@
 			// },
 			//查询用户信息
 			selectUser() {
-				_this.$request
-					.post({
-						url: `${getUserInfo}`,
-						loadingTip: "加载中...",
-						data: {
-							uid: this.uid,
-							source: this.source,
-							flag: this.flag
-						},
-					}).then((res) => {
-						//查询到数据将用户信息
-						sessionStorage.setItem('user', JSON.stringify(res.data.user))
-						sessionStorage.setItem('token', res.data.token)
+				this.$request
+					.get({
+						url: 'system/temporaryUser',
+						loadingTip: "加载中..."
 					})
+					.then((res) => {
+						this.userInfo = res.data.user;
+						sessionStorage.setItem("user", JSON.stringify(res.data.user));
+						sessionStorage.setItem("token", res.data.token);
+					});
 			},
 			//上一题
 			goPrevAnswer() {
@@ -147,6 +139,11 @@
 			},
 			//下一题
 			goNextAnswer(blankAnswer) {
+				if (_this.scale_infos[_this.currentIndex].questionType == "5") {
+					let paramsArr = JSON.parse(_this.scale_infos[_this.currentIndex].questionParam);
+					blankAnswer = paramsArr[blankAnswer].label
+				}
+
 				if (blankAnswer) {
 					_this.QuestionListAll[_this.currentIndex].checkItems = blankAnswer;
 					_this.QuestionListAll[_this.currentIndex].fldAnswer = blankAnswer;
@@ -230,14 +227,13 @@
 					return;
 				} else {
 					_this.currentIndex = _this.currentIndex + 1;
-
-					_this.QuestionListAll[_this.currentIndex - 1].checkItems = item.fldOptionText;
+					_this.QuestionListAll[_this.currentIndex - 1].checkItems =
+						item.fldOptionText;
 					_this.QuestionListAll[_this.currentIndex - 1].fldAnswer =
 						item.fldOptionText;
 					let QuestionListNew = Object.assign({},
 						_this.QuestionListAll[_this.currentIndex]
 					);
-
 					_this.QuestionList = QuestionListNew;
 				}
 			},
@@ -264,8 +260,7 @@
 
 				//调用提交答案的接口
 
-				this.createResult()
-
+				this.createResult();
 
 				// uni.navigateTo({
 				// 	url:'/paymentPage/paymentPage/index'
@@ -274,11 +269,12 @@
 				// console.log(uni.$u)
 				// uni.$u.throttle(this.createResult, 2500);
 			},
+
 			// 生成测试结果
 			createResult() {
 				let _this = this;
 				if (this.loading) {
-					return
+					return;
 				}
 				this.loading = true;
 				let subMitListData = _this.QuestionListAll;
@@ -288,12 +284,10 @@
 					delete item.questionID;
 				});
 
-
-
 				let params = {
 					testPlanId: "",
 					scale_result: subMitListData,
-					userId: JSON.parse(sessionStorage.getItem('user')).id,
+					userId: JSON.parse(sessionStorage.getItem("user")).id,
 				};
 				console.log("提交的参数", params);
 
@@ -305,15 +299,27 @@
 					.post({
 						url: `${getResult}/${_this.flag}`,
 						loadingTip: "加载中...",
-						data: params
+						data: params,
 					})
 					.then((res) => {
 						_this.resultId = res.data;
+						_this.$request.post({
+							url: `scaleExternalSource/save`,
+							loadingTip: "加载中...",
+							data: {
+								currentUserId: _this.userInfo.id,
+								resultId: _this.resultId,
+								uid: _this.uid,
+								source: _this.source,
+								flag: _this.flag,
+							},
+						}).then(() => {})
 						uni.hideLoading();
 						// 答题结束获取支付
 						_this.getQueryPromotionBySubjectId(_this.flag);
 						_this.loading = false;
-					}).catch(() => {
+					})
+					.catch(() => {
 						uni.showToast({
 							icon: "none",
 							title: "提交失败",
@@ -323,6 +329,7 @@
 			},
 			// 获取支付金额
 			async getQueryPromotionBySubjectId(id) {
+				let _this = this;
 				let urls = queryPromotionBySubjectId + "/" + id;
 				await _this.$request
 					.get({
@@ -331,48 +338,29 @@
 						data: {},
 					})
 					.then((res) => {
-						_this.price = res.data.price;
-						if (res.data.price == 0) {
-							console.log("=获取支付金额接口数据==1==》", res);
+						console.log('量表支付信息', res.data);
+						let data = res.data;
+						if (data.price == 0) {
 							uni.navigateTo({
-								url: "/scaleTestResults/testResults/index?resultId=" +
-									_this.resultId +
-									"&messageShare=1",
+								url: `/scaleTestResults/testResults/index?resultId=${_this.resultId}&messageShare=1`
 							});
 						} else {
-							console.log("=获取支付金额接口数据=2===》", res);
-							let params = null
-							// #ifdef MP-WEIXIN
-							params = {
+							let params = {
 								productId: id,
-								userId: _this.userInfo?.user?.id,
+								userId: _this.userInfo?.id,
 								resultId: _this.resultId,
-								description: res.data.name,
-								total: res.data.price,
-								openId: _this.userInfo?.openId?.openId,
+								description: data.name,
+								total: data.price,
+								sceneType: uni.getSystemInfoSync().platform == "android" ?
+									"Android" : "iOS",
 							};
-							_this.getNative(params);
-							// #endif
-							// #ifdef MP-TOUTIAO	
-							params = {
-								resultId: _this.resultId,
-								subject: res.data.name,
-								body: res.data.name,
-								total_amount: res.data.price * 100,
-								msgPage: '',
-							};
-							_this.getNative(params);
-							// #endif
-							// #ifdef H5
-							let ips = "125.70.61.175";
-							params.payerClientIp = sessionStorage.getItem("ip") || ips;
-							params.sceneType = uni.getSystemInfoSync().platform == "android" ? "Android" : "iOS";
-							debugger;
-							_this.getH5Pay(params);
-							// #endif
+							sessionStorage.setItem('orderInfo', JSON.stringify(params))
+							uni.navigateTo({
+								url: "/paymentPage/paymentPage/index",
+							});
 						}
 					});
-			},
+			}
 		},
 	};
 </script>
@@ -382,7 +370,7 @@
 	}
 
 	.question_class {
-		background: linear-gradient(180deg, #D8FFF2 0%, #46E2ED 42%, #BEFFE8 98%);
+		background: linear-gradient(180deg, #d8fff2 0%, #46e2ed 42%, #beffe8 98%);
 		border-radius: 0px 0px 0px 0px;
 		padding-left: 10px;
 		padding-right: 10px;
@@ -394,20 +382,21 @@
 		padding-right: 10px;
 		padding-top: 20px;
 		/* background: url('/static/images/question_main.png') no-repeat 100% 100%; */
-		background-image: url('/static/images/question_main.png');
+		background-image: url("/static/images/question_main.png");
 		/* background-size:contain; */
 		background-size: 100% 100px;
 		background-repeat: no-repeat;
 		background-position: top;
 	}
+
 	.pro-bg {
-		background-image: url('/static/images/question_main1.png');
+		background-image: url("/static/images/question_main1.png");
 		/* background-size: contain; */
-		background-size:100% 100%;
+		background-size: 100% 100%;
 		background-position: top;
 		background-repeat: no-repeat;
-		padding-top:20px;
-		padding-left:10px;
-		padding-right:10px;
+		padding-top: 80rpx;
+		padding-left: 40rpx;
+		padding-right: 40rpx;
 	}
 </style>

+ 33 - 31
index.html

@@ -1,32 +1,34 @@
 <!DOCTYPE html>
-<html lang="en" >
-  <head>
-    <meta charset="UTF-8" />
-    <script>
-      var coverSupport = 'CSS' in window && typeof CSS.supports === 'function' && (CSS.supports('top: env(a)') ||
-        CSS.supports('top: constant(a)'))
-      document.write(
-        '<meta name="viewport" content="width=device-width, user-scalable=no, initial-scale=1.0, maximum-scale=1.0, minimum-scale=1.0' +
-        (coverSupport ? ', viewport-fit=cover' : '') + '" />')
-    </script>
-    <title></title>
-    <!--preload-links-->
-    <!--app-context-->
-  </head>
-  <body>
-    <div id="app"><!--app-html--></div>
-    <script type="module" src="/main.js"></script>
-  </body>
-  <style>
-      html{
-		  font-size: 54px !important;
-		  background: #f6f6f6;
-	  }
-	  #app{
-		  width:100%;
-		  max-width: 10rem;
-		  margin:auto;
-		  min-height:100vh;
-	  }
-  </style>
-</html>
+<html lang="en">
+	<head>
+		<meta charset="UTF-8" />
+		<script>
+			var coverSupport = 'CSS' in window && typeof CSS.supports === 'function' && (CSS.supports('top: env(a)') ||
+				CSS.supports('top: constant(a)'))
+			document.write(
+				'<meta name="viewport" content="width=device-width, user-scalable=no, initial-scale=1.0, maximum-scale=1.0, minimum-scale=1.0' +
+				(coverSupport ? ', viewport-fit=cover' : '') + '" />')
+		</script>
+		<title>丁香解忧</title>
+		<link rel="icon" href="/static/images/favicon.ico">
+		<script src="https://cdn.jsdelivr.net/npm/echarts@5.3.0/dist/echarts.min.js"></script>
+		<!--preload-links-->
+		<!--app-context-->
+	</head>
+	<body>
+		<div id="app"><!--app-html--></div>
+		<script type="module" src="/main.js"></script>
+	</body>
+	<style>
+		html {
+			background: #f6f6f6;
+		}
+
+		#app {
+			width: 100%;
+			max-width: 750rpx;
+			margin: auto;
+			min-height: 100vh;
+		}
+	</style>
+</html>

+ 23 - 5
manifest.json

@@ -1,7 +1,7 @@
 {
-    "name" : "psychic_pub",
-    "appid" : "",
-    "description" : "",
+    "name" : "psy_web_share",
+    "appid" : "__UNI__2917BA0",
+    "description" : "丁香解忧",
     "versionName" : "1.0.0",
     "versionCode" : "100",
     "transformPx" : false,
@@ -63,10 +63,28 @@
         "usingComponents" : true
     },
     "mp-toutiao" : {
-        "usingComponents" : true
+        "usingComponents" : true,
+        "optimization" : {
+            "subPackages" : true
+        }
     },
     "uniStatistics" : {
         "enable" : false
     },
-    "vueVersion" : "3"
+    "vueVersion" : "3",
+    "locale" : "zh-Hans",
+    "fallbackLocale" : "zh-Hans",
+    "h5" : {
+        "router" : {
+            "mode" : "history",
+            "base" : "/webo/"
+        },
+        "template" : "index.html",
+        "title" : "丁香解忧",
+        "optimization" : {
+            "treeShaking" : {
+                "enable" : true
+            }
+        }
+    }
 }

+ 560 - 0
newScale/EQtest/index.vue

@@ -0,0 +1,560 @@
+<template>
+	<view class="bg">
+		<image class="banner banner1" src="https://test.jue-ming.com:8849/api/show?filePath=./webo/EQtest/banner1.png"
+			mode="widthFix"></image>
+		<image class="banner banner2" src="https://test.jue-ming.com:8849/api/show?filePath=./webo/EQtest/banner2.png"
+			mode="widthFix"></image>
+		<image class="banner banner3" src="https://test.jue-ming.com:8849/api/show?filePath=./webo/EQtest/banner3.png"
+			mode="widthFix"></image>
+		<image class="banner banner4" src="https://test.jue-ming.com:8849/api/show?filePath=./webo/EQtest/banner4.png"
+			mode="widthFix"></image>
+		<view class="contral_box">
+			<view class="agreement" :class="{ 'shake-animation': isShake }">
+				<checkbox-group @change="checkboxChange">
+					<checkbox value="cb" :checked="isChecked" color="#999999" style="transform:scale(0.7)" />
+				</checkbox-group>
+				同意<text @click="toggle">《测评服务条款》</text>立即开始测试
+			</view>
+
+			<view class="go_test" @click="goTest">
+				<image src="https://test.jue-ming.com:8849/api/show?filePath=./webo/EQtest/start_btn.png"
+					mode="widthFix"></image>
+			</view>
+		</view>
+		<view class="popup-main">
+			<!-- 普通弹窗 -->
+			<uni-popup ref="popup" class='' type="bottom" background-color="#fff">
+
+				<view class="popup-content">
+					<view class='popup-cuo'>
+						<uni-icons @click="close()" type="clear" size="30"></uni-icons>
+					</view>
+					<view class='popup-title-cla'>测评平台服务协议</view>
+					<view class='popup-detail'>
+						<view>【更新时间:2023年10月25日】</view>
+
+						<view>【生效时间:2023年10月25日】</view>
+
+
+						<view>尊敬的用户:</view>
+						<view>欢迎您选择并使用红朵心理测评产品。</view>
+						<view>
+							红朵提醒您,请仔细阅读以下全部内容,请您重点阅读的条款(尤其是知识产权授权、个人信息使用授权条款)。如果您不同意本协议的任一或全部条款内容,请不要以确认的形式(包括但不限于:点击确认、进入使用程序等)进行下一步操作或使用本协议项下服务。
+						</view>
+
+						<view>一、本协议的确认及相关定义</view>
+
+						<view>二、本协议的生效与终止</view>
+
+						<view>三、账户的注册、使用和注销</view>
+						<view>四、用户信息发布规则</view>
+						<view>五、【知识产权授权】*</view>
+						<view>六、【个人信息使用授权】*</view>
+						<view>七、个人信息与隐私保护</view>
+						<view>八、保密义务</view>
+						<view>九、反贿赂</view>
+						<view>十、免责条款</view>
+						<view>十一、违约责任</view>
+						<view>十二、有效通知及平台联系信息</view>
+						<view>十三、其他</view>
+						<view>一、本协议的确认及相关定义</view>
+						<view>(一)用户注册协议的确认</view>
+						<view>
+							当您确认接受用户注册协议(以下简称“本协议”或“用户协议”)或同意通过平台使用红朵心理产品,即表示您作为乙方(以下简称“乙方”“用户”)与红朵(平台运营主体:广州市人心网络科技有限公司,以下简称“甲方”、“红朵”)已达成协议关系,并自愿接受并遵守红朵提供服务涉及的,包括本协议在内的所有服务规范及相关子协议、管理规范、咨询服务规则等。您可以根据自主需要,在平台上使用包括但不限于测评、咨询、倾诉、课程、问答、读书会等产品,您在使用上述产品前,应当了解相关产品的使用规则,在详尽阅读了解并同意后使用。
+							红朵有权就本协议及相关子协议、管理规范、产品使用规则等文件(以下合称“服务协议”)进行更新,包括但不限于不定时发布或修订红朵服务规则、使用规则、公告、提示、通知等内容,更新将会通过系统提示、站内信息推送和/或您在平台留下的联系方式通知和提醒您注意,更新内容将在前述通知和提醒指定日期开始生效。如您继续使用服务,即意味着您同意接受我方更新后的服务协议,您的权利以及义务的表述、服务内容和费用及其结算标准等,均以最新的服务协议为准。如您不同意变更后的服务协议,请您停止使用本产品和服务。
+						</view>
+						<view>(二)相关定义</view>
+						<view>红朵:指甲方运营的心理测评网站(含手机端、PC端)、APP程序、其他网络渠道等。</view>
+						<view>红朵平台服务:包括但不限于红朵在平台上提供的测评、咨询、倾诉、课程、问答、读书会、知识科普等产品服务。</view>
+
+						<view>根据本协议,红朵为您提供平台产品服务,您将通过红朵心理测评平台进行产品服务的使用,协议具体内容供双方遵守执行:</view>
+						<view>(三)平台服务</view>
+						<view>1.甲方为乙方在平台提供一个心理服务媒介,乙方通过该服务媒介有权使用红朵心理测评平台提供的心理产品服务。</view>
+						<view>2.用户协议内容与其他单项产品规则、服务规则、网站规则或服务协议存在冲突的,以其他特别规则约定为准。</view>
+						<view>3.用户在使用相关产品前,应当充分阅读并理解产品规则的内容(如有),同意按照页面规则享有、使用产品服务。</view>
+						<view>4.红朵有权根据运营需要更新、优化、下架、暂停、终止平台功能、产品、服务等。红朵上线新的模块、功能、产品或服务的,均适用本用户协议的内容。</view>
+						<view>5.红朵有权因定期、不定期的平台维护或运营需要,暂缓、终止提供平台服务,用户将会提前得到平台通知。因此造成的传输或连接迟延、不准确、错误、疏漏或无法使用等,平台不承担相关责任。
+						</view>
+						<view>二、本协议的生效与终止</view>
+						<view>(一)生效</view>
+						<view>1.本协议在用户点击确认本用户协议,并登录使用平台时开始生效。</view>
+						<view>(二)、终止的情形</view>
+						<view>1.【用户发起的终止】用户有权通过以下任一方式终止本协议:</view>
+						<view>(1) 服务协议更新后,用户停止使用平台及其产品服务的;</view>
+						<view>(2)服务协议更新后,用户明示不接受更新后服务协议的;</view>
+						<view>(3)用户主动注销帐户的。</view>
+						<view>2.【红朵发起的终止】出现以下情况时,红朵有权无须通知用户即终止本协议:</view>
+						<view>(1) 乙方违反用户协议规定的,甲方有权不通知乙方即时终止本协议;</view>
+						<view>(2) 乙方盗用他人帐户、发布违法/侵权信息、骗取他人财物、扰乱网站秩序、采取不正当手段牟利或有其他违法/违约行为的,红朵有权对用户的帐户予以查封、注销;</view>
+						<view>(3) 除上述情形外,若用户3次(或以上)违反红朵服务协议相关产品服务规则的,红朵有权对用户的帐户予以查封、注销;</view>
+						<view>(4)用户的帐户被红朵依据用户协议注销的;</view>
+						<view>(5) 其它违反用户协议、服务协议的行为。</view>
+						<view>3.协议终止后的处理</view>
+						<view>(1)【用户信息披露】本协议终止后,除法律、法规有明确规定外,红朵不向用户或任何第三方提供帐户中的任何信息;同时也没有义务保留乙方的任何信息,乙方应当在协议终止前保存。
+						</view>
+						<view>(2)【平台权利】本协议终止后,红朵仍享有下列权利:</view>
+						<view>
+							①继续保存用户使用平台及产品服务时,对服务协议等各类文件的勾选、确认记录,对产品服务的使用痕迹、路径等(该信息不得涉及用户使用信息缓存、用户个人信息或用户隐私信息等,不得将该信息提供给任何第三方);
+						</view>
+						<view>② 对于用户的违约行为、造成的侵权损害等,平台保留追究违约责任的权利。</view>
+						<view>三、账户的注册、使用和注销</view>
+						<view>1.您确认:在注册、使用红朵平台及其产品服务时,您应当具备完全民事行为能力。</view>
+						<view>2.您是未满18周岁的未成年人的,应当取得监护人同意,由监护人进行用户注册,并在监护人同意和监护下使用红朵平台产品服务。</view>
+						<view>3.用户不具备用户资格的,由此产生一切责任、后果由使用人及其监护人承担,红朵有权立即注销不具备用户资格的帐户。</view>
+						<view>4.乙方注册、使用的帐户所有权归属甲方,乙方享有帐户的排他使用权,非经乙方许可,任何第三方不得使用乙方注册的帐户,任何第三方使用乙方帐户进行操作的,视为乙方本人的操作。
+						</view>
+						<view>5.用户按照平台注册流程完成注册程序后,可以获得平台帐户的使用权并成为平台用户。用户可以按平台显示的功能使用帐户,设置或修改帐户密码等,用户对自己的帐户和密码安全负责。
+						</view>
+						<view>6.帐户不得以任何方式转让、赠与或继承,发生任何不当使用帐户的行为或可能危害帐户使用安全的,用户应当立即通知红朵,要求红朵暂停帐户服务;红朵亦有权主动介入,暂停提供帐户服务。
+						</view>
+						<view>
+							7.一位用户限使用一个帐户。凡存在不当注册或不当使用多个帐户的,红朵有权注销多余帐户(保留最早注册帐户,数据保留以最早注册帐户为准)并视情况拒绝提供用户服务,因此对红朵造成损失的,用户应当承担赔偿责任。
+						</view>
+
+						<view>
+							8.实名认证:根据中华人民共和国法律、法规的规定,为了保护网络信息安全及更好地向用户提供用户服务,红朵有权要求用户及时完成实名认证;用户在使用红朵产品服务时,按使用规则要求需要进行实名认证的,应当在完成实名认证后使用相关产品服务。
+						</view>
+						<view>9.账户注销:</view>
+						<view>(1)用户有权主动注销账户,账户注销后,红朵不得继续保留用户个人信息,用户个人信息应当在十个工作日内删除;</view>
+						<view>
+							(2)用户使用帐户过程中,可以要求红朵删除其个人信息;用户在帐户使用过程中要求红朵删除其个人信息的,可能会导致部分产品服务无法提供,因此造成的用户损失由用户自行承担;删除个人信息后,长期未登录使用的帐户,红朵有权暂停帐户服务。
+						</view>
+						<view>10.合法性</view>
+						<view>(1)用户应当遵守中华人民共和国法律、法规的规定,不得将红朵产品服务用于任何违法、违规、违背道德的行为,否则红朵有权拒绝提供产品服务、注销帐户等。严禁包括但不限于以下行为:
+						</view>
+						<view>① 违反中华人民共和国宪法、法律、法规、政策规定的;</view>
+						<view>② 危害国家安全、泄露国家秘密、颠覆国家政权、破坏国家统一的;</view>
+						<view>③ 损害国家、民族荣誉和利益的,损害社会公共利益的;</view>
+						<view>④ 煽动民族仇恨、民族歧视、破坏民族团结的;</view>
+						<view>⑤ 宣扬邪教、封建迷信的;</view>
+						<view>⑥ 散布谣言、扰乱社会秩序、破坏社会稳定的;</view>
+						<view>⑦ 散布淫秽、色情、赌博、暴力、凶杀、恐怖或者教唆违法、犯罪的;</view>
+						<view>⑧ 侮辱、诽谤他人,侵害他人合法权益的;</view>
+						<view>⑨ 违背社会公序良俗,影响恶劣的;</view>
+						<view>⑩ 以非法目的使用帐户、产品服务、网络服务系统的;</view>
+						<view>⑪ 故意制作、传播计算机病毒等破坏性程序的;</view>
+						<view>⑫ 其他危害计算机信息网络安全的行为;</view>
+						<view>⑬ 有法律、行政法规、政府政策禁止的其他行为。</view>
+						<view>
+							(2)用户设置的帐户昵称不得违反国家法律、法规及红朵相关规则,不得使用国家政要人名、不得使用侮辱诽谤他人的名称、不得使用侵犯第三方知识产权或企业字号的名称,否则红朵有权终止用户的帐户昵称使用;因此造成任何权属争议、赔偿、损失等均由用户承担。
+						</view>
+						<view>11.真实性</view>
+						<view>(1)用户完成实名注册和使用产品服务时,应当提供真实、准确、最新及完整的本人资料,对于资料更新的应该及时进行帐户信息更新,以确保资料的时效性。</view>
+						<view>(2)用户提供的资料错误、不实、超过时效、不完整或具误导性的,红朵有权暂停或终止提供帐户服务。</view>
+						<view>12.更新维护</view>
+						<view>(1)用户应当及时更新帐户信息,以保证提供的信息最新、真实、完整、有效。</view>
+						<view>
+							(2)红朵按用户提供的最新联系信息无法与用户取得联系、用户未按红朵的要求及时提供信息、用户提供的信息明显不实或经行政司法机关核实为无效信息的,用户应当承担因此造成的全部损失与不利后果。
+						</view>
+						<view>(3)红朵有权暂停或终止提供帐户服务,直至用户更新最新的用户信息,红朵不因此承担任何责任。</view>
+						<view>四、用户信息发布规则</view>
+						<view>
+							1.用户发布的信息、评论、文章等内容的著作权归属用户所有或已获得著作权人授权可用于红朵平台发布,不得违反法律法规的规定、不得违背社会公序良俗、不得侵犯任何第三方或红朵的合法权益、不得虚构任何不实信息或传播谣言、不得发布广告内容或联系方式、引用内容不超过发布内容的20%等,否则,红朵有权下架、删除、屏蔽上述信息、封禁或注销帐户等,造成的任何法律责任由用户自行承担。
+						</view>
+						<view>2.您应当确保所发布的内容不包含以下类目:</view>
+						<view>(1)违反国家法律、法规规定的;</view>
+						<view>(2)干涉政治宣传、封建迷信、淫秽、色情、赌博、暴力、恐怖或者教唆犯罪的;</view>
+						<view>(3)欺诈、虚假、不准确或存在误导性的;</view>
+						<view>(4)侵犯他人知识产权、涉及第三方商业秘密或其他专有权利的;</view>
+						<view>(5)侮辱、诽谤、恐吓、涉及他人隐私等侵害他人合法权益的;</view>
+						<view>(6)可能破坏、篡改、删除、影响网站系统正常运行、窃取平台数据、窃取用户数据及个人资料的;</view>
+						<view>(7)发布含病毒、木马、爬虫等恶意软件、程序代码的;</view>
+						<view>(8)发布未经证实的信息、谣言的;</view>
+						<view>(9)发布非红朵的广告宣传内容、个人或他人联系信息的;</view>
+						<view>(10)其他损害社会公共利益、违背公序良俗或依据平台其他规定不得在网站发布的。</view>
+						<view>3.权利主张</view>
+						<view>若您认为本平台的内容(含其他用户发布的内容)或通过本平台获得的内容可能侵犯您的或任何第三方的合法权益,请您以书面形式或通过平台“投诉/举报”渠道向红朵反馈。
+
+							反馈时,请提供您的身份证明(身份证信息、联系信息)、内容权属证明、具体链接(URL)及详细侵权情况说明等,红朵将尽快核实相应内容并与您取得联系;确定争议双方信息后,将联系双方协调处理。
+
+							如果您陈述的权利通知失实,由您承担由此造成的全部法律责任。</view>
+						<view>五、【知识产权授权】*</view>
+						<view>
+							1.红朵拥有平台内的所有产品、技术、程序、资料、信息内容(包括但不限于文字、图片、图像、照片、音频、视频、图表、色彩、版面设计、电子文档)的所有权利(包括但不限于著作权、商标权、专利权、商业秘密及其他所有相关权利)。
+						</view>
+						<view>
+							2.用户在平台发表的各类作品,同意将作品的著作权【独家】【永久】【无偿】授权给红朵商业使用:包括但不限于复制权、发行权、出租权、展览权、表演权、放映权、广播权、信息网络传播权、摄制权、改编权、翻译权、汇编权、维权权利以及应当由著作权人享有的其他著作财产权利。
+						</view>
+						<view>3.用户在平台发表的各类作品,被第三人侵权转载、使用的,红朵有权单独起诉向第三人主张维权,红朵提起诉讼所支付的成本由红朵承担,获得的全部赔偿由红朵享有。</view>
+						<view>
+							4.红朵有权使用上述各类授权作品,包括但不限于在平台发表、在其他自媒体渠道发表、改编为其他类型作品、汇编为其他作品或与红朵其他作品共同进行汇编等,红朵改编为其他类型作品的,改编作品的所有权归属红朵所有。
+						</view>
+						<view>5.用户有权在自有渠道、个人的自媒体帐户发表上述各类授权作品,但不得授权任何第三方使用(包括不得授权供职单位使用),用户使用上述各类授权作品同时不得阻碍红朵使用。</view>
+						<view>六、【个人信息使用授权】*</view>
+						<view>
+							1.红朵平台记录的运营数据,包括但不限于用户信息、用户列表、用户关系、用户的使用数据、交易数据、订单数据等(如有),其全部的权利均归属于红朵。红朵不得超过提供产品服务的范围收集上述信息,亦不得超过提供产品服务的范围使用上述信息。
+						</view>
+						<view>
+							2.未经红朵书面同意,任何人不得利用用户列表、用户关系等向用户发送宣传信息,不得擅自保存、备份、泄露、使用或授权他人使用前述运营数据。用户协议终止或解除后,红朵不向用户提供或备份上述数据。
+						</view>
+						<view>3.用户授权红朵在必要的范围内,在提供产品服务过程中使用甲方的个人信息。</view>
+						<view>4.用户注销帐户的,红朵应当及时删除用户的个人信息;用户有权在帐户使用过程中要求删除其个人信息。</view>
+						<view>七、个人信息与隐私保护</view>
+						<view>1.您应当妥善保管好帐户号码及密码等帐户信息,若发现帐户被他人使用的,应当立即通知红朵处理。</view>
+						<view>2.因不可抗力(包括黑客行为、计算机病毒、系统不稳定等)、遭受他人诈骗或您主动泄露、保管疏忽导致账号、密码被他人使用的,红朵不承担任何责任。</view>
+						<view>3.红朵重视对您信息的保护,关于您的个人信息将依照《红朵隐私政策》受到规范和保护,请用户点击并完整阅读 《红朵隐私政策》,以帮助您更好地保护个人信息。</view>
+						<view>八、保密义务</view>
+						<view>
+							1.双方对本协议内容和因签署、履行本协议而获得或接触到的对方商业信息、商业秘密及其他未披露的资料和信息(包括但不限于产品服务内容信息、个人隐私、财务信息、技术信息等)应当严格保密;未经对方书面同意,任何一方不得将上述信息的全部或部分通过任何方式使用或以任何方式泄露给第三方,为履行本协议使用除外:
+						</view>
+						<view>(1)接受方内部有必要了解相关信息的雇员或其关联公司及其雇员;</view>
+						<view>(2)对接受方负有保密义务的事务性合作伙伴,包括律师事务所(律师)、会计师事务所(会计师)、审计事务所(审计师)、评估事务所(评估师)等;</view>
+						<view>(3)依据有关法律法规要求、行政指令等而向政府部门、司法部门、证券交易所或其他监管机构披露。</view>
+						<view>2.未经对方书面同意,不得擅自使用、复制对方的商标、标志、商业信息、技术及其他资料。</view>
+						<view>3.任何一方不得以任何方式直接或间接对另一方进行诋毁性评价、负面/消极评价,或有任何对另一方声誉造成不利影响的行为。</view>
+						<view>4.保密期限至相关信息置于公开领域为止。</view>
+						<view>九、反贿赂</view>
+						<view>1.红朵坚持并认真贯彻国家关于反商业贿赂的各项规定,建立健全反不正当交易行为和反商业贿赂的长效机制。</view>
+						<view>
+							2.红朵坚持严厉打击商业贿赂行为,对违反相关规定的员工,严肃查处、惩罚和教育,情节严重的,移送司法机关处理;对违反相关规定的用户或合作伙伴,一经查实,红朵有权终止对用户提供服务或合作关系,情节严重的,移送司法机关处理。
+						</view>
+						<view>
+							3.红朵愿意在公开透明、公正平等、诚实信用的合作机制下,与所有用户及合作伙伴“共赢”美好明天。真诚希望用户及合作伙伴能理解并支持本红朵的反商业贿赂政策,并欢迎对红朵及员工进行监督,发现相关情况的时候及时向红朵反馈。
+						</view>
+						<view>十、免责条款</view>
+						<view>1.避风港原则</view>
+						<view>(1)用户发布并授权红朵使用的信息、评论、文章、图片等内容应当遵守“用户信息发布规则”,红朵对于用户发布的上述内容不提供担保义务。</view>
+						<view>(2)由于第三方认为用户发表的内容违法、侵权的,相关责任及损害由用户出面主动解决并承担相应的责任,红朵有权及时删除上述内容。</view>
+						<view>2.外部链接</view>
+						<view>
+							红朵原则上不允许在平台中传播任何外部链接,因此不对平台外部链接的可用性、安全性负责,不认可外部链接的任何内容、宣传、产品、服务等,任何人发现外部链接应当及时向红朵举报,由红朵下架处理。
+							任何使用外部链接引起的纠纷与红朵无关,红朵不对任何外部链接承担任何责任。</view>
+						<view>3.心理咨询</view>
+						<view>
+							(1)用户理解、知悉并同意心理咨询的效果取决于咨询师与用户双方配合作用的结果。无论用户采用什么样的咨询方式,红朵、咨询师并不保证用户接受心理咨询后一定能达到某种效果;未达到用户心理预期的心理咨询效果,不属于服务质量问题。心理咨询服务结束后一般不予退款,且红朵、咨询师对服务结果、退费等不承担任何法律责任。
+						</view>
+						<view>
+							(2)在心理咨询服务过程中,由于用户自身原因引起的突发疾病或意外死亡,或由于自身原因引起的自伤、自残、自杀等,红朵及其咨询师尽可能采取措施控制事态恶化,必要时与用户的紧急联系人、相关部门、医疗机构取得联系。由此造成的损害后果由用户本人(或监护人)承担,平台、心理咨询师不承担损害责任。
+						</view>
+
+
+						<view>(3)平台提供的心理咨询服务不属于疾病诊断、治疗或医疗服务,不属于国务院《医疗机构管理条例》的医疗机构,用户同意:不得以接受的心理咨询服务为非医疗服务为由主张任何权利</view>
+
+						<view>
+							(4)有自杀倾向或自杀未遂的用户,应当向危机干预机构或符合法律规定的医疗机构寻求治疗帮助。心理咨询师不提供危机干预或治疗服务,用户因此最终选择自杀、自伤、自残的,责任及后果由用户自行承担。
+						</view>
+
+						<view>(5)用户对心理咨询服务引起的争议、费用纠纷、服务质量纠纷等,争议的解决方案以咨询师和用户协商一致为准,红朵对争议提供平等协商干预,红朵不承担任何责任。</view>
+
+						<view>(6)禁止私下交易:</view>
+
+						<view>① 咨询师与用户不得私下交易(包括但不限于私下转账交易或通过第三方平台交易);</view>
+
+						<view>② 私下交易属于严重根本违约,红朵有权直接注销用户的帐户,因此给甲方造成的任何损失,用户承担全部赔偿责任;</view>
+
+						<view>③ 咨询师与用户因私下交易发生纠纷的,由其双方自行解决,与甲方无关;因此给甲方造成的任何损失,用户承担全部赔偿责任;</view>
+
+						<view>(7)心理咨询服务自完成起超过一年的,红朵不提供争议的平等协商干预,由用户与心理咨询师另行协商解决。</view>
+
+						<view>(8)用户使用平台心理咨询服务的,应当遵守心理咨询相关协议、规则、知情同意书等。</view>
+
+						<view>4.红朵测评</view>
+
+						<view>
+							(1)红朵测评根据心理学一般规则研发,用户完成测试后的结果由系统自动生成,测评结果和个人分析报告仅供用户单方参考,红朵对测评结果不负任何保证义务,不承担任何形式的法律责任;不得用于心理疾病的治疗、心理危机的干预或解除等。
+						</view>
+						<view>(2)当您选择使用红朵的心理测评服务时,请注意以下内容:
+						</view>
+
+						<view>①测评题目可免费查看作答,测评结果报告需付费阅读【价格以产品首页展示为准】;</view>
+
+						<view>②因每个人不同,测评结果亦因人而异,测评结果仅供参考,不作为医学心理诊断;</view>
+
+						<view>③测评过程没有严格的时间限制,但一般建议在20分钟内完成;</view>
+
+						<view>④测评答案无对错之分,且您的作答将得到严格保密,请您放心如实作答。</view>
+
+						<view>(3)非测评系统造成的问题,不予退费。</view>
+
+						<view>
+							5.红朵不保证由不可抗力因素或第三方引起的网络服务及时性、安全性、准确性问题。请用户务必及时保存自己的相关资料,否则因网络服务中断、维修、检修、维护等导致的任何损失,平台不承担相关责任。
+						</view>
+
+						<view>十一、违约责任</view>
+
+						<view>
+							1.协议双方应当严格遵守用户协议及相关服务协议、规则的约定,不得违反协议约定或损害对方合法权益、利用平台损害第三方合法权益。
+						</view>
+						<view>
+							2.协议一方在知悉对方违约时,有权通知违约方采取有效合理的补救措施纠正违约行为,并赔偿守约方损失。违约方在收到通知后七日内仍未纠正违约行为的,守约方有权单方书面解除本协议,注销帐户。
+						</view>
+						<view>3.守约方解除本协议或违约方采取补救措施并不妨碍守约方依照合同约定追究违约方的违约责任。
+						</view>
+						<view>4.违约责任:</view>
+						<view>
+							平台由于违约、侵权等事由造成来访者损害的,损害赔偿额以来访者实际支付款项总额为限承担责任。
+						</view>
+						<view>来访者由于违约、侵权等事由造成咨询师损害的,损害赔偿额以咨询师实际损失为限承担责任。
+						</view>
+						<view>5.损失:即包括但不限于因此对第三方的赔偿金、收入损失、投入成本损失、维权差旅费用、诉讼费、公证费、律师费等。
+						</view>
+
+
+						<view>十二、有效通知及平台联系信息</view>
+						<view>
+							1.用户应当准确填写并及时更新提供给红朵的电子邮箱地址、联系电话、联系地址、邮政编码等联系信息,保证联系信息的有效性,以便红朵通过用户提供的联系信息可以有效、及时联系。
+						</view>
+						<view>2.通过用户提供的联系信息无法与用户取得联系,因此导致平台的任何损失或增加成本、咨询师费用、用户自身损失、未及时知悉服务协议更新等,由用户承担全部责任。
+						</view>
+						<view>3.平台联系信息:</view>
+						<view>
+							收件地址:河南省郑州市高新技术产业开发区西四环206号3号楼
+						</view>
+						<view>收件人:红朵信息科技心理平台部 (收)</view>
+
+						<view>邮编:450000</view>
+
+						<view>电子邮箱:hongduo_tech@126.com</view>
+
+						<view>红朵按以上平台联系信息为有效送达信息,红朵通过用户提供的电子邮箱地址向用户送达的通知等相关文件,属于有效送达。
+						</view>
+
+
+						<view>十三、其他</view>
+						<view>
+							1.本协议双方均无权对外代表另一方明示或默示作出任何陈述或保证。</view>
+						<view>
+							2.本协议的任何条款,因法律、法规、政策变更导致无效或无法执行,该条款的单独无效不影响其他条款的效力,其他条款仍具有法律效力。
+						</view>
+						<view>3.甲乙双方应共同遵守本协议的内容,在执行过程中发生纠纷的,应友好协商,协商不成的,双方均有权向广州市天河区人民法院提起诉讼解决
+						</view>
+					</view>
+					<view style="padding: 10px;">
+						<button class='popup-know' @click="close()">我知道了</button>
+					</view>
+				</view>
+			</uni-popup>
+		</view>
+		<view class='modal_box' v-show='modalFlag'>
+			<view class='modal_box_inner'>
+				<view class='modal_tk'>
+					请阅读<text class="tk_link" @click="toggle()">《测评服务条款》</text>,点击同意并立即测试,开始测评答题!
+				</view>
+				<view class='ty_class' @click='agreeFun()'>同意并进入产品</view>
+				<view class='bty_class' @click="unAgreeFun()">不同意</view>
+			</view>
+
+		</view>
+	</view>
+</template>
+
+<script>
+	export default {
+		data() {
+			return {
+				isChecked: true,
+				isShake: false
+			}
+		},
+		onLoad() {
+			this.createUser();
+		},
+
+		methods: {
+			//查询用户信息
+			createUser() {
+				this.$request
+					.get({
+						url: 'system/temporaryUser',
+						loadingTip: "加载中..."
+					})
+					.then((res) => {
+						sessionStorage.setItem("user", JSON.stringify(res.data.user));
+						sessionStorage.setItem("token", res.data.token);
+					});
+			},
+
+			checkboxChange(e) {
+				this.isChecked = !this.isChecked;
+			},
+			goTest() {
+				if (!this.isChecked) {
+					uni.showToast({
+						icon: 'none',
+						title: '请勾选并同意测评服务条款'
+					})
+					this.isShake = true;
+					setTimeout(() => {
+						this.isShake = false;
+					}, 500);
+					return
+				}
+
+				uni.navigateTo({
+					url: '/newScale/EQtest/testPage'
+				})
+			},
+			toggle() {
+				// open 方法传入参数 等同在 uni-popup 组件上绑定 type属性
+				this.$refs.popup.open()
+			},
+			close() {
+				this.$refs.popup.close();
+				this.isChecked = true;
+			}
+		}
+	}
+</script>
+
+<style scoped>
+	.bg {
+		width: 100%;
+		height: 3707rpx;
+		background: url(https://test.jue-ming.com:8849/api/show?filePath=./webo/EQtest/index_bg.png) no-repeat center;
+		background-size: cover;
+		text-align: center;
+	}
+
+	.banner {
+		width: 702rpx;
+	}
+
+	.banner1 {
+		margin: 34rpx auto 12rpx;
+	}
+
+	.banner2 {
+		margin: 12rpx auto 12rpx;
+	}
+
+	.banner3 {
+		margin: 12rpx auto 12rpx;
+	}
+
+	.banner4 {
+		margin: 12rpx auto 12rpx;
+	}
+
+	.contral_box {
+		box-sizing: border-box;
+		width: 100%;
+		height: 182rpx;
+		background: #FFFFFF;
+		overflow: hidden;
+	}
+
+	.agreement {
+		display: flex;
+		align-items: center;
+		font-family: Alibaba PuHuiTi 2.0;
+		font-weight: normal;
+		font-size: 32rpx;
+		color: #333333;
+		line-height: 39rpx;
+		margin: 26rpx 0 16rpx 53rpx;
+	}
+
+	.agreement input {
+		width: 43rpx;
+		height: 43rpx;
+		background: #FFFFFF;
+		border-radius: 4rpx;
+		border: 1px solid #999999;
+		margin-right: 23rpx;
+	}
+
+	.agreement text {
+		color: #0060FF;
+	}
+
+	.go_test {
+		margin: 0 auto;
+		cursor: pointer;
+		margin-bottom: 46rpx;
+	}
+
+	.go_test:active {
+		opacity: 0.8;
+	}
+
+	.go_test img {
+		width: 640rpx
+	}
+
+	::v-deep .uni-checkbox-input {
+		width: 43rpx;
+		height: 43rpx;
+		background: #FFFFFF;
+		border-radius: 4rpx;
+		border: 1px solid #999999;
+	}
+
+	::v-deep uni-checkbox:not([disabled]) .uni-checkbox-input:hover {
+		border-color: #666666;
+	}
+
+	/* 关键帧 */
+	@keyframes shake {
+		0% {
+			transform: translateX(0);
+		}
+
+		10%,
+		30%,
+		50%,
+		70%,
+		90% {
+			transform: translateX(-5px);
+		}
+
+		20%,
+		40%,
+		60%,
+		80% {
+			transform: translateX(5px);
+		}
+
+		100% {
+			transform: translateX(0);
+		}
+	}
+
+	/* 动画class样式 */
+	.shake-animation {
+		animation: shake 0.5s;
+	}
+
+	.popup-main>>>uni-view {
+		max-width: 750rpx;
+
+		margin: auto;
+		/* border-radius: 20px; */
+		border-top-left-radius: 20px;
+		border-top-right-radius: 20px;
+	}
+
+	.popup-detail {
+		padding-left: 20px;
+		padding-right: 20px;
+		font-size: 14px;
+		line-height: 20px;
+		/* letter-spacing: 4px; */
+		overflow-y: auto;
+		height: 78vh;
+		border-top-left-radius: 0px !important;
+		border-top-right-radius: 0px !important;
+		text-align: left;
+		/* overflow-y: auto;
+		height:90vh; */
+	}
+
+	.popup-title-cla {
+		text-align: center;
+		font-size: 20px;
+		font-weight: 700;
+		margin-bottom: 20px !important;
+	}
+
+	.popup-cuo {
+		padding: 20rpx;
+		text-align: right;
+	}
+
+	.popup-know {
+		width: 92%;
+		background: linear-gradient(269deg, #FF443D 0%, #FF9C00 100%);
+		color: #ffffff;
+		border-radius: 20px;
+
+	}
+</style>

+ 204 - 0
newScale/EQtest/paySuccess.vue

@@ -0,0 +1,204 @@
+<template>
+	<view class="pay_success_bg">
+		<view class="register_bg">
+			<view class="phone_input"><input @blur="checkPhone()" v-model="phone" type="tel" placeholder="请输入手机号">
+			</view>
+			<view class="password_input"><input v-model="code" type="number" placeholder="请输入验证码">
+				<view class="sms" @click='sendCode()'>{{sendCodeFlag}}</view>
+			</view>
+		</view>
+
+		<view class="send_btn" @click="saveAndView">
+			<image src="https://test.jue-ming.com:8849/api/show?filePath=./webo/EQtest/paymentPage/register_save_bg.png"
+				mode="widthFix"></image>
+		</view>
+	</view>
+</template>
+
+<script>
+	export default {
+		data() {
+			return {
+				phone: '',
+				code: '',
+				//时间标志
+				time: null,
+				//倒计时数字
+				timeCount: 60,
+				//显示倒计时还是发送验证码
+				sendCodeFlag: '发送验证码',
+				userId: '',
+				resultId: ''
+			}
+		},
+		onLoad(options) {
+			if (options.userId && options.resultId) {
+				this.userId = options.userId;
+				this.resultId = options.resultId;
+			}
+		},
+		onUnload() {
+			clearInterval(this.time);
+			this.phone = '';
+			this.code = '';
+			this.timeCount = 60;
+			this.sendCodeFlag = '发送验证码';
+		},
+		methods: {
+			//对手机号进行校验
+			checkPhone() {
+				var phoneReg = /^[1][3,4,5,7,8][0-9]{9}$/;
+				if (phoneReg.test(this.phone)) {
+					return true;
+				} else {
+					uni.showToast({
+						title: '请输入正确手机号',
+						icon: 'error'
+					})
+					return false;
+				}
+			},
+			sendCode() {
+				if (!this.checkPhone()) {
+					return;
+				}
+				if (this.sendCodeFlag == '重新发送' || this.sendCodeFlag == '发送验证码') {
+					this.timeCount = 60;
+					clearInterval(this.time)
+					//起一个定时器开始倒计时
+					this.sendCodeFlag = this.timeCount + 's'
+					this.time = setInterval(() => {
+						this.timeCount -= 1;
+						this.sendCodeFlag = this.timeCount + 's'
+						//如果倒计时为0时则停止倒计时
+						if (this.timeCount == 0) {
+							clearInterval(this.time)
+							this.sendCodeFlag = '重新发送'
+						}
+					}, 1000)
+					this.$request.get({
+						url: 'user/authCode',
+						loadingTip: "加载中...",
+						data: {
+							phone: this.phone
+						},
+					}).then((res) => {
+						uni.showToast({
+							title: '验证码已发送',
+							icon: 'success',
+						})
+					})
+				}
+
+			},
+			// 绑定手机号并查看报告
+			saveAndView() {
+				// 校验手机和验证码格式
+				if (this.checkPhone() && this.code != '') {
+					this.$request.get({
+						url: 'user/updateMobile',
+						loadingTip: "加载中...",
+						data: {
+							id: this.userId,
+							mobile: this.phone,
+							authCode: this.code,
+						},
+					}).then((res) => {
+						if (res.code == 200) {
+							uni.showToast({
+								icon: 'success',
+								title: '保存成功'
+							})
+							uni.navigateTo({
+								url: `/newScale/EQtest/testResult?resultId=${this.resultId}&messageShare=1`
+							});
+						} else {
+							uni.showToast({
+								icon: 'none',
+								title: res.msg
+							})
+						}
+					})
+				}
+			}
+		}
+	}
+</script>
+
+<style scoped>
+	.pay_success_bg {
+		width: 100%;
+		min-height: 100vh;
+		background: url(https://test.jue-ming.com:8849/api/show?filePath=./webo/EQtest/paymentPage/pay_success_bg.png) no-repeat top;
+		background-size: 100% auto;
+		overflow: hidden;
+	}
+
+	.register_bg {
+		width: 750rpx;
+		height: 498rpx;
+		background: url(https://test.jue-ming.com:8849/api/show?filePath=./webo/EQtest/paymentPage/register_bg.png) no-repeat top;
+		background-size: 100% auto;
+		margin-top: 348rpx;
+		overflow: hidden;
+	}
+
+	.register_bg input {
+		border: 0;
+		outline: none;
+		width: 529rpx;
+		height: 91rpx;
+		padding-left: 100rpx;
+	}
+
+	.phone_input input {
+		background: url(https://test.jue-ming.com:8849/api/show?filePath=./webo/EQtest/paymentPage/phone_input_bg.png) no-repeat top;
+		background-size: cover;
+		margin: 176rpx 0 0 64rpx;
+	}
+
+
+	.password_input {
+		margin: 29rpx 0 0 64rpx;
+		position: relative;
+	}
+
+	.password_input input {
+		background: url(https://test.jue-ming.com:8849/api/show?filePath=./webo/EQtest/paymentPage/password_input_bg.png) no-repeat top;
+		background-size: auto 100%;
+	}
+
+	::v-deep .uni-input-input::placeholder {
+		font-family: 'Alibaba PuHuiTi 2.0';
+		font-weight: normal;
+		font-size: 32rpx;
+		color: #999999;
+		line-height: 64rpx;
+	}
+
+	.sms {
+		width: 168rpx;
+		background: #FFEDF1;
+		border-radius: 36rpx;
+		position: absolute;
+		top: 9rpx;
+		right: 71rpx;
+		font-family: Alibaba PuHuiTi 2.0;
+		font-weight: bold;
+		font-size: 24rpx;
+		color: #EC3126;
+		line-height: 71rpx;
+		text-align: center;
+	}
+
+	.sms:active {
+		border: 1rpx solid #cccccc;
+		opacity: 0.8;
+		border-radius: 40rpx;
+	}
+
+	.send_btn {
+		margin: 340rpx auto 0;
+		text-align: center;
+	}
+</style>

+ 187 - 0
newScale/EQtest/paymentPage.vue

@@ -0,0 +1,187 @@
+<template>
+	<view class="payment_bg">
+		<view class="payment_info">
+			<image src="https://test.jue-ming.com:8849/api/show?filePath=./webo/EQtest/paymentPage/pay_scale_info.png"
+				mode="widthFix">
+			</image>
+			<image class="pay_btn_sm" @click="getH5Pay()"
+				src="https://test.jue-ming.com:8849/api/show?filePath=./webo/EQtest/paymentPage/pay_button.png"
+				mode="widthFix"></image>
+		</view>
+		<view class="payment_desc">
+			<image src="https://test.jue-ming.com:8849/api/show?filePath=./webo/EQtest/paymentPage/pay_scale_desc.png"
+				mode="widthFix"></image>
+		</view>
+		<view class="pay_btn_lg" @click="getH5Pay()">
+			<image src="https://test.jue-ming.com:8849/api/show?filePath=./webo/EQtest/paymentPage/pay_button.png"
+				mode="widthFix"></image>
+		</view>
+		<uni-popup ref="popup" type="center">
+			<view class="pay_dialog">
+				<view class="btn_area">
+					<image style="margin-right: 89rpx;" @click="completeHandle"
+						src="https://test.jue-ming.com:8849/api/show?filePath=./webo/EQtest/paymentPage/complete_btn.png"
+						mode="widthFix"></image>
+					<image @click="uncompleteHandle"
+						src="https://test.jue-ming.com:8849/api/show?filePath=./webo/EQtest/paymentPage/uncomplete_btn.png"
+						mode="widthFix"></image>
+				</view>
+			</view>
+		</uni-popup>
+	</view>
+</template>
+
+<script>
+	export default {
+		data() {
+			return {
+				orderNo: '',
+				orderInfo: {}
+			}
+		},
+		onLoad(options) {
+			if (options.orderNo) {
+				this.orderNo = options.orderNo;
+				setTimeout(() => {
+					this.payEnsure();
+				}, 200)
+			}
+			this.orderInfo = JSON.parse(sessionStorage.getItem('orderInfo'));
+			// this.queryPromotion();
+		},
+		methods: {
+			// 检查订单支付状态
+			checkOrder() {
+				let _this = this;
+				_this.$request
+					.get({
+						url: `api/wx-pay/queryOrder/${_this.orderNo}`,
+						loadingTip: "加载中...",
+						data: {},
+					})
+					.then((res) => {
+						uni.hideLoading();
+						let resultInfo = JSON.parse(res.data.body)
+						if (resultInfo.trade_state === 'SUCCESS') {
+							uni.navigateTo({
+								url: `/newScale/EQtest/paySuccess?resultId=${this.orderInfo.resultId}&userId=${this.orderInfo.userId}`
+							});
+						} else {
+							return;
+						}
+					});
+			},
+			// 拉起微信支付
+			getH5Pay(params) {
+				let _this = this;
+				_this.$request
+					.get({
+						url: `api/orderInfo/queryOrderByResultIdAndUserId/${this.orderInfo.userId}/${this.orderInfo.resultId}`,
+						loadingTip: "加载中...",
+						data: {},
+					})
+					.then((res) => {
+						uni.hideLoading();
+						if (res.data && res.data.orderStatus === '支付成功') {
+							uni.navigateTo({
+								url: `/newScale/EQtest/paySuccess?resultId=${this.orderInfo.resultId}&userId=${this.orderInfo.userId}`
+							});
+						} else {
+							// _this.orderInfo.total = _this.hasDiscount ? _this.promotionPrice : _this.price;
+							_this.$request
+								.post({
+									url: "api/wx-pay/h5Pay",
+									loadingTip: "加载中...",
+									data: _this.orderInfo
+								})
+								.then((res) => {
+									let redirect_url = encodeURI(
+										`http://hnhong-duo.com/webo/newScale/EQtest/paymentPage?orderNo=${res.data.orderNo}`
+									)
+									window.location.href = `${res.data.codeUrl}&redirect_url=${redirect_url}`;
+								});
+						}
+					});
+			},
+			payEnsure() {
+				// console.log(11111)
+				this.$refs.popup.open();
+			},
+			uncompleteHandle() {
+				this.$refs.popup.close();
+			},
+			completeHandle() {
+				uni.showLoading({
+					title: ''
+				});
+				this.checkOrder();
+			}
+		}
+	}
+</script>
+
+<style scoped>
+	.payment_bg {
+		width: 100%;
+		background: url(https://test.jue-ming.com:8849/api/show?filePath=./webo/EQtest/paymentPage/payment_bg.png) no-repeat top;
+		background-size: 100% auto;
+		overflow: hidden;
+	}
+
+	.payment_info {
+		position: relative;
+	}
+
+	.payment_info image {
+		width: 100%;
+		height: auto;
+		margin-top: 134rpx;
+	}
+
+	.payment_info .pay_btn_sm {
+		width: 640rpx;
+		height: auto;
+		position: absolute;
+		left: 55rpx;
+		bottom: 73rpx;
+	}
+
+	.payment_desc image {
+		width: 750rpx;
+	}
+
+	.pay_btn_lg {
+		width: 640rpx;
+		margin: 40rpx auto;
+	}
+
+	.pay_btn_lg image {
+		width: 640rpx;
+		height: auto;
+	}
+
+	.pay_dialog {
+		width: 594rpx;
+		height: 360rpx;
+		background: url(https://test.jue-ming.com:8849/api/show?filePath=./webo/EQtest/paymentPage/dialog_bg.png) no-repeat top;
+		background-size: cover;
+		position: fixed;
+		top: 540rpx;
+		left: 50%;
+		transform: translate(-50%, -50%);
+	}
+
+	.btn_area {
+		margin: 240rpx 0 0 96rpx;
+	}
+
+	.btn_area image {
+		width: 160rpx;
+	}
+
+	.btn_area image:active {
+		border: 1rpx solid #999999;
+		opacity: 0.8;
+		border-radius: 28rpx;
+	}
+</style>

+ 386 - 0
newScale/EQtest/testPage.vue

@@ -0,0 +1,386 @@
+<template>
+	<view class="bg">
+		<view class="process">
+			<view class="process_bar" :style="{'width': percentage}"></view>
+		</view>
+		<view class="tips">请选择你的答案进入下一题</view>
+		<view class="question_box">
+			<view class="question_num">
+				<text class="num_front">出题{{currentIndex + 1}}</text><text class="num_behond">/{{maxLength}}</text>
+			</view>
+			<image src="https://test.jue-ming.com:8849/api/show?filePath=./webo/EQtest/prev_mark.png" mode="widthFix">
+			</image>
+			<text class="qs_txt">{{currentQuestion.answer}}</text>
+			<image src="https://test.jue-ming.com:8849/api/show?filePath=./webo/EQtest/next_mark.png" mode="widthFix">
+			</image>
+		</view>
+		<view class="answer_list">
+			<view :class="['answer', `answer${index+1}`, checkActive(item)]" v-for="(item, index) in currentAnswerList"
+				@click="nextHandle(item)">
+				{{item}}
+			</view>
+		</view>
+		<view class="contral_box">
+			<view class="prev_btn" v-show="currentIndex > 0" @click="prevHandle">
+				<image src="https://test.jue-ming.com:8849/api/show?filePath=./webo/EQtest/prev_btn.png"
+					mode="widthFix"></image>
+			</view>
+			<view class="prev_btn" v-show="currentIndex >= maxLength - 1" @click="submitResult">
+				<image src="https://test.jue-ming.com:8849/api/show?filePath=./webo/EQtest/submit_btn.png"
+					mode="widthFix"></image>
+			</view>
+		</view>
+	</view>
+</template>
+
+<script>
+	import {
+		getResult,
+		queryPromotionBySubjectId,
+	} from "@/api/index.js";
+	export default {
+		data() {
+			return {
+				isChecked: true,
+				isShake: false,
+				scaleDetail: {},
+				questionList: [],
+				currentQuestion: {},
+				currentAnswerList: [],
+				currentIndex: 0,
+				userAnswerList: [],
+				resultId: '',
+				isLoading: false,
+				isDisbale: false,
+				maxLength: 0,
+				userInfo: {}
+			}
+		},
+		computed: {
+			percentage() {
+				return ((this.currentIndex / this.maxLength) * 652) + 'rpx'
+			}
+		},
+		created() {
+			this.loadData();
+			this.userInfo = JSON.parse(sessionStorage.getItem("user"));
+		},
+		methods: {
+			loadData() {
+				this.$request
+					.get({
+						url: `scaleInfo/20210820143117`,
+						loadingTip: "加载中...",
+						data: {},
+					}).then((res) => {
+						this.questionList = JSON.parse(JSON.stringify(res.data));
+						this.maxLength = this.questionList.length;
+						this.currentQuestion = this.questionList[this.currentIndex];
+						this.currentAnswerList = this.getAnswerItem(this.currentQuestion.checkItems);
+					})
+			},
+
+			nextHandle(str) {
+				if (this.isDisbale) {
+					return;
+				}
+				this.isDisbale = true;
+				this.userAnswerList[this.currentIndex] = JSON.parse(JSON.stringify(this.currentQuestion));
+				this.userAnswerList[this.currentIndex].checkItems = str;
+				if (this.currentIndex >= this.questionList.length - 1) {
+					this.isDisbale = false;
+					// this.submitResult();
+				} else {
+					setTimeout(() => {
+						this.currentIndex++;
+						this.currentQuestion = this.questionList[this.currentIndex];
+						this.currentAnswerList = this.getAnswerItem(this.currentQuestion.checkItems);
+						this.isDisbale = false;
+					}, 150)
+				}
+			},
+
+			prevHandle() {
+				setTimeout(() => {
+					this.currentIndex--;
+					this.currentQuestion = this.questionList[this.currentIndex];
+					this.currentAnswerList = this.getAnswerItem(this.currentQuestion.checkItems);
+				}, 300)
+			},
+
+			submitResult() {
+				let _this = this;
+				if (_this.isLoading) {
+					return
+				}
+				_this.isLoading = true;
+				let params = {
+					testPlanId: "",
+					scale_result: _this.userAnswerList,
+					userId: JSON.parse(sessionStorage.getItem("user")).id,
+				};
+
+				uni.showLoading({
+					title: "测试结果生成中",
+				});
+				_this.$request
+					.post({
+						url: `${getResult}/20210820143117`,
+						loadingTip: "加载中...",
+						data: params,
+					})
+					.then((res) => {
+						_this.resultId = res.data;
+						// _this.$request.post({
+						// 	url: `scaleExternalSource/save`,
+						// 	loadingTip: "加载中...",
+						// 	data: {
+						// 		currentUserId: _this.userInfo.id,
+						// 		resultId: _this.resultId,
+						// 		uid: _this.uid,
+						// 		source: _this.source,
+						// 		flag: _this.flag,
+						// 	},
+						// }).then(() => {})
+						uni.hideLoading();
+						// 答题结束获取支付
+						_this.getQueryPromotionBySubjectId();
+						_this.isLoading = false;
+					})
+					.catch(() => {
+						uni.showToast({
+							icon: "none",
+							title: "提交失败",
+						});
+						uni.hideLoading();
+						_this.isLoading = false;
+					});
+			},
+
+			// 获取支付金额
+			async getQueryPromotionBySubjectId() {
+				let _this = this;
+				let urls = queryPromotionBySubjectId + "/20210820143117";
+				await _this.$request
+					.get({
+						url: urls,
+						loadingTip: "加载中...",
+						data: {},
+					})
+					.then((res) => {
+						console.log('量表支付信息', res.data);
+						let data = res.data;
+						if (data.price == 0) {
+							uni.navigateTo({
+								url: `/newScale/EQtest/testResult?resultId=${_this.resultId}&messageShare=1`
+							});
+						} else {
+							let params = {
+								productId: '20210820143117',
+								userId: _this.userInfo?.id,
+								resultId: _this.resultId,
+								description: data.name,
+								total: data.price,
+								sceneType: uni.getSystemInfoSync().platform == "android" ?
+									"Android" : "iOS",
+							};
+							sessionStorage.setItem('orderInfo', JSON.stringify(params))
+							uni.navigateTo({
+								url: "/newScale/EQtest/paymentPage",
+							});
+						}
+					});
+			},
+
+			checkActive(item) {
+				return this.userAnswerList[this.currentIndex] && this.userAnswerList[this.currentIndex]
+					.checkItems ==
+					item ? 'active' : ''
+			},
+
+			getAnswerItem(arr) {
+				return arr.split(';')
+			}
+		}
+	}
+</script>
+
+<style scoped>
+	.bg {
+		width: 100%;
+		min-height: 100vh;
+		background: url(https://test.jue-ming.com:8849/api/show?filePath=./webo/EQtest/testPage_bg.png) no-repeat top;
+		background-size: 100% auto;
+		overflow: hidden;
+	}
+
+	.tips {
+		font-family: 'Alibaba PuHuiTi 2.0';
+		font-weight: normal;
+		font-size: 24rpx;
+		color: #FFFFFF;
+		line-height: 55rpx;
+		margin: 11rpx 0 0 44rpx;
+	}
+
+	.question_box {
+		box-sizing: border-box;
+		width: 726.9rpx;
+		height: 332rpx;
+		background: url(https://test.jue-ming.com:8849/api/show?filePath=./webo/EQtest/question_bg.png);
+		background-size: cover;
+		margin: -28rpx 11rpx 54rpx 11rpx;
+		font-family: 'Alibaba PuHuiTi 2.0';
+		font-weight: normal;
+		font-size: 40rpx;
+		color: #333333;
+		padding: 96rpx 30rpx 0 54rpx;
+		position: relative;
+	}
+
+	.question_box text.qs_txt {
+		padding-bottom: 4rpx;
+		border-bottom: 4rpx solid #2E9AFF;
+		line-height: 1.6;
+	}
+
+	.question_box image {
+		width: 43rpx;
+		margin: 0 10rpx;
+	}
+
+	.answer {
+		width: 577rpx;
+		padding: 0 50rpx 0 70rpx;
+		line-height: 126rpx;
+		font-family: 'Alibaba PuHuiTi 2.0';
+		font-weight: normal;
+		font-size: 36rpx;
+		color: #333333;
+		text-align: center;
+		margin: 0 auto 28rpx;
+		cursor: pointer;
+	}
+
+	.answer1 {
+		background: url(https://test.jue-ming.com:8849/api/show?filePath=./webo/EQtest/answer_A.png);
+		background-size: cover;
+	}
+
+	.answer1.active {
+		background: url(https://test.jue-ming.com:8849/api/show?filePath=./webo/EQtest/answer_A_active.png);
+		background-size: cover;
+		color: #FFFFFF;
+	}
+
+	.answer2 {
+		background: url(https://test.jue-ming.com:8849/api/show?filePath=./webo/EQtest/answer_B.png);
+		background-size: cover;
+	}
+
+	.answer2.active {
+		background: url(https://test.jue-ming.com:8849/api/show?filePath=./webo/EQtest/answer_B_active.png);
+		background-size: cover;
+		color: #FFFFFF;
+	}
+
+	.answer3 {
+		background: url(https://test.jue-ming.com:8849/api/show?filePath=./webo/EQtest/answer_C.png);
+		background-size: cover;
+	}
+
+	.answer3.active {
+		background: url(https://test.jue-ming.com:8849/api/show?filePath=./webo/EQtest/answer_C_active.png);
+		background-size: cover;
+		color: #FFFFFF;
+	}
+
+	.answer4 {
+		background: url(https://test.jue-ming.com:8849/api/show?filePath=./webo/EQtest/answer_D.png);
+		background-size: cover;
+	}
+
+	.answer4.active {
+		background: url(https://test.jue-ming.com:8849/api/show?filePath=./webo/EQtest/answer_D_active.png);
+		background-size: cover;
+		color: #FFFFFF;
+	}
+
+	.answer5 {
+		background: url(https://test.jue-ming.com:8849/api/show?filePath=./webo/EQtest/answer_E.png);
+		background-size: cover;
+	}
+
+	.answer5.active {
+		background: url(https://test.jue-ming.com:8849/api/show?filePath=./webo/EQtest/answer_E_active.png);
+		background-size: cover;
+		color: #FFFFFF;
+	}
+
+	.prev_btn {
+		text-align: center;
+	}
+
+	.process {
+		box-sizing: border-box;
+		width: 656rpx;
+		height: 25rpx;
+		padding: 2rpx;
+		background: #FFFFFF;
+		border-radius: 12rpx;
+		border: 1px solid #2E9AFF;
+		overflow: hidden;
+		margin: 190rpx auto 0;
+	}
+
+	.process_bar {
+		height: 19rpx;
+		background: #2E9AFF;
+		border-radius: 10rpx;
+		transition: width 200ms linear;
+	}
+
+	.question_num {
+		width: 238rpx;
+		line-height: 70rpx;
+		background: url(https://test.jue-ming.com:8849/api/show?filePath=./webo/EQtest/question_num_bg.png) no-repeat;
+		background-size: cover;
+		text-align: center;
+		position: absolute;
+		top: 30rpx;
+		left: 20rpx;
+	}
+
+	.num_front {
+		font-family: Alibaba PuHuiTi 2.0;
+		font-weight: bold;
+		font-size: 48rpx;
+		color: #FEFEFE;
+		line-height: 55rpx;
+		text-shadow: 2rpx 2rpx 5rpx rgba(146, 9, 0, 0.45);
+	}
+
+	.num_behond {
+		font-family: Alibaba PuHuiTi 2.0;
+		font-weight: bold;
+		font-size: 32rpx;
+		color: #FEFEFE;
+		line-height: 55rpx;
+		text-shadow: 2rpx 2rpx 5rpx rgba(146, 9, 0, 0.45);
+	}
+
+	.contral_box {
+		display: flex;
+		justify-content: center;
+		align-items: center;
+		margin: 54rpx 0 100rpx 0;
+	}
+
+	.prev_btn {
+		margin: 0 20rpx;
+	}
+
+	.prev_btn image {
+		width: 304rpx;
+	}
+</style>

+ 356 - 0
newScale/EQtest/testResult.vue

@@ -0,0 +1,356 @@
+<template>
+	<view class="bg">
+		<view class="process"></view>
+		<view class="role_name">
+			<image v-if="roleList[role]" :src="roleList[role].name" mode="widthFix"></image>
+		</view>
+		<view class="role_img">
+			<image v-if="roleList[role]" :src="roleList[role].img" mode="widthFix"></image>
+		</view>
+		<view class="result_box">
+			<view class="role_medal">
+				<image v-if="roleList[role]" :src="roleList[role].medal" mode="widthFix"></image>
+			</view>
+			<view class="score_bar">
+				<view class="score_title">得分:</view>
+				<view class="score_num">{{scoreNum}}</view>
+				<view class="score_percentage">
+					<view class="score_wrap">
+						<image class="process_bar" :style="{'left': percentage}"
+							src="https://test.jue-ming.com:8849/api/show?filePath=./webo/EQtest/process_bar.png"
+							mode="widthFix"></image>
+					</view>
+					<view class="score_text">{{parseInt(scoreNum)}}/{{parseInt(maxScore)}}</view>
+				</view>
+			</view>
+			<view class="result_box_r">
+				<view class="sm_title">说明:</view>
+				{{improvementSuggestions}}
+			</view>
+		</view>
+		<view class="report_desc">
+			<view class="desc_title">报告阅读说明</view>
+			<view class="desc_txt">谢谢您的参与,阅读本报告时,请注意以下内容:</view>
+			<view style="display: flex;">
+				<view class="list_mark">·</view>
+				<view class="desc_txt">本结果仅供参考,不可作为临床诊断的依据
+					如对报告有不理解的地方,建议向专业资质人员进行咨询;
+					如结果与你自己或他人感知的有出入,可回忆在测试时是否有事情影响到你,或自己答辩时是否有所顾虑。</view>
+			</view>
+		</view>
+	</view>
+</template>
+
+<script>
+	import {
+		getRecordById
+	} from "@/api/index.js";
+	import {
+		photoUrl
+	} from "@/common/config.js"
+	const photoUrls = `${photoUrl}api/show?filePath=./webo`
+	console.log(photoUrl);
+	export default {
+		data() {
+			return {
+				isChecked: true,
+				isShake: false,
+				scaleDetail: {},
+				questionList: [],
+				currentQuestion: {},
+				currentAnswerList: [],
+				currentIndex: 0,
+				userAnswerList: [],
+				resultId: '',
+				isLoading: false,
+				isDisbale: false,
+				roleList: {
+					'小菜鸟': {
+						name: `${photoUrls}/EQtest/role/role_name1.png`,
+						img: `${photoUrls}/EQtest/role/role_img1.png`,
+						medal: `${photoUrls}/EQtest/role/role_medal1.png`
+					},
+					'学徒': {
+						name: `${photoUrls}/EQtest/role/role_name2.png`,
+						img: `${photoUrls}/EQtest/role/role_img2.png`,
+						medal: `${photoUrls}/EQtest/role/role_medal1.png`
+					},
+					'学徒+': {
+						name: `${photoUrls}/EQtest/role/role_name3.png`,
+						img: `${photoUrls}/EQtest/role/role_img2.png`,
+						medal: `${photoUrls}/EQtest/role/role_medal2.png`
+					},
+					'探索者': {
+						name: `${photoUrls}/EQtest/role/role_name4.png`,
+						img: `${photoUrls}/EQtest/role/role_img3.png`,
+						medal: `${photoUrls}/EQtest/role/role_medal3.png`
+					},
+					'探索者+': {
+						name: `${photoUrls}/EQtest/role/role_name5.png`,
+						img: `${photoUrls}/EQtest/role/role_img3.png`,
+						medal: `${photoUrls}/EQtest/role/role_medal3.png`
+					},
+					'达人': {
+						name: `${photoUrls}/EQtest/role/role_name6.png`,
+						img: `${photoUrls}/EQtest/role/role_img4.png`,
+						medal: `${photoUrls}/EQtest/role/role_medal4.png`
+					},
+					'达人+': {
+						name: `${photoUrls}/EQtest/role/role_name7.png`,
+						img: `${photoUrls}/EQtest/role/role_img4.png`,
+						medal: `${photoUrls}/EQtest/role/role_medal4.png`
+					},
+					'大师': {
+						name: `${photoUrls}/EQtest/role/role_name8.png`,
+						img: `${photoUrls}/EQtest/role/role_img5.png`,
+						medal: `${photoUrls}/EQtest/role/role_medal5.png`
+					},
+					'大师+': {
+						name: `${photoUrls}/EQtest/role/role_name9.png`,
+						img: `${photoUrls}/EQtest/role/role_img5.png`,
+						medal: `${photoUrls}/EQtest/role/role_medal5.png`
+					},
+					'导师': {
+						name: `${photoUrls}/EQtest/role/role_name10.png`,
+						img: `${photoUrls}/EQtest/role/role_img6.png`,
+						medal: `${photoUrls}/EQtest/role/role_medal6.png`
+					},
+					'智者': {
+						name: `${photoUrls}/EQtest/role/role_name11.png`,
+						img: `${photoUrls}/EQtest/role/role_img7.png`,
+						medal: `${photoUrls}/EQtest/role/role_medal7.png`
+					},
+					'巨匠': {
+						name: `${photoUrls}/EQtest/role/role_name12.png`,
+						img: `${photoUrls}/EQtest/role/role_img8.png`,
+						medal: `${photoUrls}/EQtest/role/role_medal8.png`
+					},
+					'大帝': {
+						name: `${photoUrls}/EQtest/role/role_name13.png`,
+						img: `${photoUrls}/EQtest/role/role_img9.png`,
+						medal: `${photoUrls}/EQtest/role/role_medal9.png`
+					},
+					'至尊': {
+						name: `${photoUrls}/EQtest/role/role_name14.png`,
+						img: `${photoUrls}/EQtest/role/role_img10.png`,
+						medal: `${photoUrls}/EQtest/role/role_medal10.png`
+					}
+				},
+				resultId: '',
+				role: '',
+				improvementSuggestions: '',
+				scoreNum: 0,
+				maxScore: 0,
+				minScore: 0,
+			}
+		},
+		onLoad(options) {
+			if (options && options.resultId) {
+				this.resultId = options.resultId
+				this.getScaleTestResults(this.resultId);
+			}
+		},
+		computed: {
+			percentage() {
+				return (((this.scoreNum / this.maxScore) * 318) - 318) + 'rpx'
+			}
+		},
+		methods: {
+			getScaleTestResults(id) {
+				let _this = this;
+				_this.$request
+					.get({
+						url: `${getRecordById}?id=${id}`,
+						loadingTip: "加载中...",
+						data: {},
+					})
+					.then(
+						(res) => {
+							console.log('222:', new Date().getTime());
+							console.log("----测试结果--->", res);
+							if (res.code == 200) {
+								_this.testResult = JSON.parse(res.data?.userRecordEntity?.testResult)[0];
+								_this.role = _this.testResult.newTableContext.result[0].symptom;
+								_this.improvementSuggestions = _this.testResult.newTableContext.result[0]
+									.improvementSuggestions;
+								_this.scoreNum = _this.testResult.newTableContext.result[0].score;
+								_this.maxScore = _this.testResult.newTableContext.result[0].maxScore;
+							} else if (res.code == 401) {} else {
+								(res) => {
+									console.log("世界上绝对绝对绝对绝对绝对的", res);
+								}
+							}
+						})
+			},
+			getNameUrl() {
+				console.log(`${photoUrls}/EQtest/role/role_name1.png`)
+				return `${photoUrls}/EQtest/role/role_name1.png`
+			}
+		}
+	}
+</script>
+
+<style scoped>
+	.bg {
+		width: 100%;
+		height: 1900rpx;
+		background: url(../../static/images/EQtest/test_result_bg.png) no-repeat center;
+		background-size: 100% auto;
+	}
+
+	.role_name {
+		text-align: center;
+		margin: 202rpx 0 0 0;
+	}
+
+	.role_name image {
+		width: 308rpx;
+	}
+
+	.role_img {
+		text-align: center;
+		margin-top: -30rpx;
+	}
+
+	.role_img image {
+		width: 660rpx;
+	}
+
+	.result_box {
+		width: 706rpx;
+		min-height: 548rpx;
+		padding-bottom: 28rpx;
+		background: url(https://test.jue-ming.com:8849/api/show?filePath=./webo/EQtest/result_bg_w.png) no-repeat top;
+		background-size: 100% auto;
+		overflow: hidden;
+		margin: -180rpx auto 0;
+		border-radius: 33rpx;
+		position: relative;
+	}
+
+	.role_medal {
+		position: absolute;
+		right: 50rpx;
+		top: 0;
+	}
+
+	.role_medal image {
+		width: 155rpx;
+	}
+
+	.result_box_r {
+		box-sizing: border-box;
+		width: 660rpx;
+		min-height: 283rpx;
+		padding: 26rpx;
+		margin: 0 auto 0;
+		background: #FFFFFF url(https://test.jue-ming.com:8849/api/show?filePath=./webo/EQtest/result_bg_r.png) no-repeat top;
+		background-size: 100% auto;
+		border-radius: 33rpx;
+		font-family: 'Alibaba PuHuiTi 2.0';
+		font-weight: normal;
+		font-size: 32rpx;
+		color: #333333;
+		line-height: 46rpx;
+	}
+
+	.score_title {
+		font-family: 'Alibaba PuHuiTi 2.0';
+		font-weight: bold;
+		font-size: 36rpx;
+		color: #333333;
+		line-height: 31rpx;
+		margin: 104rpx 0 0 42rpx;
+	}
+
+	.score_num {
+		font-family: 'Alibaba PuHuiTi 2.0';
+		font-weight: bold;
+		font-size: 81rpx;
+		color: #FF9C00;
+		line-height: 51rpx;
+		/* text-stroke: 4px #FFFFFF;
+		background: linear-gradient(269deg, #FF443D 0%, #FF9C00 100%);
+		-webkit-text-stroke: 4px #FFFFFF;
+		-webkit-background-clip: text;
+		-webkit-text-fill-color: transparent; */
+		margin: 12rpx 0 7rpx 156rpx;
+	}
+
+	.score_percentage {
+		display: flex;
+		align-items: center;
+		justify-content: space-between;
+		width: 420rpx;
+		height: 46rpx;
+		background: url(https://test.jue-ming.com:8849/api/show?filePath=./webo/EQtest/score_percentage_bg.png) no-repeat center;
+		background-size: cover;
+		margin: 14rpx 0 8rpx 72rpx;
+	}
+
+	.score_wrap {
+		width: 318rpx;
+		height: 32rpx;
+		margin: 0 0 0 12rpx;
+		position: relative;
+		overflow-x: hidden;
+		border-radius: 25rpx;
+	}
+
+	.process {
+		overflow: hidden;
+	}
+
+	.process_bar {
+		width: 318rpx;
+		position: absolute;
+		left: -100rpx;
+	}
+
+	.score_text {
+		font-family: Alibaba PuHuiTi 2.0;
+		font-weight: normal;
+		font-size: 18rpx;
+		color: #FFFFFF;
+		line-height: 16rpx;
+		margin-right: 16rpx;
+	}
+
+	.sm_title {
+		font-family: 'Alibaba PuHuiTi 2.0';
+		font-weight: bold;
+		font-size: 36rpx;
+		color: #333333;
+		line-height: 31rpx;
+		margin-bottom: 22rpx;
+	}
+
+	.report_desc {
+		margin: 30rpx 38rpx;
+		letter-spacing: 1px;
+	}
+
+	.desc_title {
+		font-family: 'Alibaba PuHuiTi 2.0';
+		font-weight: normal;
+		font-size: 28rpx;
+		color: #FFFFFF;
+		line-height: 46rpx;
+	}
+
+	.desc_txt {
+		font-family: 'Alibaba PuHuiTi 2.0';
+		font-weight: normal;
+		font-size: 20rpx;
+		color: #FFFFFF;
+		line-height: 30rpx;
+	}
+
+	.list_mark {
+		line-height: 0.7;
+		font-size: 36rpx;
+		color: #ffffff;
+		font-weight: 600;
+		margin-right: 5rpx;
+	}
+</style>

+ 15 - 0
node_modules/.bin/mkdirp

@@ -0,0 +1,15 @@
+#!/bin/sh
+basedir=$(dirname "$(echo "$0" | sed -e 's,\\,/,g')")
+
+case `uname` in
+    *CYGWIN*|*MINGW*|*MSYS*) basedir=`cygpath -w "$basedir"`;;
+esac
+
+if [ -x "$basedir/node" ]; then
+  "$basedir/node"  "$basedir/../mkdirp/bin/cmd.js" "$@"
+  ret=$?
+else 
+  node  "$basedir/../mkdirp/bin/cmd.js" "$@"
+  ret=$?
+fi
+exit $ret

+ 17 - 0
node_modules/.bin/mkdirp.cmd

@@ -0,0 +1,17 @@
+@ECHO off
+SETLOCAL
+CALL :find_dp0
+
+IF EXIST "%dp0%\node.exe" (
+  SET "_prog=%dp0%\node.exe"
+) ELSE (
+  SET "_prog=node"
+  SET PATHEXT=%PATHEXT:;.JS;=;%
+)
+
+"%_prog%"  "%dp0%\..\mkdirp\bin\cmd.js" %*
+ENDLOCAL
+EXIT /b %errorlevel%
+:find_dp0
+SET dp0=%~dp0
+EXIT /b

+ 18 - 0
node_modules/.bin/mkdirp.ps1

@@ -0,0 +1,18 @@
+#!/usr/bin/env pwsh
+$basedir=Split-Path $MyInvocation.MyCommand.Definition -Parent
+
+$exe=""
+if ($PSVersionTable.PSVersion -lt "6.0" -or $IsWindows) {
+  # Fix case when both the Windows and Linux builds of Node
+  # are installed in the same directory
+  $exe=".exe"
+}
+$ret=0
+if (Test-Path "$basedir/node$exe") {
+  & "$basedir/node$exe"  "$basedir/../mkdirp/bin/cmd.js" $args
+  $ret=$LASTEXITCODE
+} else {
+  & "node$exe"  "$basedir/../mkdirp/bin/cmd.js" $args
+  $ret=$LASTEXITCODE
+}
+exit $ret

+ 15 - 0
node_modules/.bin/rimraf

@@ -0,0 +1,15 @@
+#!/bin/sh
+basedir=$(dirname "$(echo "$0" | sed -e 's,\\,/,g')")
+
+case `uname` in
+    *CYGWIN*|*MINGW*|*MSYS*) basedir=`cygpath -w "$basedir"`;;
+esac
+
+if [ -x "$basedir/node" ]; then
+  "$basedir/node"  "$basedir/../rimraf/bin.js" "$@"
+  ret=$?
+else 
+  node  "$basedir/../rimraf/bin.js" "$@"
+  ret=$?
+fi
+exit $ret

+ 17 - 0
node_modules/.bin/rimraf.cmd

@@ -0,0 +1,17 @@
+@ECHO off
+SETLOCAL
+CALL :find_dp0
+
+IF EXIST "%dp0%\node.exe" (
+  SET "_prog=%dp0%\node.exe"
+) ELSE (
+  SET "_prog=node"
+  SET PATHEXT=%PATHEXT:;.JS;=;%
+)
+
+"%_prog%"  "%dp0%\..\rimraf\bin.js" %*
+ENDLOCAL
+EXIT /b %errorlevel%
+:find_dp0
+SET dp0=%~dp0
+EXIT /b

+ 18 - 0
node_modules/.bin/rimraf.ps1

@@ -0,0 +1,18 @@
+#!/usr/bin/env pwsh
+$basedir=Split-Path $MyInvocation.MyCommand.Definition -Parent
+
+$exe=""
+if ($PSVersionTable.PSVersion -lt "6.0" -or $IsWindows) {
+  # Fix case when both the Windows and Linux builds of Node
+  # are installed in the same directory
+  $exe=".exe"
+}
+$ret=0
+if (Test-Path "$basedir/node$exe") {
+  & "$basedir/node$exe"  "$basedir/../rimraf/bin.js" $args
+  $ret=$LASTEXITCODE
+} else {
+  & "node$exe"  "$basedir/../rimraf/bin.js" $args
+  $ret=$LASTEXITCODE
+}
+exit $ret

+ 10 - 0
node_modules/@gar/promisify/LICENSE.md

@@ -0,0 +1,10 @@
+The MIT License (MIT)
+
+Copyright © 2020-2022 Michael Garvin
+
+Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the “Software”), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED “AS IS”, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+

+ 65 - 0
node_modules/@gar/promisify/README.md

@@ -0,0 +1,65 @@
+# @gar/promisify
+
+### Promisify an entire object or class instance
+
+This module leverages es6 Proxy and Reflect to promisify every function in an
+object or class instance.
+
+It assumes the callback that the function is expecting is the last
+parameter, and that it is an error-first callback with only one value,
+i.e. `(err, value) => ...`. This mirrors node's `util.promisify` method.
+
+In order that you can use it as a one-stop-shop for all your promisify
+needs, you can also pass it a function.  That function will be
+promisified as normal using node's built-in `util.promisify` method.
+
+[node's custom promisified
+functions](https://nodejs.org/api/util.html#util_custom_promisified_functions)
+will also be mirrored, further allowing this to be a drop-in replacement
+for the built-in `util.promisify`.
+
+### Examples
+
+Promisify an entire object
+
+```javascript
+
+const promisify = require('@gar/promisify')
+
+class Foo {
+  constructor (attr) {
+    this.attr = attr
+  }
+
+  double (input, cb) {
+    cb(null, input * 2)
+  }
+}
+
+const foo = new Foo('baz')
+const promisified = promisify(foo)
+
+console.log(promisified.attr)
+console.log(await promisified.double(1024))
+```
+
+Promisify a function
+
+```javascript
+
+const promisify = require('@gar/promisify')
+
+function foo (a, cb) {
+  if (a !== 'bad') {
+    return cb(null, 'ok')
+  }
+  return cb('not ok')
+}
+
+const promisified = promisify(foo)
+
+// This will resolve to 'ok'
+promisified('good')
+
+// this will reject
+promisified('bad')
+```

+ 36 - 0
node_modules/@gar/promisify/index.js

@@ -0,0 +1,36 @@
+'use strict'
+
+const { promisify } = require('util')
+
+const handler = {
+  get: function (target, prop, receiver) {
+    if (typeof target[prop] !== 'function') {
+      return target[prop]
+    }
+    if (target[prop][promisify.custom]) {
+      return function () {
+        return Reflect.get(target, prop, receiver)[promisify.custom].apply(target, arguments)
+      }
+    }
+    return function () {
+      return new Promise((resolve, reject) => {
+        Reflect.get(target, prop, receiver).apply(target, [...arguments, function (err, result) {
+          if (err) {
+            return reject(err)
+          }
+          resolve(result)
+        }])
+      })
+    }
+  }
+}
+
+module.exports = function (thingToPromisify) {
+  if (typeof thingToPromisify === 'function') {
+    return promisify(thingToPromisify)
+  }
+  if (typeof thingToPromisify === 'object') {
+    return new Proxy(thingToPromisify, handler)
+  }
+  throw new TypeError('Can only promisify functions or objects')
+}

+ 65 - 0
node_modules/@gar/promisify/package.json

@@ -0,0 +1,65 @@
+{
+  "_from": "@gar/promisify@^1.0.1",
+  "_id": "@gar/promisify@1.1.3",
+  "_inBundle": false,
+  "_integrity": "sha512-k2Ty1JcVojjJFwrg/ThKi2ujJ7XNLYaFGNB/bWT9wGR+oSMJHMa5w+CUq6p/pVrKeNNgA7pCqEcjSnHVoqJQFw==",
+  "_location": "/@gar/promisify",
+  "_phantomChildren": {},
+  "_requested": {
+    "type": "range",
+    "registry": true,
+    "raw": "@gar/promisify@^1.0.1",
+    "name": "@gar/promisify",
+    "escapedName": "@gar%2fpromisify",
+    "scope": "@gar",
+    "rawSpec": "^1.0.1",
+    "saveSpec": null,
+    "fetchSpec": "^1.0.1"
+  },
+  "_requiredBy": [
+    "/@npmcli/fs"
+  ],
+  "_resolved": "https://registry.npmmirror.com/@gar/promisify/-/promisify-1.1.3.tgz",
+  "_shasum": "555193ab2e3bb3b6adc3d551c9c030d9e860daf6",
+  "_spec": "@gar/promisify@^1.0.1",
+  "_where": "E:\\psy_web_share\\node_modules\\@npmcli\\fs",
+  "author": {
+    "name": "Gar",
+    "email": "gar+npm@danger.computer"
+  },
+  "bugs": {
+    "url": "https://github.com/wraithgar/gar-promisify/issues"
+  },
+  "bundleDependencies": false,
+  "deprecated": false,
+  "description": "Promisify an entire class or object",
+  "devDependencies": {
+    "@hapi/code": "^8.0.1",
+    "@hapi/lab": "^24.1.0",
+    "standard": "^16.0.3"
+  },
+  "files": [
+    "index.js"
+  ],
+  "homepage": "https://github.com/wraithgar/gar-promisify#readme",
+  "keywords": [
+    "promisify",
+    "all",
+    "class",
+    "object"
+  ],
+  "license": "MIT",
+  "main": "index.js",
+  "name": "@gar/promisify",
+  "repository": {
+    "type": "git",
+    "url": "git+https://github.com/wraithgar/gar-promisify.git"
+  },
+  "scripts": {
+    "lint": "standard",
+    "lint:fix": "standard --fix",
+    "posttest": "npm run lint",
+    "test": "lab -a @hapi/code -t 100"
+  },
+  "version": "1.1.3"
+}

+ 20 - 0
node_modules/@npmcli/fs/LICENSE.md

@@ -0,0 +1,20 @@
+<!-- This file is automatically added by @npmcli/template-oss. Do not edit. -->
+
+ISC License
+
+Copyright npm, Inc.
+
+Permission to use, copy, modify, and/or distribute this
+software for any purpose with or without fee is hereby
+granted, provided that the above copyright notice and this
+permission notice appear in all copies.
+
+THE SOFTWARE IS PROVIDED "AS IS" AND NPM DISCLAIMS ALL
+WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL
+IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO
+EVENT SHALL NPM BE LIABLE FOR ANY SPECIAL, DIRECT,
+INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS,
+WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER
+TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE
+USE OR PERFORMANCE OF THIS SOFTWARE.

+ 60 - 0
node_modules/@npmcli/fs/README.md

@@ -0,0 +1,60 @@
+# @npmcli/fs
+
+polyfills, and extensions, of the core `fs` module.
+
+## Features
+
+- all exposed functions return promises
+- `fs.rm` polyfill for node versions < 14.14.0
+- `fs.mkdir` polyfill adding support for the `recursive` and `force` options in node versions < 10.12.0
+- `fs.copyFile` extended to accept an `owner` option
+- `fs.mkdir` extended to accept an `owner` option
+- `fs.mkdtemp` extended to accept an `owner` option
+- `fs.writeFile` extended to accept an `owner` option
+- `fs.withTempDir` added
+- `fs.cp` polyfill for node < 16.7.0
+
+## The `owner` option
+
+The `copyFile`, `mkdir`, `mkdtemp`, `writeFile`, and `withTempDir` functions
+all accept a new `owner` property in their options. It can be used in two ways:
+
+- `{ owner: { uid: 100, gid: 100 } }` - set the `uid` and `gid` explicitly
+- `{ owner: 100 }` - use one value, will set both `uid` and `gid` the same
+
+The special string `'inherit'` may be passed instead of a number, which will
+cause this module to automatically determine the correct `uid` and/or `gid`
+from the nearest existing parent directory of the target.
+
+## `fs.withTempDir(root, fn, options) -> Promise`
+
+### Parameters
+
+- `root`: the directory in which to create the temporary directory
+- `fn`: a function that will be called with the path to the temporary directory
+- `options`
+  - `tmpPrefix`: a prefix to be used in the generated directory name
+
+### Usage
+
+The `withTempDir` function creates a temporary directory, runs the provided
+function (`fn`), then removes the temporary directory and resolves or rejects
+based on the result of `fn`.
+
+```js
+const fs = require('@npmcli/fs')
+const os = require('os')
+
+// this function will be called with the full path to the temporary directory
+// it is called with `await` behind the scenes, so can be async if desired.
+const myFunction = async (tempPath) => {
+  return 'done!'
+}
+
+const main = async () => {
+  const result = await fs.withTempDir(os.tmpdir(), myFunction)
+  // result === 'done!'
+}
+
+main()
+```

+ 17 - 0
node_modules/@npmcli/fs/lib/common/file-url-to-path/index.js

@@ -0,0 +1,17 @@
+const url = require('url')
+
+const node = require('../node.js')
+const polyfill = require('./polyfill.js')
+
+const useNative = node.satisfies('>=10.12.0')
+
+const fileURLToPath = (path) => {
+  // the polyfill is tested separately from this module, no need to hack
+  // process.version to try to trigger it just for coverage
+  // istanbul ignore next
+  return useNative
+    ? url.fileURLToPath(path)
+    : polyfill(path)
+}
+
+module.exports = fileURLToPath

+ 121 - 0
node_modules/@npmcli/fs/lib/common/file-url-to-path/polyfill.js

@@ -0,0 +1,121 @@
+const { URL, domainToUnicode } = require('url')
+
+const CHAR_LOWERCASE_A = 97
+const CHAR_LOWERCASE_Z = 122
+
+const isWindows = process.platform === 'win32'
+
+class ERR_INVALID_FILE_URL_HOST extends TypeError {
+  constructor (platform) {
+    super(`File URL host must be "localhost" or empty on ${platform}`)
+    this.code = 'ERR_INVALID_FILE_URL_HOST'
+  }
+
+  toString () {
+    return `${this.name} [${this.code}]: ${this.message}`
+  }
+}
+
+class ERR_INVALID_FILE_URL_PATH extends TypeError {
+  constructor (msg) {
+    super(`File URL path ${msg}`)
+    this.code = 'ERR_INVALID_FILE_URL_PATH'
+  }
+
+  toString () {
+    return `${this.name} [${this.code}]: ${this.message}`
+  }
+}
+
+class ERR_INVALID_ARG_TYPE extends TypeError {
+  constructor (name, actual) {
+    super(`The "${name}" argument must be one of type string or an instance ` +
+      `of URL. Received type ${typeof actual} ${actual}`)
+    this.code = 'ERR_INVALID_ARG_TYPE'
+  }
+
+  toString () {
+    return `${this.name} [${this.code}]: ${this.message}`
+  }
+}
+
+class ERR_INVALID_URL_SCHEME extends TypeError {
+  constructor (expected) {
+    super(`The URL must be of scheme ${expected}`)
+    this.code = 'ERR_INVALID_URL_SCHEME'
+  }
+
+  toString () {
+    return `${this.name} [${this.code}]: ${this.message}`
+  }
+}
+
+const isURLInstance = (input) => {
+  return input != null && input.href && input.origin
+}
+
+const getPathFromURLWin32 = (url) => {
+  const hostname = url.hostname
+  let pathname = url.pathname
+  for (let n = 0; n < pathname.length; n++) {
+    if (pathname[n] === '%') {
+      const third = pathname.codePointAt(n + 2) | 0x20
+      if ((pathname[n + 1] === '2' && third === 102) ||
+        (pathname[n + 1] === '5' && third === 99)) {
+        throw new ERR_INVALID_FILE_URL_PATH('must not include encoded \\ or / characters')
+      }
+    }
+  }
+
+  pathname = pathname.replace(/\//g, '\\')
+  pathname = decodeURIComponent(pathname)
+  if (hostname !== '') {
+    return `\\\\${domainToUnicode(hostname)}${pathname}`
+  }
+
+  const letter = pathname.codePointAt(1) | 0x20
+  const sep = pathname[2]
+  if (letter < CHAR_LOWERCASE_A || letter > CHAR_LOWERCASE_Z ||
+    (sep !== ':')) {
+    throw new ERR_INVALID_FILE_URL_PATH('must be absolute')
+  }
+
+  return pathname.slice(1)
+}
+
+const getPathFromURLPosix = (url) => {
+  if (url.hostname !== '') {
+    throw new ERR_INVALID_FILE_URL_HOST(process.platform)
+  }
+
+  const pathname = url.pathname
+
+  for (let n = 0; n < pathname.length; n++) {
+    if (pathname[n] === '%') {
+      const third = pathname.codePointAt(n + 2) | 0x20
+      if (pathname[n + 1] === '2' && third === 102) {
+        throw new ERR_INVALID_FILE_URL_PATH('must not include encoded / characters')
+      }
+    }
+  }
+
+  return decodeURIComponent(pathname)
+}
+
+const fileURLToPath = (path) => {
+  if (typeof path === 'string') {
+    path = new URL(path)
+  } else if (!isURLInstance(path)) {
+    throw new ERR_INVALID_ARG_TYPE('path', ['string', 'URL'], path)
+  }
+
+  if (path.protocol !== 'file:') {
+    throw new ERR_INVALID_URL_SCHEME('file')
+  }
+
+  return isWindows
+    ? getPathFromURLWin32(path)
+    : getPathFromURLPosix(path)
+}
+
+module.exports = fileURLToPath

+ 20 - 0
node_modules/@npmcli/fs/lib/common/get-options.js

@@ -0,0 +1,20 @@
+// given an input that may or may not be an object, return an object that has
+// a copy of every defined property listed in 'copy'. if the input is not an
+// object, assign it to the property named by 'wrap'
+const getOptions = (input, { copy, wrap }) => {
+  const result = {}
+
+  if (input && typeof input === 'object') {
+    for (const prop of copy) {
+      if (input[prop] !== undefined) {
+        result[prop] = input[prop]
+      }
+    }
+  } else {
+    result[wrap] = input
+  }
+
+  return result
+}
+
+module.exports = getOptions

+ 9 - 0
node_modules/@npmcli/fs/lib/common/node.js

@@ -0,0 +1,9 @@
+const semver = require('semver')
+
+const satisfies = (range) => {
+  return semver.satisfies(process.version, range, { includePrerelease: true })
+}
+
+module.exports = {
+  satisfies,
+}

+ 92 - 0
node_modules/@npmcli/fs/lib/common/owner.js

@@ -0,0 +1,92 @@
+const { dirname, resolve } = require('path')
+
+const fileURLToPath = require('./file-url-to-path/index.js')
+const fs = require('../fs.js')
+
+// given a path, find the owner of the nearest parent
+const find = async (path) => {
+  // if we have no getuid, permissions are irrelevant on this platform
+  if (!process.getuid) {
+    return {}
+  }
+
+  // fs methods accept URL objects with a scheme of file: so we need to unwrap
+  // those into an actual path string before we can resolve it
+  const resolved = path != null && path.href && path.origin
+    ? resolve(fileURLToPath(path))
+    : resolve(path)
+
+  let stat
+
+  try {
+    stat = await fs.lstat(resolved)
+  } finally {
+    // if we got a stat, return its contents
+    if (stat) {
+      return { uid: stat.uid, gid: stat.gid }
+    }
+
+    // try the parent directory
+    if (resolved !== dirname(resolved)) {
+      return find(dirname(resolved))
+    }
+
+    // no more parents, never got a stat, just return an empty object
+    return {}
+  }
+}
+
+// given a path, uid, and gid update the ownership of the path if necessary
+const update = async (path, uid, gid) => {
+  // nothing to update, just exit
+  if (uid === undefined && gid === undefined) {
+    return
+  }
+
+  try {
+    // see if the permissions are already the same, if they are we don't
+    // need to do anything, so return early
+    const stat = await fs.stat(path)
+    if (uid === stat.uid && gid === stat.gid) {
+      return
+    }
+  } catch (err) {}
+
+  try {
+    await fs.chown(path, uid, gid)
+  } catch (err) {}
+}
+
+// accepts a `path` and the `owner` property of an options object and normalizes
+// it into an object with numerical `uid` and `gid`
+const validate = async (path, input) => {
+  let uid
+  let gid
+
+  if (typeof input === 'string' || typeof input === 'number') {
+    uid = input
+    gid = input
+  } else if (input && typeof input === 'object') {
+    uid = input.uid
+    gid = input.gid
+  }
+
+  if (uid === 'inherit' || gid === 'inherit') {
+    const owner = await find(path)
+    if (uid === 'inherit') {
+      uid = owner.uid
+    }
+
+    if (gid === 'inherit') {
+      gid = owner.gid
+    }
+  }
+
+  return { uid, gid }
+}
+
+module.exports = {
+  find,
+  update,
+  validate,
+}

+ 22 - 0
node_modules/@npmcli/fs/lib/copy-file.js

@@ -0,0 +1,22 @@
+const fs = require('./fs.js')
+const getOptions = require('./common/get-options.js')
+const owner = require('./common/owner.js')
+
+const copyFile = async (src, dest, opts) => {
+  const options = getOptions(opts, {
+    copy: ['mode', 'owner'],
+    wrap: 'mode',
+  })
+
+  const { uid, gid } = await owner.validate(dest, options.owner)
+
+  // the node core method as of 16.5.0 does not support the mode being in an
+  // object, so we have to pass the mode value directly
+  const result = await fs.copyFile(src, dest, options.mode)
+
+  await owner.update(dest, uid, gid)
+
+  return result
+}
+
+module.exports = copyFile

+ 15 - 0
node_modules/@npmcli/fs/lib/cp/LICENSE

@@ -0,0 +1,15 @@
+(The MIT License)
+
+Copyright (c) 2011-2017 JP Richardson
+
+Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files
+(the 'Software'), to deal in the Software without restriction, including without limitation the rights to use, copy, modify,
+ merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is
+ furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
+WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS
+OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
+ ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

+ 22 - 0
node_modules/@npmcli/fs/lib/cp/index.js

@@ -0,0 +1,22 @@
+const fs = require('../fs.js')
+const getOptions = require('../common/get-options.js')
+const node = require('../common/node.js')
+const polyfill = require('./polyfill.js')
+
+// node 16.7.0 added fs.cp
+const useNative = node.satisfies('>=16.7.0')
+
+const cp = async (src, dest, opts) => {
+  const options = getOptions(opts, {
+    copy: ['dereference', 'errorOnExist', 'filter', 'force', 'preserveTimestamps', 'recursive'],
+  })
+
+  // the polyfill is tested separately from this module, no need to hack
+  // process.version to try to trigger it just for coverage
+  // istanbul ignore next
+  return useNative
+    ? fs.cp(src, dest, options)
+    : polyfill(src, dest, options)
+}
+
+module.exports = cp

+ 428 - 0
node_modules/@npmcli/fs/lib/cp/polyfill.js

@@ -0,0 +1,428 @@
+// this file is a modified version of the code in node 17.2.0
+// which is, in turn, a modified version of the fs-extra module on npm
+// node core changes:
+// - Use of the assert module has been replaced with core's error system.
+// - All code related to the glob dependency has been removed.
+// - Bring your own custom fs module is not currently supported.
+// - Some basic code cleanup.
+// changes here:
+// - remove all callback related code
+// - drop sync support
+// - change assertions back to non-internal methods (see options.js)
+// - throws ENOTDIR when rmdir gets an ENOENT for a path that exists in Windows
+'use strict'
+
+const {
+  ERR_FS_CP_DIR_TO_NON_DIR,
+  ERR_FS_CP_EEXIST,
+  ERR_FS_CP_EINVAL,
+  ERR_FS_CP_FIFO_PIPE,
+  ERR_FS_CP_NON_DIR_TO_DIR,
+  ERR_FS_CP_SOCKET,
+  ERR_FS_CP_SYMLINK_TO_SUBDIRECTORY,
+  ERR_FS_CP_UNKNOWN,
+  ERR_FS_EISDIR,
+  ERR_INVALID_ARG_TYPE,
+} = require('../errors.js')
+const {
+  constants: {
+    errno: {
+      EEXIST,
+      EISDIR,
+      EINVAL,
+      ENOTDIR,
+    },
+  },
+} = require('os')
+const {
+  chmod,
+  copyFile,
+  lstat,
+  mkdir,
+  readdir,
+  readlink,
+  stat,
+  symlink,
+  unlink,
+  utimes,
+} = require('../fs.js')
+const {
+  dirname,
+  isAbsolute,
+  join,
+  parse,
+  resolve,
+  sep,
+  toNamespacedPath,
+} = require('path')
+const { fileURLToPath } = require('url')
+
+const defaultOptions = {
+  dereference: false,
+  errorOnExist: false,
+  filter: undefined,
+  force: true,
+  preserveTimestamps: false,
+  recursive: false,
+}
+
+async function cp (src, dest, opts) {
+  if (opts != null && typeof opts !== 'object') {
+    throw new ERR_INVALID_ARG_TYPE('options', ['Object'], opts)
+  }
+  return cpFn(
+    toNamespacedPath(getValidatedPath(src)),
+    toNamespacedPath(getValidatedPath(dest)),
+    { ...defaultOptions, ...opts })
+}
+
+function getValidatedPath (fileURLOrPath) {
+  const path = fileURLOrPath != null && fileURLOrPath.href
+      && fileURLOrPath.origin
+    ? fileURLToPath(fileURLOrPath)
+    : fileURLOrPath
+  return path
+}
+
+async function cpFn (src, dest, opts) {
+  // Warn about using preserveTimestamps on 32-bit node
+  // istanbul ignore next
+  if (opts.preserveTimestamps && process.arch === 'ia32') {
+    const warning = 'Using the preserveTimestamps option in 32-bit ' +
+      'node is not recommended'
+    process.emitWarning(warning, 'TimestampPrecisionWarning')
+  }
+  const stats = await checkPaths(src, dest, opts)
+  const { srcStat, destStat } = stats
+  await checkParentPaths(src, srcStat, dest)
+  if (opts.filter) {
+    return handleFilter(checkParentDir, destStat, src, dest, opts)
+  }
+  return checkParentDir(destStat, src, dest, opts)
+}
+
+async function checkPaths (src, dest, opts) {
+  const { 0: srcStat, 1: destStat } = await getStats(src, dest, opts)
+  if (destStat) {
+    if (areIdentical(srcStat, destStat)) {
+      throw new ERR_FS_CP_EINVAL({
+        message: 'src and dest cannot be the same',
+        path: dest,
+        syscall: 'cp',
+        errno: EINVAL,
+      })
+    }
+    if (srcStat.isDirectory() && !destStat.isDirectory()) {
+      throw new ERR_FS_CP_DIR_TO_NON_DIR({
+        message: `cannot overwrite directory ${src} ` +
+            `with non-directory ${dest}`,
+        path: dest,
+        syscall: 'cp',
+        errno: EISDIR,
+      })
+    }
+    if (!srcStat.isDirectory() && destStat.isDirectory()) {
+      throw new ERR_FS_CP_NON_DIR_TO_DIR({
+        message: `cannot overwrite non-directory ${src} ` +
+            `with directory ${dest}`,
+        path: dest,
+        syscall: 'cp',
+        errno: ENOTDIR,
+      })
+    }
+  }
+
+  if (srcStat.isDirectory() && isSrcSubdir(src, dest)) {
+    throw new ERR_FS_CP_EINVAL({
+      message: `cannot copy ${src} to a subdirectory of self ${dest}`,
+      path: dest,
+      syscall: 'cp',
+      errno: EINVAL,
+    })
+  }
+  return { srcStat, destStat }
+}
+
+function areIdentical (srcStat, destStat) {
+  return destStat.ino && destStat.dev && destStat.ino === srcStat.ino &&
+    destStat.dev === srcStat.dev
+}
+
+function getStats (src, dest, opts) {
+  const statFunc = opts.dereference ?
+    (file) => stat(file, { bigint: true }) :
+    (file) => lstat(file, { bigint: true })
+  return Promise.all([
+    statFunc(src),
+    statFunc(dest).catch((err) => {
+      // istanbul ignore next: unsure how to cover.
+      if (err.code === 'ENOENT') {
+        return null
+      }
+      // istanbul ignore next: unsure how to cover.
+      throw err
+    }),
+  ])
+}
+
+async function checkParentDir (destStat, src, dest, opts) {
+  const destParent = dirname(dest)
+  const dirExists = await pathExists(destParent)
+  if (dirExists) {
+    return getStatsForCopy(destStat, src, dest, opts)
+  }
+  await mkdir(destParent, { recursive: true })
+  return getStatsForCopy(destStat, src, dest, opts)
+}
+
+function pathExists (dest) {
+  return stat(dest).then(
+    () => true,
+    // istanbul ignore next: not sure when this would occur
+    (err) => (err.code === 'ENOENT' ? false : Promise.reject(err)))
+}
+
+// Recursively check if dest parent is a subdirectory of src.
+// It works for all file types including symlinks since it
+// checks the src and dest inodes. It starts from the deepest
+// parent and stops once it reaches the src parent or the root path.
+async function checkParentPaths (src, srcStat, dest) {
+  const srcParent = resolve(dirname(src))
+  const destParent = resolve(dirname(dest))
+  if (destParent === srcParent || destParent === parse(destParent).root) {
+    return
+  }
+  let destStat
+  try {
+    destStat = await stat(destParent, { bigint: true })
+  } catch (err) {
+    // istanbul ignore else: not sure when this would occur
+    if (err.code === 'ENOENT') {
+      return
+    }
+    // istanbul ignore next: not sure when this would occur
+    throw err
+  }
+  if (areIdentical(srcStat, destStat)) {
+    throw new ERR_FS_CP_EINVAL({
+      message: `cannot copy ${src} to a subdirectory of self ${dest}`,
+      path: dest,
+      syscall: 'cp',
+      errno: EINVAL,
+    })
+  }
+  return checkParentPaths(src, srcStat, destParent)
+}
+
+const normalizePathToArray = (path) =>
+  resolve(path).split(sep).filter(Boolean)
+
+// Return true if dest is a subdir of src, otherwise false.
+// It only checks the path strings.
+function isSrcSubdir (src, dest) {
+  const srcArr = normalizePathToArray(src)
+  const destArr = normalizePathToArray(dest)
+  return srcArr.every((cur, i) => destArr[i] === cur)
+}
+
+async function handleFilter (onInclude, destStat, src, dest, opts, cb) {
+  const include = await opts.filter(src, dest)
+  if (include) {
+    return onInclude(destStat, src, dest, opts, cb)
+  }
+}
+
+function startCopy (destStat, src, dest, opts) {
+  if (opts.filter) {
+    return handleFilter(getStatsForCopy, destStat, src, dest, opts)
+  }
+  return getStatsForCopy(destStat, src, dest, opts)
+}
+
+async function getStatsForCopy (destStat, src, dest, opts) {
+  const statFn = opts.dereference ? stat : lstat
+  const srcStat = await statFn(src)
+  // istanbul ignore else: can't portably test FIFO
+  if (srcStat.isDirectory() && opts.recursive) {
+    return onDir(srcStat, destStat, src, dest, opts)
+  } else if (srcStat.isDirectory()) {
+    throw new ERR_FS_EISDIR({
+      message: `${src} is a directory (not copied)`,
+      path: src,
+      syscall: 'cp',
+      errno: EINVAL,
+    })
+  } else if (srcStat.isFile() ||
+            srcStat.isCharacterDevice() ||
+            srcStat.isBlockDevice()) {
+    return onFile(srcStat, destStat, src, dest, opts)
+  } else if (srcStat.isSymbolicLink()) {
+    return onLink(destStat, src, dest)
+  } else if (srcStat.isSocket()) {
+    throw new ERR_FS_CP_SOCKET({
+      message: `cannot copy a socket file: ${dest}`,
+      path: dest,
+      syscall: 'cp',
+      errno: EINVAL,
+    })
+  } else if (srcStat.isFIFO()) {
+    throw new ERR_FS_CP_FIFO_PIPE({
+      message: `cannot copy a FIFO pipe: ${dest}`,
+      path: dest,
+      syscall: 'cp',
+      errno: EINVAL,
+    })
+  }
+  // istanbul ignore next: should be unreachable
+  throw new ERR_FS_CP_UNKNOWN({
+    message: `cannot copy an unknown file type: ${dest}`,
+    path: dest,
+    syscall: 'cp',
+    errno: EINVAL,
+  })
+}
+
+function onFile (srcStat, destStat, src, dest, opts) {
+  if (!destStat) {
+    return _copyFile(srcStat, src, dest, opts)
+  }
+  return mayCopyFile(srcStat, src, dest, opts)
+}
+
+async function mayCopyFile (srcStat, src, dest, opts) {
+  if (opts.force) {
+    await unlink(dest)
+    return _copyFile(srcStat, src, dest, opts)
+  } else if (opts.errorOnExist) {
+    throw new ERR_FS_CP_EEXIST({
+      message: `${dest} already exists`,
+      path: dest,
+      syscall: 'cp',
+      errno: EEXIST,
+    })
+  }
+}
+
+async function _copyFile (srcStat, src, dest, opts) {
+  await copyFile(src, dest)
+  if (opts.preserveTimestamps) {
+    return handleTimestampsAndMode(srcStat.mode, src, dest)
+  }
+  return setDestMode(dest, srcStat.mode)
+}
+
+async function handleTimestampsAndMode (srcMode, src, dest) {
+  // Make sure the file is writable before setting the timestamp
+  // otherwise open fails with EPERM when invoked with 'r+'
+  // (through utimes call)
+  if (fileIsNotWritable(srcMode)) {
+    await makeFileWritable(dest, srcMode)
+    return setDestTimestampsAndMode(srcMode, src, dest)
+  }
+  return setDestTimestampsAndMode(srcMode, src, dest)
+}
+
+function fileIsNotWritable (srcMode) {
+  return (srcMode & 0o200) === 0
+}
+
+function makeFileWritable (dest, srcMode) {
+  return setDestMode(dest, srcMode | 0o200)
+}
+
+async function setDestTimestampsAndMode (srcMode, src, dest) {
+  await setDestTimestamps(src, dest)
+  return setDestMode(dest, srcMode)
+}
+
+function setDestMode (dest, srcMode) {
+  return chmod(dest, srcMode)
+}
+
+async function setDestTimestamps (src, dest) {
+  // The initial srcStat.atime cannot be trusted
+  // because it is modified by the read(2) system call
+  // (See https://nodejs.org/api/fs.html#fs_stat_time_values)
+  const updatedSrcStat = await stat(src)
+  return utimes(dest, updatedSrcStat.atime, updatedSrcStat.mtime)
+}
+
+function onDir (srcStat, destStat, src, dest, opts) {
+  if (!destStat) {
+    return mkDirAndCopy(srcStat.mode, src, dest, opts)
+  }
+  return copyDir(src, dest, opts)
+}
+
+async function mkDirAndCopy (srcMode, src, dest, opts) {
+  await mkdir(dest)
+  await copyDir(src, dest, opts)
+  return setDestMode(dest, srcMode)
+}
+
+async function copyDir (src, dest, opts) {
+  const dir = await readdir(src)
+  for (let i = 0; i < dir.length; i++) {
+    const item = dir[i]
+    const srcItem = join(src, item)
+    const destItem = join(dest, item)
+    const { destStat } = await checkPaths(srcItem, destItem, opts)
+    await startCopy(destStat, srcItem, destItem, opts)
+  }
+}
+
+async function onLink (destStat, src, dest) {
+  let resolvedSrc = await readlink(src)
+  if (!isAbsolute(resolvedSrc)) {
+    resolvedSrc = resolve(dirname(src), resolvedSrc)
+  }
+  if (!destStat) {
+    return symlink(resolvedSrc, dest)
+  }
+  let resolvedDest
+  try {
+    resolvedDest = await readlink(dest)
+  } catch (err) {
+    // Dest exists and is a regular file or directory,
+    // Windows may throw UNKNOWN error. If dest already exists,
+    // fs throws error anyway, so no need to guard against it here.
+    // istanbul ignore next: can only test on windows
+    if (err.code === 'EINVAL' || err.code === 'UNKNOWN') {
+      return symlink(resolvedSrc, dest)
+    }
+    // istanbul ignore next: should not be possible
+    throw err
+  }
+  if (!isAbsolute(resolvedDest)) {
+    resolvedDest = resolve(dirname(dest), resolvedDest)
+  }
+  if (isSrcSubdir(resolvedSrc, resolvedDest)) {
+    throw new ERR_FS_CP_EINVAL({
+      message: `cannot copy ${resolvedSrc} to a subdirectory of self ` +
+            `${resolvedDest}`,
+      path: dest,
+      syscall: 'cp',
+      errno: EINVAL,
+    })
+  }
+  // Do not copy if src is a subdir of dest since unlinking
+  // dest in this case would result in removing src contents
+  // and therefore a broken symlink would be created.
+  const srcStat = await stat(src)
+  if (srcStat.isDirectory() && isSrcSubdir(resolvedDest, resolvedSrc)) {
+    throw new ERR_FS_CP_SYMLINK_TO_SUBDIRECTORY({
+      message: `cannot overwrite ${resolvedDest} with ${resolvedSrc}`,
+      path: dest,
+      syscall: 'cp',
+      errno: EINVAL,
+    })
+  }
+  return copyLink(resolvedSrc, dest)
+}
+
+async function copyLink (resolvedSrc, dest) {
+  await unlink(dest)
+  return symlink(resolvedSrc, dest)
+}
+
+module.exports = cp

+ 129 - 0
node_modules/@npmcli/fs/lib/errors.js

@@ -0,0 +1,129 @@
+'use strict'
+const { inspect } = require('util')
+
+// adapted from node's internal/errors
+// https://github.com/nodejs/node/blob/c8a04049/lib/internal/errors.js
+
+// close copy of node's internal SystemError class.
+class SystemError {
+  constructor (code, prefix, context) {
+    // XXX context.code is undefined in all constructors used in cp/polyfill
+    // that may be a bug copied from node, maybe the constructor should use
+    // `code` not `errno`?  nodejs/node#41104
+    let message = `${prefix}: ${context.syscall} returned ` +
+                  `${context.code} (${context.message})`
+
+    if (context.path !== undefined) {
+      message += ` ${context.path}`
+    }
+    if (context.dest !== undefined) {
+      message += ` => ${context.dest}`
+    }
+
+    this.code = code
+    Object.defineProperties(this, {
+      name: {
+        value: 'SystemError',
+        enumerable: false,
+        writable: true,
+        configurable: true,
+      },
+      message: {
+        value: message,
+        enumerable: false,
+        writable: true,
+        configurable: true,
+      },
+      info: {
+        value: context,
+        enumerable: true,
+        configurable: true,
+        writable: false,
+      },
+      errno: {
+        get () {
+          return context.errno
+        },
+        set (value) {
+          context.errno = value
+        },
+        enumerable: true,
+        configurable: true,
+      },
+      syscall: {
+        get () {
+          return context.syscall
+        },
+        set (value) {
+          context.syscall = value
+        },
+        enumerable: true,
+        configurable: true,
+      },
+    })
+
+    if (context.path !== undefined) {
+      Object.defineProperty(this, 'path', {
+        get () {
+          return context.path
+        },
+        set (value) {
+          context.path = value
+        },
+        enumerable: true,
+        configurable: true,
+      })
+    }
+
+    if (context.dest !== undefined) {
+      Object.defineProperty(this, 'dest', {
+        get () {
+          return context.dest
+        },
+        set (value) {
+          context.dest = value
+        },
+        enumerable: true,
+        configurable: true,
+      })
+    }
+  }
+
+  toString () {
+    return `${this.name} [${this.code}]: ${this.message}`
+  }
+
+  [Symbol.for('nodejs.util.inspect.custom')] (_recurseTimes, ctx) {
+    return inspect(this, {
+      ...ctx,
+      getters: true,
+      customInspect: false,
+    })
+  }
+}
+
+function E (code, message) {
+  module.exports[code] = class NodeError extends SystemError {
+    constructor (ctx) {
+      super(code, message, ctx)
+    }
+  }
+}
+
+E('ERR_FS_CP_DIR_TO_NON_DIR', 'Cannot overwrite directory with non-directory')
+E('ERR_FS_CP_EEXIST', 'Target already exists')
+E('ERR_FS_CP_EINVAL', 'Invalid src or dest')
+E('ERR_FS_CP_FIFO_PIPE', 'Cannot copy a FIFO pipe')
+E('ERR_FS_CP_NON_DIR_TO_DIR', 'Cannot overwrite non-directory with directory')
+E('ERR_FS_CP_SOCKET', 'Cannot copy a socket file')
+E('ERR_FS_CP_SYMLINK_TO_SUBDIRECTORY', 'Cannot overwrite symlink in subdirectory of self')
+E('ERR_FS_CP_UNKNOWN', 'Cannot copy an unknown file type')
+E('ERR_FS_EISDIR', 'Path is a directory')
+
+module.exports.ERR_INVALID_ARG_TYPE = class ERR_INVALID_ARG_TYPE extends Error {
+  constructor (name, expected, actual) {
+    super()
+    this.code = 'ERR_INVALID_ARG_TYPE'
+    this.message = `The ${name} argument must be ${expected}. Received ${typeof actual}`
+  }
+}

+ 8 - 0
node_modules/@npmcli/fs/lib/fs.js

@@ -0,0 +1,8 @@
+const fs = require('fs')
+const promisify = require('@gar/promisify')
+
+// this module returns the core fs module wrapped in a proxy that promisifies
+// method calls within the getter. we keep it in a separate module so that the
+// overridden methods have a consistent way to get to promisified fs methods
+// without creating a circular dependency
+module.exports = promisify(fs)

+ 10 - 0
node_modules/@npmcli/fs/lib/index.js

@@ -0,0 +1,10 @@
+module.exports = {
+  ...require('./fs.js'),
+  copyFile: require('./copy-file.js'),
+  cp: require('./cp/index.js'),
+  mkdir: require('./mkdir/index.js'),
+  mkdtemp: require('./mkdtemp.js'),
+  rm: require('./rm/index.js'),
+  withTempDir: require('./with-temp-dir.js'),
+  writeFile: require('./write-file.js'),
+}

+ 32 - 0
node_modules/@npmcli/fs/lib/mkdir/index.js

@@ -0,0 +1,32 @@
+const fs = require('../fs.js')
+const getOptions = require('../common/get-options.js')
+const node = require('../common/node.js')
+const owner = require('../common/owner.js')
+
+const polyfill = require('./polyfill.js')
+
+// node 10.12.0 added the options parameter, which allows recursive and mode
+// properties to be passed
+const useNative = node.satisfies('>=10.12.0')
+
+// extends mkdir with the ability to specify an owner of the new dir
+const mkdir = async (path, opts) => {
+  const options = getOptions(opts, {
+    copy: ['mode', 'recursive', 'owner'],
+    wrap: 'mode',
+  })
+  const { uid, gid } = await owner.validate(path, options.owner)
+
+  // the polyfill is tested separately from this module, no need to hack
+  // process.version to try to trigger it just for coverage
+  // istanbul ignore next
+  const result = useNative
+    ? await fs.mkdir(path, options)
+    : await polyfill(path, options)
+
+  await owner.update(path, uid, gid)
+
+  return result
+}
+
+module.exports = mkdir

+ 81 - 0
node_modules/@npmcli/fs/lib/mkdir/polyfill.js

@@ -0,0 +1,81 @@
+const { dirname } = require('path')
+
+const fileURLToPath = require('../common/file-url-to-path/index.js')
+const fs = require('../fs.js')
+
+const defaultOptions = {
+  mode: 0o777,
+  recursive: false,
+}
+
+const mkdir = async (path, opts) => {
+  const options = { ...defaultOptions, ...opts }
+
+  // if we're not in recursive mode, just call the real mkdir with the path and
+  // the mode option only
+  if (!options.recursive) {
+    return fs.mkdir(path, options.mode)
+  }
+
+  const makeDirectory = async (dir, mode) => {
+    // we can't use dirname directly since these functions support URL
+    // objects with the file: protocol as the path input, so first we get a
+    // string path, then we can call dirname on that
+    const parent = dir != null && dir.href && dir.origin
+      ? dirname(fileURLToPath(dir))
+      : dirname(dir)
+
+    // if the parent is the dir itself, try to create it. anything but EISDIR
+    // should be rethrown
+    if (parent === dir) {
+      try {
+        await fs.mkdir(dir, opts)
+      } catch (err) {
+        if (err.code !== 'EISDIR') {
+          throw err
+        }
+      }
+      return undefined
+    }
+
+    try {
+      await fs.mkdir(dir, mode)
+      return dir
+    } catch (err) {
+      // ENOENT means the parent wasn't there, so create that
+      if (err.code === 'ENOENT') {
+        const made = await makeDirectory(parent, mode)
+        await makeDirectory(dir, mode)
+        // return the shallowest path we created, i.e. the result of creating
+        // the parent
+        return made
+      }
+
+      // an EEXIST means there's already something there
+      // an EROFS means we have a read-only filesystem and can't create a dir
+      // any other error is fatal and we should give up now
+      if (err.code !== 'EEXIST' && err.code !== 'EROFS') {
+        throw err
+      }
+
+      // stat the directory, if the result is a directory, then we successfully
+      // created this one so return its path. otherwise, we reject with the
+      // original error by ignoring the error in the catch
+      try {
+        const stat = await fs.stat(dir)
+        if (stat.isDirectory()) {
+          // if it already existed, we didn't create anything so return
+          // undefined
+          return undefined
+        }
+      } catch (_) {}
+
+      // if the thing that's there isn't a directory, then just re-throw
+      throw err
+    }
+  }
+
+  return makeDirectory(path, options.mode)
+}
+
+module.exports = mkdir

+ 28 - 0
node_modules/@npmcli/fs/lib/mkdtemp.js

@@ -0,0 +1,28 @@
+const { dirname, sep } = require('path')
+
+const fs = require('./fs.js')
+const getOptions = require('./common/get-options.js')
+const owner = require('./common/owner.js')
+
+const mkdtemp = async (prefix, opts) => {
+  const options = getOptions(opts, {
+    copy: ['encoding', 'owner'],
+    wrap: 'encoding',
+  })
+
+  // mkdtemp relies on the trailing path separator to indicate if it should
+  // create a directory inside of the prefix. if that's the case then the root
+  // we infer ownership from is the prefix itself, otherwise it's the dirname
+  // /tmp -> /tmpABCDEF, infers from /
+  // /tmp/ -> /tmp/ABCDEF, infers from /tmp
+  const root = prefix.endsWith(sep) ? prefix : dirname(prefix)
+  const { uid, gid } = await owner.validate(root, options.owner)
+
+  const result = await fs.mkdtemp(prefix, options)
+
+  await owner.update(result, uid, gid)
+
+  return result
+}
+
+module.exports = mkdtemp

+ 22 - 0
node_modules/@npmcli/fs/lib/rm/index.js

@@ -0,0 +1,22 @@
+const fs = require('../fs.js')
+const getOptions = require('../common/get-options.js')
+const node = require('../common/node.js')
+const polyfill = require('./polyfill.js')
+
+// node 14.14.0 added fs.rm, which allows both the force and recursive options
+const useNative = node.satisfies('>=14.14.0')
+
+const rm = async (path, opts) => {
+  const options = getOptions(opts, {
+    copy: ['retryDelay', 'maxRetries', 'recursive', 'force'],
+  })
+
+  // the polyfill is tested separately from this module, no need to hack
+  // process.version to try to trigger it just for coverage
+  // istanbul ignore next
+  return useNative
+    ? fs.rm(path, options)
+    : polyfill(path, options)
+}
+
+module.exports = rm

+ 239 - 0
node_modules/@npmcli/fs/lib/rm/polyfill.js

@@ -0,0 +1,239 @@
+// this file is a modified version of the code in node core >=14.14.0
+// which is, in turn, a modified version of the rimraf module on npm
+// node core changes:
+// - Use of the assert module has been replaced with core's error system.
+// - All code related to the glob dependency has been removed.
+// - Bring your own custom fs module is not currently supported.
+// - Some basic code cleanup.
+// changes here:
+// - remove all callback related code
+// - drop sync support
+// - change assertions back to non-internal methods (see options.js)
+// - throws ENOTDIR when rmdir gets an ENOENT for a path that exists in Windows
+const errnos = require('os').constants.errno
+const { join } = require('path')
+const fs = require('../fs.js')
+
+// error codes that mean we need to remove contents
+const notEmptyCodes = new Set([
+  'ENOTEMPTY',
+  'EEXIST',
+  'EPERM',
+])
+
+// error codes we can retry later
+const retryCodes = new Set([
+  'EBUSY',
+  'EMFILE',
+  'ENFILE',
+  'ENOTEMPTY',
+  'EPERM',
+])
+
+const isWindows = process.platform === 'win32'
+
+const defaultOptions = {
+  retryDelay: 100,
+  maxRetries: 0,
+  recursive: false,
+  force: false,
+}
+
+// this is drastically simplified, but should be roughly equivalent to what
+// node core throws
+class ERR_FS_EISDIR extends Error {
+  constructor (path) {
+    super()
+    this.info = {
+      code: 'EISDIR',
+      message: 'is a directory',
+      path,
+      syscall: 'rm',
+      errno: errnos.EISDIR,
+    }
+    this.name = 'SystemError'
+    this.code = 'ERR_FS_EISDIR'
+    this.errno = errnos.EISDIR
+    this.syscall = 'rm'
+    this.path = path
+    this.message = `Path is a directory: ${this.syscall} returned ` +
+      `${this.info.code} (is a directory) ${path}`
+  }
+
+  toString () {
+    return `${this.name} [${this.code}]: ${this.message}`
+  }
+}
+
+class ENOTDIR extends Error {
+  constructor (path) {
+    super()
+    this.name = 'Error'
+    this.code = 'ENOTDIR'
+    this.errno = errnos.ENOTDIR
+    this.syscall = 'rmdir'
+    this.path = path
+    this.message = `not a directory, ${this.syscall} '${this.path}'`
+  }
+
+  toString () {
+    return `${this.name}: ${this.code}: ${this.message}`
+  }
+}
+
+// force is passed separately here because we respect it for the first entry
+// into rimraf only, any further calls that are spawned as a result (i.e. to
+// delete content within the target) will ignore ENOENT errors
+const rimraf = async (path, options, isTop = false) => {
+  const force = isTop ? options.force : true
+  const stat = await fs.lstat(path)
+    .catch((err) => {
+      // we only ignore ENOENT if we're forcing this call
+      if (err.code === 'ENOENT' && force) {
+        return
+      }
+
+      if (isWindows && err.code === 'EPERM') {
+        return fixEPERM(path, options, err, isTop)
+      }
+
+      throw err
+    })
+
+  // no stat object here means either lstat threw an ENOENT, or lstat threw
+  // an EPERM and the fixEPERM function took care of things. either way, we're
+  // already done, so return early
+  if (!stat) {
+    return
+  }
+
+  if (stat.isDirectory()) {
+    return rmdir(path, options, null, isTop)
+  }
+
+  return fs.unlink(path)
+    .catch((err) => {
+      if (err.code === 'ENOENT' && force) {
+        return
+      }
+
+      if (err.code === 'EISDIR') {
+        return rmdir(path, options, err, isTop)
+      }
+
+      if (err.code === 'EPERM') {
+        // in windows, we handle this through fixEPERM which will also try to
+        // delete things again. everywhere else since deleting the target as a
+        // file didn't work we go ahead and try to delete it as a directory
+        return isWindows
+          ? fixEPERM(path, options, err, isTop)
+          : rmdir(path, options, err, isTop)
+      }
+
+      throw err
+    })
+}
+
+const fixEPERM = async (path, options, originalErr, isTop) => {
+  const force = isTop ? options.force : true
+  const targetMissing = await fs.chmod(path, 0o666)
+    .catch((err) => {
+      if (err.code === 'ENOENT' && force) {
+        return true
+      }
+
+      throw originalErr
+    })
+
+  // got an ENOENT above, return now. no file = no problem
+  if (targetMissing) {
+    return
+  }
+
+  // this function does its own lstat rather than calling rimraf again to avoid
+  // infinite recursion for a repeating EPERM
+  const stat = await fs.lstat(path)
+    .catch((err) => {
+      if (err.code === 'ENOENT' && force) {
+        return
+      }
+
+      throw originalErr
+    })
+
+  if (!stat) {
+    return
+  }
+
+  if (stat.isDirectory()) {
+    return rmdir(path, options, originalErr, isTop)
+  }
+
+  return fs.unlink(path)
+}
+
+const rmdir = async (path, options, originalErr, isTop) => {
+  if (!options.recursive && isTop) {
+    throw originalErr || new ERR_FS_EISDIR(path)
+  }
+  const force = isTop ? options.force : true
+
+  return fs.rmdir(path)
+    .catch(async (err) => {
+      // in Windows, calling rmdir on a file path will fail with ENOENT rather
+      // than ENOTDIR. to determine if that's what happened, we have to do
+      // another lstat on the path. if the path isn't actually gone, we throw
+      // away the ENOENT and replace it with our own ENOTDIR
+      if (isWindows && err.code === 'ENOENT') {
+        const stillExists = await fs.lstat(path).then(() => true, () => false)
+        if (stillExists) {
+          err = new ENOTDIR(path)
+        }
+      }
+
+      // not there, not a problem
+      if (err.code === 'ENOENT' && force) {
+        return
+      }
+
+      // we may not have originalErr if lstat tells us our target is a
+      // directory but that changes before we actually remove it, so
+      // only throw it here if it's set
+      if (originalErr && err.code === 'ENOTDIR') {
+        throw originalErr
+      }
+
+      // the directory isn't empty, remove the contents and try again
+      if (notEmptyCodes.has(err.code)) {
+        const files = await fs.readdir(path)
+        await Promise.all(files.map((file) => {
+          const target = join(path, file)
+          return rimraf(target, options)
+        }))
+        return fs.rmdir(path)
+      }
+
+      throw err
+    })
+}
+
+const rm = async (path, opts) => {
+  const options = { ...defaultOptions, ...opts }
+  let retries = 0
+
+  const errHandler = async (err) => {
+    if (retryCodes.has(err.code) && ++retries < options.maxRetries) {
+      const delay = retries * options.retryDelay
+      await promiseTimeout(delay)
+      return rimraf(path, options, true).catch(errHandler)
+    }
+
+    throw err
+  }
+
+  return rimraf(path, options, true).catch(errHandler)
+}
+
+const promiseTimeout = (ms) => new Promise((r) => setTimeout(r, ms))
+
+module.exports = rm

+ 39 - 0
node_modules/@npmcli/fs/lib/with-temp-dir.js

@@ -0,0 +1,39 @@
+const { join, sep } = require('path')
+
+const getOptions = require('./common/get-options.js')
+const mkdir = require('./mkdir/index.js')
+const mkdtemp = require('./mkdtemp.js')
+const rm = require('./rm/index.js')
+
+// create a temp directory, ensure its permissions match its parent, then call
+// the supplied function passing it the path to the directory. clean up after
+// the function finishes, whether it throws or not
+const withTempDir = async (root, fn, opts) => {
+  const options = getOptions(opts, {
+    copy: ['tmpPrefix'],
+  })
+  // create the directory, and fix its ownership
+  await mkdir(root, { recursive: true, owner: 'inherit' })
+
+  const target = await mkdtemp(join(`${root}${sep}`, options.tmpPrefix || ''), { owner: 'inherit' })
+  let err
+  let result
+
+  try {
+    result = await fn(target)
+  } catch (_err) {
+    err = _err
+  }
+
+  try {
+    await rm(target, { force: true, recursive: true })
+  } catch (err) {}
+
+  if (err) {
+    throw err
+  }
+
+  return result
+}
+
+module.exports = withTempDir

+ 19 - 0
node_modules/@npmcli/fs/lib/write-file.js

@@ -0,0 +1,19 @@
+const fs = require('./fs.js')
+const getOptions = require('./common/get-options.js')
+const owner = require('./common/owner.js')
+
+const writeFile = async (file, data, opts) => {
+  const options = getOptions(opts, {
+    copy: ['encoding', 'mode', 'flag', 'signal', 'owner'],
+    wrap: 'encoding',
+  })
+  const { uid, gid } = await owner.validate(file, options.owner)
+
+  const result = await fs.writeFile(file, data, options)
+
+  await owner.update(file, uid, gid)
+
+  return result
+}
+
+module.exports = writeFile

+ 66 - 0
node_modules/@npmcli/fs/package.json

@@ -0,0 +1,66 @@
+{
+  "_from": "@npmcli/fs@^1.0.0",
+  "_id": "@npmcli/fs@1.1.1",
+  "_inBundle": false,
+  "_integrity": "sha512-8KG5RD0GVP4ydEzRn/I4BNDuxDtqVbOdm8675T49OIG/NGhaK0pjPX7ZcDlvKYbA+ulvVK3ztfcF4uBdOxuJbQ==",
+  "_location": "/@npmcli/fs",
+  "_phantomChildren": {},
+  "_requested": {
+    "type": "range",
+    "registry": true,
+    "raw": "@npmcli/fs@^1.0.0",
+    "name": "@npmcli/fs",
+    "escapedName": "@npmcli%2ffs",
+    "scope": "@npmcli",
+    "rawSpec": "^1.0.0",
+    "saveSpec": null,
+    "fetchSpec": "^1.0.0"
+  },
+  "_requiredBy": [
+    "/cacache"
+  ],
+  "_resolved": "https://registry.npmmirror.com/@npmcli/fs/-/fs-1.1.1.tgz",
+  "_shasum": "72f719fe935e687c56a4faecf3c03d06ba593257",
+  "_spec": "@npmcli/fs@^1.0.0",
+  "_where": "E:\\psy_web_share\\node_modules\\cacache",
+  "author": {
+    "name": "GitHub Inc."
+  },
+  "bundleDependencies": false,
+  "dependencies": {
+    "@gar/promisify": "^1.0.1",
+    "semver": "^7.3.5"
+  },
+  "deprecated": false,
+  "description": "filesystem utilities for the npm cli",
+  "devDependencies": {
+    "@npmcli/template-oss": "^2.3.1",
+    "tap": "^15.0.9"
+  },
+  "files": [
+    "bin",
+    "lib"
+  ],
+  "keywords": [
+    "npm",
+    "oss"
+  ],
+  "license": "ISC",
+  "main": "lib/index.js",
+  "name": "@npmcli/fs",
+  "scripts": {
+    "lint": "eslint '**/*.js'",
+    "lintfix": "npm run lint -- --fix",
+    "npmclilint": "npmcli-lint",
+    "postlint": "npm-template-check",
+    "postsnap": "npm run lintfix --",
+    "posttest": "npm run lint",
+    "postversion": "npm publish",
+    "prepublishOnly": "git push origin --follow-tags",
+    "preversion": "npm test",
+    "snap": "tap",
+    "test": "tap"
+  },
+  "templateVersion": "2.3.1",
+  "version": "1.1.1"
+}

+ 22 - 0
node_modules/@npmcli/move-file/LICENSE.md

@@ -0,0 +1,22 @@
+MIT License
+
+Copyright (c) Sindre Sorhus <sindresorhus@gmail.com> (https://sindresorhus.com)
+Copyright (c) npm, Inc.
+
+Permission is hereby granted, free of charge, to any person obtaining a
+copy of this software and associated documentation files (the "Software"),
+to deal in the Software without restriction, including without limitation
+the rights to use, copy, modify, merge, publish, distribute, sublicense,
+and/or sell copies of the Software, and to permit persons to whom the
+Software is furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
+DEALINGS IN THE SOFTWARE.

+ 69 - 0
node_modules/@npmcli/move-file/README.md

@@ -0,0 +1,69 @@
+# @npmcli/move-file
+
+A fork of [move-file](https://github.com/sindresorhus/move-file) with
+compatibility with all node 10.x versions.
+
+> Move a file (or directory)
+
+The built-in
+[`fs.rename()`](https://nodejs.org/api/fs.html#fs_fs_rename_oldpath_newpath_callback)
+is just a JavaScript wrapper for the C `rename(2)` function, which doesn't
+support moving files across partitions or devices. This module is what you
+would have expected `fs.rename()` to be.
+
+## Highlights
+
+- Promise API.
+- Supports moving a file across partitions and devices.
+- Optionally prevent overwriting an existing file.
+- Creates non-existent destination directories for you.
+- Support for Node versions that lack built-in recursive `fs.mkdir()`
+- Automatically recurses when source is a directory.
+
+## Install
+
+```
+$ npm install @npmcli/move-file
+```
+
+## Usage
+
+```js
+const moveFile = require('@npmcli/move-file');
+
+(async () => {
+	await moveFile('source/unicorn.png', 'destination/unicorn.png');
+	console.log('The file has been moved');
+})();
+```
+
+## API
+
+### moveFile(source, destination, options?)
+
+Returns a `Promise` that resolves when the file has been moved.
+
+### moveFile.sync(source, destination, options?)
+
+#### source
+
+Type: `string`
+
+File, or directory, you want to move.
+
+#### destination
+
+Type: `string`
+
+Where you want the file or directory moved.
+
+#### options
+
+Type: `object`
+
+##### overwrite
+
+Type: `boolean`\
+Default: `true`
+
+Overwrite existing destination file(s).

+ 162 - 0
node_modules/@npmcli/move-file/index.js

@@ -0,0 +1,162 @@
+const { dirname, join, resolve, relative, isAbsolute } = require('path')
+const rimraf_ = require('rimraf')
+const { promisify } = require('util')
+const {
+  access: access_,
+  accessSync,
+  copyFile: copyFile_,
+  copyFileSync,
+  unlink: unlink_,
+  unlinkSync,
+  readdir: readdir_,
+  readdirSync,
+  rename: rename_,
+  renameSync,
+  stat: stat_,
+  statSync,
+  lstat: lstat_,
+  lstatSync,
+  symlink: symlink_,
+  symlinkSync,
+  readlink: readlink_,
+  readlinkSync
+} = require('fs')
+
+const access = promisify(access_)
+const copyFile = promisify(copyFile_)
+const unlink = promisify(unlink_)
+const readdir = promisify(readdir_)
+const rename = promisify(rename_)
+const stat = promisify(stat_)
+const lstat = promisify(lstat_)
+const symlink = promisify(symlink_)
+const readlink = promisify(readlink_)
+const rimraf = promisify(rimraf_)
+const rimrafSync = rimraf_.sync
+
+const mkdirp = require('mkdirp')
+
+const pathExists = async path => {
+  try {
+    await access(path)
+    return true
+  } catch (er) {
+    return er.code !== 'ENOENT'
+  }
+}
+
+const pathExistsSync = path => {
+  try {
+    accessSync(path)
+    return true
+  } catch (er) {
+    return er.code !== 'ENOENT'
+  }
+}
+
+const moveFile = async (source, destination, options = {}, root = true, symlinks = []) => {
+  if (!source || !destination) {
+    throw new TypeError('`source` and `destination` file required')
+  }
+
+  options = {
+    overwrite: true,
+    ...options
+  }
+
+  if (!options.overwrite && await pathExists(destination)) {
+    throw new Error(`The destination file exists: ${destination}`)
+  }
+
+  await mkdirp(dirname(destination))
+
+  try {
+    await rename(source, destination)
+  } catch (error) {
+    if (error.code === 'EXDEV' || error.code === 'EPERM') {
+      const sourceStat = await lstat(source)
+      if (sourceStat.isDirectory()) {
+        const files = await readdir(source)
+        await Promise.all(files.map((file) => moveFile(join(source, file), join(destination, file), options, false, symlinks)))
+      } else if (sourceStat.isSymbolicLink()) {
+        symlinks.push({ source, destination })
+      } else {
+        await copyFile(source, destination)
+      }
+    } else {
+      throw error
+    }
+  }
+
+  if (root) {
+    await Promise.all(symlinks.map(async ({ source, destination }) => {
+      let target = await readlink(source)
+      // junction symlinks in windows will be absolute paths, so we need to make sure they point to the destination
+      if (isAbsolute(target))
+        target = resolve(destination, relative(source, target))
+      // try to determine what the actual file is so we can create the correct type of symlink in windows
+      let targetStat
+      try {
+        targetStat = await stat(resolve(dirname(source), target))
+      } catch (err) {}
+      await symlink(target, destination, targetStat && targetStat.isDirectory() ? 'junction' : 'file')
+    }))
+    await rimraf(source)
+  }
+}
+
+const moveFileSync = (source, destination, options = {}, root = true, symlinks = []) => {
+  if (!source || !destination) {
+    throw new TypeError('`source` and `destination` file required')
+  }
+
+  options = {
+    overwrite: true,
+    ...options
+  }
+
+  if (!options.overwrite && pathExistsSync(destination)) {
+    throw new Error(`The destination file exists: ${destination}`)
+  }
+
+  mkdirp.sync(dirname(destination))
+
+  try {
+    renameSync(source, destination)
+  } catch (error) {
+    if (error.code === 'EXDEV' || error.code === 'EPERM') {
+      const sourceStat = lstatSync(source)
+      if (sourceStat.isDirectory()) {
+        const files = readdirSync(source)
+        for (const file of files) {
+          moveFileSync(join(source, file), join(destination, file), options, false, symlinks)
+        }
+      } else if (sourceStat.isSymbolicLink()) {
+        symlinks.push({ source, destination })
+      } else {
+        copyFileSync(source, destination)
+      }
+    } else {
+      throw error
+    }
+  }
+
+  if (root) {
+    for (const { source, destination } of symlinks) {
+      let target = readlinkSync(source)
+      // junction symlinks in windows will be absolute paths, so we need to make sure they point to the destination
+      if (isAbsolute(target))
+        target = resolve(destination, relative(source, target))
+      // try to determine what the actual file is so we can create the correct type of symlink in windows
+      let targetStat
+      try {
+        targetStat = statSync(resolve(dirname(source), target))
+      } catch (err) {}
+      symlinkSync(target, destination, targetStat && targetStat.isDirectory() ? 'junction' : 'file')
+    }
+    rimrafSync(source)
+  }
+}
+
+module.exports = moveFile
+module.exports.sync = moveFileSync

+ 64 - 0
node_modules/@npmcli/move-file/package.json

@@ -0,0 +1,64 @@
+{
+  "_from": "@npmcli/move-file@^1.0.1",
+  "_id": "@npmcli/move-file@1.1.2",
+  "_inBundle": false,
+  "_integrity": "sha512-1SUf/Cg2GzGDyaf15aR9St9TWlb+XvbZXWpDx8YKs7MLzMH/BCeopv+y9vzrzgkfykCGuWOlSu3mZhj2+FQcrg==",
+  "_location": "/@npmcli/move-file",
+  "_phantomChildren": {},
+  "_requested": {
+    "type": "range",
+    "registry": true,
+    "raw": "@npmcli/move-file@^1.0.1",
+    "name": "@npmcli/move-file",
+    "escapedName": "@npmcli%2fmove-file",
+    "scope": "@npmcli",
+    "rawSpec": "^1.0.1",
+    "saveSpec": null,
+    "fetchSpec": "^1.0.1"
+  },
+  "_requiredBy": [
+    "/cacache"
+  ],
+  "_resolved": "https://registry.npmmirror.com/@npmcli/move-file/-/move-file-1.1.2.tgz",
+  "_shasum": "1a82c3e372f7cae9253eb66d72543d6b8685c674",
+  "_spec": "@npmcli/move-file@^1.0.1",
+  "_where": "E:\\psy_web_share\\node_modules\\cacache",
+  "bugs": {
+    "url": "https://github.com/npm/move-file/issues"
+  },
+  "bundleDependencies": false,
+  "dependencies": {
+    "mkdirp": "^1.0.4",
+    "rimraf": "^3.0.2"
+  },
+  "deprecated": "This functionality has been moved to @npmcli/fs",
+  "description": "move a file (fork of move-file)",
+  "devDependencies": {
+    "require-inject": "^1.4.4",
+    "tap": "^14.10.7"
+  },
+  "engines": {
+    "node": ">=10"
+  },
+  "files": [
+    "index.js"
+  ],
+  "homepage": "https://github.com/npm/move-file#readme",
+  "license": "MIT",
+  "name": "@npmcli/move-file",
+  "repository": {
+    "type": "git",
+    "url": "git+https://github.com/npm/move-file.git"
+  },
+  "scripts": {
+    "postversion": "npm publish",
+    "prepublishOnly": "git push origin --follow-tags",
+    "preversion": "npm test",
+    "snap": "tap",
+    "test": "tap"
+  },
+  "tap": {
+    "check-coverage": true
+  },
+  "version": "1.1.2"
+}

+ 51 - 0
node_modules/aggregate-error/index.d.ts

@@ -0,0 +1,51 @@
+/**
+Create an error from multiple errors.
+*/
+declare class AggregateError<T extends Error = Error> extends Error implements Iterable<T> {
+	readonly name: 'AggregateError';
+
+	/**
+	@param errors - If a string, a new `Error` is created with the string as the error message. If a non-Error object, a new `Error` is created with all properties from the object copied over.
+	@returns An Error that is also an [`Iterable`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Guide/Iterators_and_Generators#Iterables) for the individual errors.
+
+	@example
+	```
+	import AggregateError = require('aggregate-error');
+
+	const error = new AggregateError([new Error('foo'), 'bar', {message: 'baz'}]);
+
+	throw error;
+
+	// AggregateError:
+	//	Error: foo
+	//		at Object.<anonymous> (/Users/sindresorhus/dev/aggregate-error/example.js:3:33)
+	//	Error: bar
+	//		at Object.<anonymous> (/Users/sindresorhus/dev/aggregate-error/example.js:3:13)
+	//	Error: baz
+	//		at Object.<anonymous> (/Users/sindresorhus/dev/aggregate-error/example.js:3:13)
+	//	at AggregateError (/Users/sindresorhus/dev/aggregate-error/index.js:19:3)
+	//	at Object.<anonymous> (/Users/sindresorhus/dev/aggregate-error/example.js:3:13)
+	//	at Module._compile (module.js:556:32)
+	//	at Object.Module._extensions..js (module.js:565:10)
+	//	at Module.load (module.js:473:32)
+	//	at tryModuleLoad (module.js:432:12)
+	//	at Function.Module._load (module.js:424:3)
+	//	at Module.runMain (module.js:590:10)
+	//	at run (bootstrap_node.js:394:7)
+	//	at startup (bootstrap_node.js:149:9)
+
+
+	for (const individualError of error) {
+		console.log(individualError);
+	}
+	//=> [Error: foo]
+	//=> [Error: bar]
+	//=> [Error: baz]
+	```
+	*/
+	constructor(errors: ReadonlyArray<T | {[key: string]: any} | string>);
+
+	[Symbol.iterator](): IterableIterator<T>;
+}
+
+export = AggregateError;

+ 47 - 0
node_modules/aggregate-error/index.js

@@ -0,0 +1,47 @@
+'use strict';
+const indentString = require('indent-string');
+const cleanStack = require('clean-stack');
+
+const cleanInternalStack = stack => stack.replace(/\s+at .*aggregate-error\/index.js:\d+:\d+\)?/g, '');
+
+class AggregateError extends Error {
+	constructor(errors) {
+		if (!Array.isArray(errors)) {
+			throw new TypeError(`Expected input to be an Array, got ${typeof errors}`);
+		}
+
+		errors = [...errors].map(error => {
+			if (error instanceof Error) {
+				return error;
+			}
+
+			if (error !== null && typeof error === 'object') {
+				// Handle plain error objects with message property and/or possibly other metadata
+				return Object.assign(new Error(error.message), error);
+			}
+
+			return new Error(error);
+		});
+
+		let message = errors
+			.map(error => {
+				// The `stack` property is not standardized, so we can't assume it exists
+				return typeof error.stack === 'string' ? cleanInternalStack(cleanStack(error.stack)) : String(error);
+			})
+			.join('\n');
+		message = '\n' + indentString(message, 4);
+		super(message);
+
+		this.name = 'AggregateError';
+
+		Object.defineProperty(this, '_errors', {value: errors});
+	}
+
+	* [Symbol.iterator]() {
+		for (const error of this._errors) {
+			yield error;
+		}
+	}
+}
+
+module.exports = AggregateError;

+ 9 - 0
node_modules/aggregate-error/license

@@ -0,0 +1,9 @@
+MIT License
+
+Copyright (c) Sindre Sorhus <sindresorhus@gmail.com> (sindresorhus.com)
+
+Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

+ 73 - 0
node_modules/aggregate-error/package.json

@@ -0,0 +1,73 @@
+{
+  "_from": "aggregate-error@^3.0.0",
+  "_id": "aggregate-error@3.1.0",
+  "_inBundle": false,
+  "_integrity": "sha512-4I7Td01quW/RpocfNayFdFVk1qSuoh0E7JrbRJ16nH01HhKFQ88INq9Sd+nd72zqRySlr9BmDA8xlEJ6vJMrYA==",
+  "_location": "/aggregate-error",
+  "_phantomChildren": {},
+  "_requested": {
+    "type": "range",
+    "registry": true,
+    "raw": "aggregate-error@^3.0.0",
+    "name": "aggregate-error",
+    "escapedName": "aggregate-error",
+    "rawSpec": "^3.0.0",
+    "saveSpec": null,
+    "fetchSpec": "^3.0.0"
+  },
+  "_requiredBy": [
+    "/p-map"
+  ],
+  "_resolved": "https://registry.npmmirror.com/aggregate-error/-/aggregate-error-3.1.0.tgz",
+  "_shasum": "92670ff50f5359bdb7a3e0d40d0ec30c5737687a",
+  "_spec": "aggregate-error@^3.0.0",
+  "_where": "E:\\psy_web_share\\node_modules\\p-map",
+  "author": {
+    "name": "Sindre Sorhus",
+    "email": "sindresorhus@gmail.com",
+    "url": "sindresorhus.com"
+  },
+  "bugs": {
+    "url": "https://github.com/sindresorhus/aggregate-error/issues"
+  },
+  "bundleDependencies": false,
+  "dependencies": {
+    "clean-stack": "^2.0.0",
+    "indent-string": "^4.0.0"
+  },
+  "deprecated": false,
+  "description": "Create an error from multiple errors",
+  "devDependencies": {
+    "ava": "^2.4.0",
+    "tsd": "^0.7.1",
+    "xo": "^0.25.3"
+  },
+  "engines": {
+    "node": ">=8"
+  },
+  "files": [
+    "index.js",
+    "index.d.ts"
+  ],
+  "homepage": "https://github.com/sindresorhus/aggregate-error#readme",
+  "keywords": [
+    "aggregate",
+    "error",
+    "combine",
+    "multiple",
+    "many",
+    "collection",
+    "iterable",
+    "iterator"
+  ],
+  "license": "MIT",
+  "name": "aggregate-error",
+  "repository": {
+    "type": "git",
+    "url": "git+https://github.com/sindresorhus/aggregate-error.git"
+  },
+  "scripts": {
+    "test": "xo && ava && tsd"
+  },
+  "version": "3.1.0"
+}

+ 61 - 0
node_modules/aggregate-error/readme.md

@@ -0,0 +1,61 @@
+# aggregate-error [![Build Status](https://travis-ci.org/sindresorhus/aggregate-error.svg?branch=master)](https://travis-ci.org/sindresorhus/aggregate-error)
+
+> Create an error from multiple errors
+
+
+## Install
+
+```
+$ npm install aggregate-error
+```
+
+
+## Usage
+
+```js
+const AggregateError = require('aggregate-error');
+
+const error = new AggregateError([new Error('foo'), 'bar', {message: 'baz'}]);
+
+throw error;
+/*
+AggregateError:
+    Error: foo
+        at Object.<anonymous> (/Users/sindresorhus/dev/aggregate-error/example.js:3:33)
+    Error: bar
+        at Object.<anonymous> (/Users/sindresorhus/dev/aggregate-error/example.js:3:13)
+    Error: baz
+        at Object.<anonymous> (/Users/sindresorhus/dev/aggregate-error/example.js:3:13)
+    at AggregateError (/Users/sindresorhus/dev/aggregate-error/index.js:19:3)
+    at Object.<anonymous> (/Users/sindresorhus/dev/aggregate-error/example.js:3:13)
+    at Module._compile (module.js:556:32)
+    at Object.Module._extensions..js (module.js:565:10)
+    at Module.load (module.js:473:32)
+    at tryModuleLoad (module.js:432:12)
+    at Function.Module._load (module.js:424:3)
+    at Module.runMain (module.js:590:10)
+    at run (bootstrap_node.js:394:7)
+    at startup (bootstrap_node.js:149:9)
+*/
+
+for (const individualError of error) {
+	console.log(individualError);
+}
+//=> [Error: foo]
+//=> [Error: bar]
+//=> [Error: baz]
+```
+
+
+## API
+
+### AggregateError(errors)
+
+Returns an `Error` that is also an [`Iterable`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Guide/Iterators_and_Generators#Iterables) for the individual errors.
+
+#### errors
+
+Type: `Array<Error|Object|string>`
+
+If a string, a new `Error` is created with the string as the error message.<br>
+If a non-Error object, a new `Error` is created with all properties from the object copied over.

+ 2 - 0
node_modules/balanced-match/.github/FUNDING.yml

@@ -0,0 +1,2 @@
+tidelift: "npm/balanced-match"
+patreon: juliangruber

+ 21 - 0
node_modules/balanced-match/LICENSE.md

@@ -0,0 +1,21 @@
+(MIT)
+
+Copyright (c) 2013 Julian Gruber &lt;julian@juliangruber.com&gt;
+
+Permission is hereby granted, free of charge, to any person obtaining a copy of
+this software and associated documentation files (the "Software"), to deal in
+the Software without restriction, including without limitation the rights to
+use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
+of the Software, and to permit persons to whom the Software is furnished to do
+so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.

+ 97 - 0
node_modules/balanced-match/README.md

@@ -0,0 +1,97 @@
+# balanced-match
+
+Match balanced string pairs, like `{` and `}` or `<b>` and `</b>`. Supports regular expressions as well!
+
+[![build status](https://secure.travis-ci.org/juliangruber/balanced-match.svg)](http://travis-ci.org/juliangruber/balanced-match)
+[![downloads](https://img.shields.io/npm/dm/balanced-match.svg)](https://www.npmjs.org/package/balanced-match)
+
+[![testling badge](https://ci.testling.com/juliangruber/balanced-match.png)](https://ci.testling.com/juliangruber/balanced-match)
+
+## Example
+
+Get the first matching pair of braces:
+
+```js
+var balanced = require('balanced-match');
+
+console.log(balanced('{', '}', 'pre{in{nested}}post'));
+console.log(balanced('{', '}', 'pre{first}between{second}post'));
+console.log(balanced(/\s+\{\s+/, /\s+\}\s+/, 'pre  {   in{nest}   }  post'));
+```
+
+The matches are:
+
+```bash
+$ node example.js
+{ start: 3, end: 14, pre: 'pre', body: 'in{nested}', post: 'post' }
+{ start: 3,
+  end: 9,
+  pre: 'pre',
+  body: 'first',
+  post: 'between{second}post' }
+{ start: 3, end: 17, pre: 'pre', body: 'in{nest}', post: 'post' }
+```
+
+## API
+
+### var m = balanced(a, b, str)
+
+For the first non-nested matching pair of `a` and `b` in `str`, return an
+object with those keys:
+
+* **start** the index of the first match of `a`
+* **end** the index of the matching `b`
+* **pre** the preamble, `a` and `b` not included
+* **body** the match, `a` and `b` not included
+* **post** the postscript, `a` and `b` not included
+
+If there's no match, `undefined` will be returned.
+
+If the `str` contains more `a` than `b` / there are unmatched pairs, the first match that was closed will be used. For example, `{{a}` will match `['{', 'a', '']` and `{a}}` will match `['', 'a', '}']`.
+
+### var r = balanced.range(a, b, str)
+
+For the first non-nested matching pair of `a` and `b` in `str`, return an
+array with indexes: `[ <a index>, <b index> ]`.
+
+If there's no match, `undefined` will be returned.
+
+If the `str` contains more `a` than `b` / there are unmatched pairs, the first match that was closed will be used. For example, `{{a}` will match `[ 1, 3 ]` and `{a}}` will match `[0, 2]`.
+
+## Installation
+
+With [npm](https://npmjs.org) do:
+
+```bash
+npm install balanced-match
+```
+
+## Security contact information
+
+To report a security vulnerability, please use the
+[Tidelift security contact](https://tidelift.com/security).
+Tidelift will coordinate the fix and disclosure.
+
+## License
+
+(MIT)
+
+Copyright (c) 2013 Julian Gruber &lt;julian@juliangruber.com&gt;
+
+Permission is hereby granted, free of charge, to any person obtaining a copy of
+this software and associated documentation files (the "Software"), to deal in
+the Software without restriction, including without limitation the rights to
+use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
+of the Software, and to permit persons to whom the Software is furnished to do
+so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.

+ 62 - 0
node_modules/balanced-match/index.js

@@ -0,0 +1,62 @@
+'use strict';
+module.exports = balanced;
+function balanced(a, b, str) {
+  if (a instanceof RegExp) a = maybeMatch(a, str);
+  if (b instanceof RegExp) b = maybeMatch(b, str);
+
+  var r = range(a, b, str);
+
+  return r && {
+    start: r[0],
+    end: r[1],
+    pre: str.slice(0, r[0]),
+    body: str.slice(r[0] + a.length, r[1]),
+    post: str.slice(r[1] + b.length)
+  };
+}
+
+function maybeMatch(reg, str) {
+  var m = str.match(reg);
+  return m ? m[0] : null;
+}
+
+balanced.range = range;
+function range(a, b, str) {
+  var begs, beg, left, right, result;
+  var ai = str.indexOf(a);
+  var bi = str.indexOf(b, ai + 1);
+  var i = ai;
+
+  if (ai >= 0 && bi > 0) {
+    if(a===b) {
+      return [ai, bi];
+    }
+    begs = [];
+    left = str.length;
+
+    while (i >= 0 && !result) {
+      if (i == ai) {
+        begs.push(i);
+        ai = str.indexOf(a, i + 1);
+      } else if (begs.length == 1) {
+        result = [ begs.pop(), bi ];
+      } else {
+        beg = begs.pop();
+        if (beg < left) {
+          left = beg;
+          right = bi;
+        }
+
+        bi = str.indexOf(b, i + 1);
+      }
+
+      i = ai < bi && ai >= 0 ? ai : bi;
+    }
+
+    if (begs.length) {
+      result = [ left, right ];
+    }
+  }
+
+  return result;
+}

+ 76 - 0
node_modules/balanced-match/package.json

@@ -0,0 +1,76 @@
+{
+  "_from": "balanced-match@^1.0.0",
+  "_id": "balanced-match@1.0.2",
+  "_inBundle": false,
+  "_integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==",
+  "_location": "/balanced-match",
+  "_phantomChildren": {},
+  "_requested": {
+    "type": "range",
+    "registry": true,
+    "raw": "balanced-match@^1.0.0",
+    "name": "balanced-match",
+    "escapedName": "balanced-match",
+    "rawSpec": "^1.0.0",
+    "saveSpec": null,
+    "fetchSpec": "^1.0.0"
+  },
+  "_requiredBy": [
+    "/brace-expansion"
+  ],
+  "_resolved": "https://registry.npmmirror.com/balanced-match/-/balanced-match-1.0.2.tgz",
+  "_shasum": "e83e3a7e3f300b34cb9d87f615fa0cbf357690ee",
+  "_spec": "balanced-match@^1.0.0",
+  "_where": "E:\\psy_web_share\\node_modules\\brace-expansion",
+  "author": {
+    "name": "Julian Gruber",
+    "email": "mail@juliangruber.com",
+    "url": "http://juliangruber.com"
+  },
+  "bugs": {
+    "url": "https://github.com/juliangruber/balanced-match/issues"
+  },
+  "bundleDependencies": false,
+  "deprecated": false,
+  "description": "Match balanced character pairs, like \"{\" and \"}\"",
+  "devDependencies": {
+    "matcha": "^0.7.0",
+    "tape": "^4.6.0"
+  },
+  "homepage": "https://github.com/juliangruber/balanced-match",
+  "keywords": [
+    "match",
+    "regexp",
+    "test",
+    "balanced",
+    "parse"
+  ],
+  "license": "MIT",
+  "main": "index.js",
+  "name": "balanced-match",
+  "repository": {
+    "type": "git",
+    "url": "git://github.com/juliangruber/balanced-match.git"
+  },
+  "scripts": {
+    "bench": "matcha test/bench.js",
+    "test": "tape test/test.js"
+  },
+  "testling": {
+    "files": "test/*.js",
+    "browsers": [
+      "ie/8..latest",
+      "firefox/20..latest",
+      "firefox/nightly",
+      "chrome/25..latest",
+      "chrome/canary",
+      "opera/12..latest",
+      "opera/next",
+      "safari/5.1..latest",
+      "ipad/6.0..latest",
+      "iphone/6.0..latest",
+      "android-browser/4.2..latest"
+    ]
+  },
+  "version": "1.0.2"
+}

+ 21 - 0
node_modules/brace-expansion/LICENSE

@@ -0,0 +1,21 @@
+MIT License
+
+Copyright (c) 2013 Julian Gruber <julian@juliangruber.com>
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.

+ 129 - 0
node_modules/brace-expansion/README.md

@@ -0,0 +1,129 @@
+# brace-expansion
+
+[Brace expansion](https://www.gnu.org/software/bash/manual/html_node/Brace-Expansion.html), 
+as known from sh/bash, in JavaScript.
+
+[![build status](https://secure.travis-ci.org/juliangruber/brace-expansion.svg)](http://travis-ci.org/juliangruber/brace-expansion)
+[![downloads](https://img.shields.io/npm/dm/brace-expansion.svg)](https://www.npmjs.org/package/brace-expansion)
+[![Greenkeeper badge](https://badges.greenkeeper.io/juliangruber/brace-expansion.svg)](https://greenkeeper.io/)
+
+[![testling badge](https://ci.testling.com/juliangruber/brace-expansion.png)](https://ci.testling.com/juliangruber/brace-expansion)
+
+## Example
+
+```js
+var expand = require('brace-expansion');
+
+expand('file-{a,b,c}.jpg')
+// => ['file-a.jpg', 'file-b.jpg', 'file-c.jpg']
+
+expand('-v{,,}')
+// => ['-v', '-v', '-v']
+
+expand('file{0..2}.jpg')
+// => ['file0.jpg', 'file1.jpg', 'file2.jpg']
+
+expand('file-{a..c}.jpg')
+// => ['file-a.jpg', 'file-b.jpg', 'file-c.jpg']
+
+expand('file{2..0}.jpg')
+// => ['file2.jpg', 'file1.jpg', 'file0.jpg']
+
+expand('file{0..4..2}.jpg')
+// => ['file0.jpg', 'file2.jpg', 'file4.jpg']
+
+expand('file-{a..e..2}.jpg')
+// => ['file-a.jpg', 'file-c.jpg', 'file-e.jpg']
+
+expand('file{00..10..5}.jpg')
+// => ['file00.jpg', 'file05.jpg', 'file10.jpg']
+
+expand('{{A..C},{a..c}}')
+// => ['A', 'B', 'C', 'a', 'b', 'c']
+
+expand('ppp{,config,oe{,conf}}')
+// => ['ppp', 'pppconfig', 'pppoe', 'pppoeconf']
+```
+
+## API
+
+```js
+var expand = require('brace-expansion');
+```
+
+### var expanded = expand(str)
+
+Return an array of all possible and valid expansions of `str`. If none are
+found, `[str]` is returned.
+
+Valid expansions are:
+
+```js
+/^(.*,)+(.+)?$/
+// {a,b,...}
+```
+
+A comma separated list of options, like `{a,b}` or `{a,{b,c}}` or `{,a,}`.
+
+```js
+/^-?\d+\.\.-?\d+(\.\.-?\d+)?$/
+// {x..y[..incr]}
+```
+
+A numeric sequence from `x` to `y` inclusive, with optional increment.
+If `x` or `y` start with a leading `0`, all the numbers will be padded
+to have equal length. Negative numbers and backwards iteration work too.
+
+```js
+/^[a-zA-Z]\.\.[a-zA-Z](\.\.-?\d+)?$/
+// {x..y[..incr]}
+```
+
+An alphabetic sequence from `x` to `y` inclusive, with optional increment.
+`x` and `y` must be exactly one character, and if given, `incr` must be a
+number.
+
+For compatibility reasons, the string `${` is not eligible for brace expansion.
+
+## Installation
+
+With [npm](https://npmjs.org) do:
+
+```bash
+npm install brace-expansion
+```
+
+## Contributors
+
+- [Julian Gruber](https://github.com/juliangruber)
+- [Isaac Z. Schlueter](https://github.com/isaacs)
+
+## Sponsors
+
+This module is proudly supported by my [Sponsors](https://github.com/juliangruber/sponsors)!
+
+Do you want to support modules like this to improve their quality, stability and weigh in on new features? Then please consider donating to my [Patreon](https://www.patreon.com/juliangruber). Not sure how much of my modules you're using? Try [feross/thanks](https://github.com/feross/thanks)!
+
+## License
+
+(MIT)
+
+Copyright (c) 2013 Julian Gruber &lt;julian@juliangruber.com&gt;
+
+Permission is hereby granted, free of charge, to any person obtaining a copy of
+this software and associated documentation files (the "Software"), to deal in
+the Software without restriction, including without limitation the rights to
+use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
+of the Software, and to permit persons to whom the Software is furnished to do
+so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.

+ 201 - 0
node_modules/brace-expansion/index.js

@@ -0,0 +1,201 @@
+var concatMap = require('concat-map');
+var balanced = require('balanced-match');
+
+module.exports = expandTop;
+
+var escSlash = '\0SLASH'+Math.random()+'\0';
+var escOpen = '\0OPEN'+Math.random()+'\0';
+var escClose = '\0CLOSE'+Math.random()+'\0';
+var escComma = '\0COMMA'+Math.random()+'\0';
+var escPeriod = '\0PERIOD'+Math.random()+'\0';
+
+function numeric(str) {
+  return parseInt(str, 10) == str
+    ? parseInt(str, 10)
+    : str.charCodeAt(0);
+}
+
+function escapeBraces(str) {
+  return str.split('\\\\').join(escSlash)
+            .split('\\{').join(escOpen)
+            .split('\\}').join(escClose)
+            .split('\\,').join(escComma)
+            .split('\\.').join(escPeriod);
+}
+
+function unescapeBraces(str) {
+  return str.split(escSlash).join('\\')
+            .split(escOpen).join('{')
+            .split(escClose).join('}')
+            .split(escComma).join(',')
+            .split(escPeriod).join('.');
+}
+
+
+// Basically just str.split(","), but handling cases
+// where we have nested braced sections, which should be
+// treated as individual members, like {a,{b,c},d}
+function parseCommaParts(str) {
+  if (!str)
+    return [''];
+
+  var parts = [];
+  var m = balanced('{', '}', str);
+
+  if (!m)
+    return str.split(',');
+
+  var pre = m.pre;
+  var body = m.body;
+  var post = m.post;
+  var p = pre.split(',');
+
+  p[p.length-1] += '{' + body + '}';
+  var postParts = parseCommaParts(post);
+  if (post.length) {
+    p[p.length-1] += postParts.shift();
+    p.push.apply(p, postParts);
+  }
+
+  parts.push.apply(parts, p);
+
+  return parts;
+}
+
+function expandTop(str) {
+  if (!str)
+    return [];
+
+  // I don't know why Bash 4.3 does this, but it does.
+  // Anything starting with {} will have the first two bytes preserved
+  // but *only* at the top level, so {},a}b will not expand to anything,
+  // but a{},b}c will be expanded to [a}c,abc].
+  // One could argue that this is a bug in Bash, but since the goal of
+  // this module is to match Bash's rules, we escape a leading {}
+  if (str.substr(0, 2) === '{}') {
+    str = '\\{\\}' + str.substr(2);
+  }
+
+  return expand(escapeBraces(str), true).map(unescapeBraces);
+}
+
+function identity(e) {
+  return e;
+}
+
+function embrace(str) {
+  return '{' + str + '}';
+}
+function isPadded(el) {
+  return /^-?0\d/.test(el);
+}
+
+function lte(i, y) {
+  return i <= y;
+}
+function gte(i, y) {
+  return i >= y;
+}
+
+function expand(str, isTop) {
+  var expansions = [];
+
+  var m = balanced('{', '}', str);
+  if (!m || /\$$/.test(m.pre)) return [str];
+
+  var isNumericSequence = /^-?\d+\.\.-?\d+(?:\.\.-?\d+)?$/.test(m.body);
+  var isAlphaSequence = /^[a-zA-Z]\.\.[a-zA-Z](?:\.\.-?\d+)?$/.test(m.body);
+  var isSequence = isNumericSequence || isAlphaSequence;
+  var isOptions = m.body.indexOf(',') >= 0;
+  if (!isSequence && !isOptions) {
+    // {a},b}
+    if (m.post.match(/,.*\}/)) {
+      str = m.pre + '{' + m.body + escClose + m.post;
+      return expand(str);
+    }
+    return [str];
+  }
+
+  var n;
+  if (isSequence) {
+    n = m.body.split(/\.\./);
+  } else {
+    n = parseCommaParts(m.body);
+    if (n.length === 1) {
+      // x{{a,b}}y ==> x{a}y x{b}y
+      n = expand(n[0], false).map(embrace);
+      if (n.length === 1) {
+        var post = m.post.length
+          ? expand(m.post, false)
+          : [''];
+        return post.map(function(p) {
+          return m.pre + n[0] + p;
+        });
+      }
+    }
+  }
+
+  // at this point, n is the parts, and we know it's not a comma set
+  // with a single entry.
+
+  // no need to expand pre, since it is guaranteed to be free of brace-sets
+  var pre = m.pre;
+  var post = m.post.length
+    ? expand(m.post, false)
+    : [''];
+
+  var N;
+
+  if (isSequence) {
+    var x = numeric(n[0]);
+    var y = numeric(n[1]);
+    var width = Math.max(n[0].length, n[1].length)
+    var incr = n.length == 3
+      ? Math.abs(numeric(n[2]))
+      : 1;
+    var test = lte;
+    var reverse = y < x;
+    if (reverse) {
+      incr *= -1;
+      test = gte;
+    }
+    var pad = n.some(isPadded);
+
+    N = [];
+
+    for (var i = x; test(i, y); i += incr) {
+      var c;
+      if (isAlphaSequence) {
+        c = String.fromCharCode(i);
+        if (c === '\\')
+          c = '';
+      } else {
+        c = String(i);
+        if (pad) {
+          var need = width - c.length;
+          if (need > 0) {
+            var z = new Array(need + 1).join('0');
+            if (i < 0)
+              c = '-' + z + c.slice(1);
+            else
+              c = z + c;
+          }
+        }
+      }
+      N.push(c);
+    }
+  } else {
+    N = concatMap(n, function(el) { return expand(el, false) });
+  }
+
+  for (var j = 0; j < N.length; j++) {
+    for (var k = 0; k < post.length; k++) {
+      var expansion = pre + N[j] + post[k];
+      if (!isTop || isSequence || expansion)
+        expansions.push(expansion);
+    }
+  }
+
+  return expansions;
+}
+

+ 75 - 0
node_modules/brace-expansion/package.json

@@ -0,0 +1,75 @@
+{
+  "_from": "brace-expansion@^1.1.7",
+  "_id": "brace-expansion@1.1.11",
+  "_inBundle": false,
+  "_integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==",
+  "_location": "/brace-expansion",
+  "_phantomChildren": {},
+  "_requested": {
+    "type": "range",
+    "registry": true,
+    "raw": "brace-expansion@^1.1.7",
+    "name": "brace-expansion",
+    "escapedName": "brace-expansion",
+    "rawSpec": "^1.1.7",
+    "saveSpec": null,
+    "fetchSpec": "^1.1.7"
+  },
+  "_requiredBy": [
+    "/minimatch"
+  ],
+  "_resolved": "https://registry.npmmirror.com/brace-expansion/-/brace-expansion-1.1.11.tgz",
+  "_shasum": "3c7fcbf529d87226f3d2f52b966ff5271eb441dd",
+  "_spec": "brace-expansion@^1.1.7",
+  "_where": "E:\\psy_web_share\\node_modules\\minimatch",
+  "author": {
+    "name": "Julian Gruber",
+    "email": "mail@juliangruber.com",
+    "url": "http://juliangruber.com"
+  },
+  "bugs": {
+    "url": "https://github.com/juliangruber/brace-expansion/issues"
+  },
+  "bundleDependencies": false,
+  "dependencies": {
+    "balanced-match": "^1.0.0",
+    "concat-map": "0.0.1"
+  },
+  "deprecated": false,
+  "description": "Brace expansion as known from sh/bash",
+  "devDependencies": {
+    "matcha": "^0.7.0",
+    "tape": "^4.6.0"
+  },
+  "homepage": "https://github.com/juliangruber/brace-expansion",
+  "keywords": [],
+  "license": "MIT",
+  "main": "index.js",
+  "name": "brace-expansion",
+  "repository": {
+    "type": "git",
+    "url": "git://github.com/juliangruber/brace-expansion.git"
+  },
+  "scripts": {
+    "bench": "matcha test/perf/bench.js",
+    "gentest": "bash test/generate.sh",
+    "test": "tape test/*.js"
+  },
+  "testling": {
+    "files": "test/*.js",
+    "browsers": [
+      "ie/8..latest",
+      "firefox/20..latest",
+      "firefox/nightly",
+      "chrome/25..latest",
+      "chrome/canary",
+      "opera/12..latest",
+      "opera/next",
+      "safari/5.1..latest",
+      "ipad/6.0..latest",
+      "iphone/6.0..latest",
+      "android-browser/4.2..latest"
+    ]
+  },
+  "version": "1.1.11"
+}

+ 16 - 0
node_modules/cacache/LICENSE.md

@@ -0,0 +1,16 @@
+ISC License
+
+Copyright (c) npm, Inc.
+
+Permission to use, copy, modify, and/or distribute this software for
+any purpose with or without fee is hereby granted, provided that the
+above copyright notice and this permission notice appear in all copies.
+
+THE SOFTWARE IS PROVIDED "AS IS" AND THE COPYRIGHT HOLDER DISCLAIMS
+ALL WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED
+WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE
+COPYRIGHT HOLDER BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR
+CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS
+OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE
+OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE
+USE OR PERFORMANCE OF THIS SOFTWARE.

+ 703 - 0
node_modules/cacache/README.md

@@ -0,0 +1,703 @@
+# cacache [![npm version](https://img.shields.io/npm/v/cacache.svg)](https://npm.im/cacache) [![license](https://img.shields.io/npm/l/cacache.svg)](https://npm.im/cacache) [![Travis](https://img.shields.io/travis/npm/cacache.svg)](https://travis-ci.org/npm/cacache) [![AppVeyor](https://ci.appveyor.com/api/projects/status/github/npm/cacache?svg=true)](https://ci.appveyor.com/project/npm/cacache) [![Coverage Status](https://coveralls.io/repos/github/npm/cacache/badge.svg?branch=latest)](https://coveralls.io/github/npm/cacache?branch=latest)
+
+[`cacache`](https://github.com/npm/cacache) is a Node.js library for managing
+local key and content address caches. It's really fast, really good at
+concurrency, and it will never give you corrupted data, even if cache files
+get corrupted or manipulated.
+
+On systems that support user and group settings on files, cacache will
+match the `uid` and `gid` values to the folder where the cache lives, even
+when running as `root`.
+
+It was written to be used as [npm](https://npm.im)'s local cache, but can
+just as easily be used on its own.
+
+## Install
+
+`$ npm install --save cacache`
+
+## Table of Contents
+
+* [Example](#example)
+* [Features](#features)
+* [Contributing](#contributing)
+* [API](#api)
+  * [Using localized APIs](#localized-api)
+  * Reading
+    * [`ls`](#ls)
+    * [`ls.stream`](#ls-stream)
+    * [`get`](#get-data)
+    * [`get.stream`](#get-stream)
+    * [`get.info`](#get-info)
+    * [`get.hasContent`](#get-hasContent)
+  * Writing
+    * [`put`](#put-data)
+    * [`put.stream`](#put-stream)
+    * [`rm.all`](#rm-all)
+    * [`rm.entry`](#rm-entry)
+    * [`rm.content`](#rm-content)
+    * [`index.compact`](#index-compact)
+    * [`index.insert`](#index-insert)
+  * Utilities
+    * [`clearMemoized`](#clear-memoized)
+    * [`tmp.mkdir`](#tmp-mkdir)
+    * [`tmp.withTmp`](#with-tmp)
+  * Integrity
+    * [Subresource Integrity](#integrity)
+    * [`verify`](#verify)
+    * [`verify.lastRun`](#verify-last-run)
+
+### Example
+
+```javascript
+const cacache = require('cacache')
+const fs = require('fs')
+
+const tarball = '/path/to/mytar.tgz'
+const cachePath = '/tmp/my-toy-cache'
+const key = 'my-unique-key-1234'
+
+// Cache it! Use `cachePath` as the root of the content cache
+cacache.put(cachePath, key, '10293801983029384').then(integrity => {
+  console.log(`Saved content to ${cachePath}.`)
+})
+
+const destination = '/tmp/mytar.tgz'
+
+// Copy the contents out of the cache and into their destination!
+// But this time, use stream instead!
+cacache.get.stream(
+  cachePath, key
+).pipe(
+  fs.createWriteStream(destination)
+).on('finish', () => {
+  console.log('done extracting!')
+})
+
+// The same thing, but skip the key index.
+cacache.get.byDigest(cachePath, integrityHash).then(data => {
+  fs.writeFile(destination, data, err => {
+    console.log('tarball data fetched based on its sha512sum and written out!')
+  })
+})
+```
+
+### Features
+
+* Extraction by key or by content address (shasum, etc)
+* [Subresource Integrity](#integrity) web standard support
+* Multi-hash support - safely host sha1, sha512, etc, in a single cache
+* Automatic content deduplication
+* Fault tolerance (immune to corruption, partial writes, process races, etc)
+* Consistency guarantees on read and write (full data verification)
+* Lockless, high-concurrency cache access
+* Streaming support
+* Promise support
+* Fast -- sub-millisecond reads and writes including verification
+* Arbitrary metadata storage
+* Garbage collection and additional offline verification
+* Thorough test coverage
+* There's probably a bloom filter in there somewhere. Those are cool, right? 🤔
+
+### Contributing
+
+The cacache team enthusiastically welcomes contributions and project participation! There's a bunch of things you can do if you want to contribute! The [Contributor Guide](CONTRIBUTING.md) has all the information you need for everything from reporting bugs to contributing entire new features. Please don't hesitate to jump in if you'd like to, or even ask us questions if something isn't clear.
+
+All participants and maintainers in this project are expected to follow [Code of Conduct](CODE_OF_CONDUCT.md), and just generally be excellent to each other.
+
+Please refer to the [Changelog](CHANGELOG.md) for project history details, too.
+
+Happy hacking!
+
+### API
+
+#### <a name="ls"></a> `> cacache.ls(cache) -> Promise<Object>`
+
+Lists info for all entries currently in the cache as a single large object. Each
+entry in the object will be keyed by the unique index key, with corresponding
+[`get.info`](#get-info) objects as the values.
+
+##### Example
+
+```javascript
+cacache.ls(cachePath).then(console.log)
+// Output
+{
+  'my-thing': {
+    key: 'my-thing',
+    integrity: 'sha512-BaSe64/EnCoDED+HAsh==',
+    path: '.testcache/content/deadbeef', // joined with `cachePath`
+    time: 12345698490,
+    size: 4023948,
+    metadata: {
+      name: 'blah',
+      version: '1.2.3',
+      description: 'this was once a package but now it is my-thing'
+    }
+  },
+  'other-thing': {
+    key: 'other-thing',
+    integrity: 'sha1-ANothER+hasH=',
+    path: '.testcache/content/bada55',
+    time: 11992309289,
+    size: 111112
+  }
+}
+```
+
+#### <a name="ls-stream"></a> `> cacache.ls.stream(cache) -> Readable`
+
+Lists info for all entries currently in the cache as a single large object.
+
+This works just like [`ls`](#ls), except [`get.info`](#get-info) entries are
+returned as `'data'` events on the returned stream.
+
+##### Example
+
+```javascript
+cacache.ls.stream(cachePath).on('data', console.log)
+// Output
+{
+  key: 'my-thing',
+  integrity: 'sha512-BaSe64HaSh',
+  path: '.testcache/content/deadbeef', // joined with `cachePath`
+  time: 12345698490,
+  size: 13423,
+  metadata: {
+    name: 'blah',
+    version: '1.2.3',
+    description: 'this was once a package but now it is my-thing'
+  }
+}
+
+{
+  key: 'other-thing',
+  integrity: 'whirlpool-WoWSoMuchSupport',
+  path: '.testcache/content/bada55',
+  time: 11992309289,
+  size: 498023984029
+}
+
+{
+  ...
+}
+```
+
+#### <a name="get-data"></a> `> cacache.get(cache, key, [opts]) -> Promise({data, metadata, integrity})`
+
+Returns an object with the cached data, digest, and metadata identified by
+`key`. The `data` property of this object will be a `Buffer` instance that
+presumably holds some data that means something to you. I'm sure you know what
+to do with it! cacache just won't care.
+
+`integrity` is a [Subresource
+Integrity](#integrity)
+string. That is, a string that can be used to verify `data`, which looks like
+`<hash-algorithm>-<base64-integrity-hash>`.
+
+If there is no content identified by `key`, or if the locally-stored data does
+not pass the validity checksum, the promise will be rejected.
+
+A sub-function, `get.byDigest` may be used for identical behavior, except lookup
+will happen by integrity hash, bypassing the index entirely. This version of the
+function *only* returns `data` itself, without any wrapper.
+
+See: [options](#get-options)
+
+##### Note
+
+This function loads the entire cache entry into memory before returning it. If
+you're dealing with Very Large data, consider using [`get.stream`](#get-stream)
+instead.
+
+##### Example
+
+```javascript
+// Look up by key
+cache.get(cachePath, 'my-thing').then(console.log)
+// Output:
+{
+  metadata: {
+    thingName: 'my'
+  },
+  integrity: 'sha512-BaSe64HaSh',
+  data: Buffer#<deadbeef>,
+  size: 9320
+}
+
+// Look up by digest
+cache.get.byDigest(cachePath, 'sha512-BaSe64HaSh').then(console.log)
+// Output:
+Buffer#<deadbeef>
+```
+
+#### <a name="get-stream"></a> `> cacache.get.stream(cache, key, [opts]) -> Readable`
+
+Returns a [Readable Stream](https://nodejs.org/api/stream.html#stream_readable_streams) of the cached data identified by `key`.
+
+If there is no content identified by `key`, or if the locally-stored data does
+not pass the validity checksum, an error will be emitted.
+
+`metadata` and `integrity` events will be emitted before the stream closes, if
+you need to collect that extra data about the cached entry.
+
+A sub-function, `get.stream.byDigest` may be used for identical behavior,
+except lookup will happen by integrity hash, bypassing the index entirely. This
+version does not emit the `metadata` and `integrity` events at all.
+
+See: [options](#get-options)
+
+##### Example
+
+```javascript
+// Look up by key
+cache.get.stream(
+  cachePath, 'my-thing'
+).on('metadata', metadata => {
+  console.log('metadata:', metadata)
+}).on('integrity', integrity => {
+  console.log('integrity:', integrity)
+}).pipe(
+  fs.createWriteStream('./x.tgz')
+)
+// Outputs:
+metadata: { ... }
+integrity: 'sha512-SoMeDIGest+64=='
+
+// Look up by digest
+cache.get.stream.byDigest(
+  cachePath, 'sha512-SoMeDIGest+64=='
+).pipe(
+  fs.createWriteStream('./x.tgz')
+)
+```
+
+#### <a name="get-info"></a> `> cacache.get.info(cache, key) -> Promise`
+
+Looks up `key` in the cache index, returning information about the entry if
+one exists.
+
+##### Fields
+
+* `key` - Key the entry was looked up under. Matches the `key` argument.
+* `integrity` - [Subresource Integrity hash](#integrity) for the content this entry refers to.
+* `path` - Filesystem path where content is stored, joined with `cache` argument.
+* `time` - Timestamp the entry was first added on.
+* `metadata` - User-assigned metadata associated with the entry/content.
+
+##### Example
+
+```javascript
+cacache.get.info(cachePath, 'my-thing').then(console.log)
+
+// Output
+{
+  key: 'my-thing',
+  integrity: 'sha256-MUSTVERIFY+ALL/THINGS==',
+  path: '.testcache/content/deadbeef',
+  time: 12345698490,
+  size: 849234,
+  metadata: {
+    name: 'blah',
+    version: '1.2.3',
+    description: 'this was once a package but now it is my-thing'
+  }
+}
+```
+
+#### <a name="get-hasContent"></a> `> cacache.get.hasContent(cache, integrity) -> Promise`
+
+Looks up a [Subresource Integrity hash](#integrity) in the cache. If content
+exists for this `integrity`, it will return an object, with the specific single integrity hash
+that was found in `sri` key, and the size of the found content as `size`. If no content exists for this integrity, it will return `false`.
+
+##### Example
+
+```javascript
+cacache.get.hasContent(cachePath, 'sha256-MUSTVERIFY+ALL/THINGS==').then(console.log)
+
+// Output
+{
+  sri: {
+    source: 'sha256-MUSTVERIFY+ALL/THINGS==',
+    algorithm: 'sha256',
+    digest: 'MUSTVERIFY+ALL/THINGS==',
+    options: []
+  },
+  size: 9001
+}
+
+cacache.get.hasContent(cachePath, 'sha521-NOT+IN/CACHE==').then(console.log)
+
+// Output
+false
+```
+
+##### <a name="get-options"></a> Options
+
+##### `opts.integrity`
+If present, the pre-calculated digest for the inserted content. If this option
+is provided and does not match the post-insertion digest, insertion will fail
+with an `EINTEGRITY` error.
+
+##### `opts.memoize`
+
+Default: null
+
+If explicitly truthy, cacache will read from memory and memoize data on bulk read. If `false`, cacache will read from disk data. Reader functions by default read from in-memory cache.
+
+##### `opts.size`
+If provided, the data stream will be verified to check that enough data was
+passed through. If there's more or less data than expected, insertion will fail
+with an `EBADSIZE` error.
+
+
+#### <a name="put-data"></a> `> cacache.put(cache, key, data, [opts]) -> Promise`
+
+Inserts data passed to it into the cache. The returned Promise resolves with a
+digest (generated according to [`opts.algorithms`](#optsalgorithms)) after the
+cache entry has been successfully written.
+
+See: [options](#put-options)
+
+##### Example
+
+```javascript
+fetch(
+  'https://registry.npmjs.org/cacache/-/cacache-1.0.0.tgz'
+).then(data => {
+  return cacache.put(cachePath, 'registry.npmjs.org|cacache@1.0.0', data)
+}).then(integrity => {
+  console.log('integrity hash is', integrity)
+})
+```
+
+#### <a name="put-stream"></a> `> cacache.put.stream(cache, key, [opts]) -> Writable`
+
+Returns a [Writable
+Stream](https://nodejs.org/api/stream.html#stream_writable_streams) that inserts
+data written to it into the cache. Emits an `integrity` event with the digest of
+written contents when it succeeds.
+
+See: [options](#put-options)
+
+##### Example
+
+```javascript
+request.get(
+  'https://registry.npmjs.org/cacache/-/cacache-1.0.0.tgz'
+).pipe(
+  cacache.put.stream(
+    cachePath, 'registry.npmjs.org|cacache@1.0.0'
+  ).on('integrity', d => console.log(`integrity digest is ${d}`))
+)
+```
+
+##### <a name="put-options"></a> Options
+
+##### `opts.metadata`
+
+Arbitrary metadata to be attached to the inserted key.
+
+##### `opts.size`
+
+If provided, the data stream will be verified to check that enough data was
+passed through. If there's more or less data than expected, insertion will fail
+with an `EBADSIZE` error.
+
+##### `opts.integrity`
+
+If present, the pre-calculated digest for the inserted content. If this option
+is provided and does not match the post-insertion digest, insertion will fail
+with an `EINTEGRITY` error.
+
+`algorithms` has no effect if this option is present.
+
+##### `opts.algorithms`
+
+Default: ['sha512']
+
+Hashing algorithms to use when calculating the [subresource integrity
+digest](#integrity)
+for inserted data. Can use any algorithm listed in `crypto.getHashes()` or
+`'omakase'`/`'お任せします'` to pick a random hash algorithm on each insertion. You
+may also use any anagram of `'modnar'` to use this feature.
+
+Currently only supports one algorithm at a time (i.e., an array length of
+exactly `1`). Has no effect if `opts.integrity` is present.
+
+##### `opts.memoize`
+
+Default: null
+
+If provided, cacache will memoize the given cache insertion in memory, bypassing
+any filesystem checks for that key or digest in future cache fetches. Nothing
+will be written to the in-memory cache unless this option is explicitly truthy.
+
+If `opts.memoize` is an object or a `Map`-like (that is, an object with `get`
+and `set` methods), it will be written to instead of the global memoization
+cache.
+
+Reading from disk data can be forced by explicitly passing `memoize: false` to
+the reader functions, but their default will be to read from memory.
+
+##### `opts.tmpPrefix`
+Default: null
+
+Prefix to append on the temporary directory name inside the cache's tmp dir. 
+
+#### <a name="rm-all"></a> `> cacache.rm.all(cache) -> Promise`
+
+Clears the entire cache. Mainly by blowing away the cache directory itself.
+
+##### Example
+
+```javascript
+cacache.rm.all(cachePath).then(() => {
+  console.log('THE APOCALYPSE IS UPON US 😱')
+})
+```
+
+#### <a name="rm-entry"></a> `> cacache.rm.entry(cache, key, [opts]) -> Promise`
+
+Alias: `cacache.rm`
+
+Removes the index entry for `key`. Content will still be accessible if
+requested directly by content address ([`get.stream.byDigest`](#get-stream)).
+
+By default, this appends a new entry to the index with an integrity of `null`.
+If `opts.removeFully` is set to `true` then the index file itself will be
+physically deleted rather than appending a `null`.
+
+To remove the content itself (which might still be used by other entries), use
+[`rm.content`](#rm-content). Or, to safely vacuum any unused content, use
+[`verify`](#verify).
+
+##### Example
+
+```javascript
+cacache.rm.entry(cachePath, 'my-thing').then(() => {
+  console.log('I did not like it anyway')
+})
+```
+
+#### <a name="rm-content"></a> `> cacache.rm.content(cache, integrity) -> Promise`
+
+Removes the content identified by `integrity`. Any index entries referring to it
+will not be usable again until the content is re-added to the cache with an
+identical digest.
+
+##### Example
+
+```javascript
+cacache.rm.content(cachePath, 'sha512-SoMeDIGest/IN+BaSE64==').then(() => {
+  console.log('data for my-thing is gone!')
+})
+```
+
+#### <a name="index-compact"></a> `> cacache.index.compact(cache, key, matchFn, [opts]) -> Promise`
+
+Uses `matchFn`, which must be a synchronous function that accepts two entries
+and returns a boolean indicating whether or not the two entries match, to
+deduplicate all entries in the cache for the given `key`.
+
+If `opts.validateEntry` is provided, it will be called as a function with the
+only parameter being a single index entry. The function must return a Boolean,
+if it returns `true` the entry is considered valid and will be kept in the index,
+if it returns `false` the entry will be removed from the index.
+
+If `opts.validateEntry` is not provided, however, every entry in the index will
+be deduplicated and kept until the first `null` integrity is reached, removing
+all entries that were written before the `null`.
+
+The deduplicated list of entries is both written to the index, replacing the
+existing content, and returned in the Promise.
+
+#### <a name="index-insert"></a> `> cacache.index.insert(cache, key, integrity, opts) -> Promise`
+
+Writes an index entry to the cache for the given `key` without writing content.
+
+It is assumed if you are using this method, you have already stored the content
+some other way and you only wish to add a new index to that content. The `metadata`
+and `size` properties are read from `opts` and used as part of the index entry.
+
+Returns a Promise resolving to the newly added entry.
+
+#### <a name="clear-memoized"></a> `> cacache.clearMemoized()`
+
+Completely resets the in-memory entry cache.
+
+#### <a name="tmp-mkdir"></a> `> tmp.mkdir(cache, opts) -> Promise<Path>`
+
+Returns a unique temporary directory inside the cache's `tmp` dir. This
+directory will use the same safe user assignment that all the other stuff use.
+
+Once the directory is made, it's the user's responsibility that all files
+within are given the appropriate `gid`/`uid` ownership settings to match
+the rest of the cache. If not, you can ask cacache to do it for you by
+calling [`tmp.fix()`](#tmp-fix), which will fix all tmp directory
+permissions.
+
+If you want automatic cleanup of this directory, use
+[`tmp.withTmp()`](#with-tmp)
+
+See: [options](#tmp-options)
+
+##### Example
+
+```javascript
+cacache.tmp.mkdir(cache).then(dir => {
+  fs.writeFile(path.join(dir, 'blablabla'), Buffer#<1234>, ...)
+})
+```
+
+#### <a name="tmp-fix"></a> `> tmp.fix(cache) -> Promise`
+
+Sets the `uid` and `gid` properties on all files and folders within the tmp
+folder to match the rest of the cache.
+
+Use this after manually writing files into [`tmp.mkdir`](#tmp-mkdir) or
+[`tmp.withTmp`](#with-tmp).
+
+##### Example
+
+```javascript
+cacache.tmp.mkdir(cache).then(dir => {
+  writeFile(path.join(dir, 'file'), someData).then(() => {
+    // make sure we didn't just put a root-owned file in the cache
+    cacache.tmp.fix().then(() => {
+      // all uids and gids match now
+    })
+  })
+})
+```
+
+#### <a name="with-tmp"></a> `> tmp.withTmp(cache, opts, cb) -> Promise`
+
+Creates a temporary directory with [`tmp.mkdir()`](#tmp-mkdir) and calls `cb`
+with it. The created temporary directory will be removed automatically once
+the promise returned by `cb()` settles, so no manual cleanup of the tmp
+directory is required.
+
+The same caveats apply when it comes to managing permissions for the tmp dir's
+contents.
+
+See: [options](#tmp-options)
+
+##### Example
+
+```javascript
+cacache.tmp.withTmp(cache, dir => {
+  return fs.writeFileAsync(path.join(dir, 'blablabla'), Buffer#<1234>, ...)
+}).then(() => {
+  // `dir` no longer exists
+})
+```
+
+##### <a name="tmp-options"></a> Options
+
+##### `opts.tmpPrefix`
+Default: null
+
+Prefix to append on the temporary directory name inside the cache's tmp dir. 
+
+#### <a name="integrity"></a> Subresource Integrity Digests
+
+For content verification and addressing, cacache uses strings following the
+[Subresource
+Integrity spec](https://developer.mozilla.org/en-US/docs/Web/Security/Subresource_Integrity).
+That is, any time cacache expects an `integrity` argument or option, it
+should be in the format `<hashAlgorithm>-<base64-hash>`.
+
+One deviation from the current spec is that cacache will support any hash
+algorithms supported by the underlying Node.js process. You can use
+`crypto.getHashes()` to see which ones you can use.
+
+##### Generating Digests Yourself
+
+If you have an existing content shasum, they are generally formatted as a
+hexadecimal string (that is, a sha1 would look like:
+`5f5513f8822fdbe5145af33b64d8d970dcf95c6e`). In order to be compatible with
+cacache, you'll need to convert this to an equivalent subresource integrity
+string. For this example, the corresponding hash would be:
+`sha1-X1UT+IIv2+UUWvM7ZNjZcNz5XG4=`.
+
+If you want to generate an integrity string yourself for existing data, you can
+use something like this:
+
+```javascript
+const crypto = require('crypto')
+const hashAlgorithm = 'sha512'
+const data = 'foobarbaz'
+
+const integrity = (
+  hashAlgorithm +
+  '-' +
+  crypto.createHash(hashAlgorithm).update(data).digest('base64')
+)
+```
+
+You can also use [`ssri`](https://npm.im/ssri) to have a richer set of functionality
+around SRI strings, including generation, parsing, and translating from existing
+hex-formatted strings.
+
+#### <a name="verify"></a> `> cacache.verify(cache, opts) -> Promise`
+
+Checks out and fixes up your cache:
+
+* Cleans up corrupted or invalid index entries.
+* Custom entry filtering options.
+* Garbage collects any content entries not referenced by the index.
+* Checks integrity for all content entries and removes invalid content.
+* Fixes cache ownership.
+* Removes the `tmp` directory in the cache and all its contents.
+
+When it's done, it'll return an object with various stats about the verification
+process, including amount of storage reclaimed, number of valid entries, number
+of entries removed, etc.
+
+##### <a name="verify-options"></a> Options
+
+##### `opts.concurrency`
+
+Default: 20
+
+Number of concurrently read files in the filesystem while doing clean up.
+
+##### `opts.filter`
+Receives a formatted entry. Return false to remove it.
+Note: might be called more than once on the same entry.
+
+##### `opts.log`
+Custom logger function:
+```
+  log: { silly () {} }
+  log.silly('verify', 'verifying cache at', cache)
+```
+
+##### Example
+
+```sh
+echo somegarbage >> $CACHEPATH/content/deadbeef
+```
+
+```javascript
+cacache.verify(cachePath).then(stats => {
+  // deadbeef collected, because of invalid checksum.
+  console.log('cache is much nicer now! stats:', stats)
+})
+```
+
+#### <a name="verify-last-run"></a> `> cacache.verify.lastRun(cache) -> Promise`
+
+Returns a `Date` representing the last time `cacache.verify` was run on `cache`.
+
+##### Example
+
+```javascript
+cacache.verify(cachePath).then(() => {
+  cacache.verify.lastRun(cachePath).then(lastTime => {
+    console.log('cacache.verify was last called on ' + lastTime)
+  })
+})
+```

+ 237 - 0
node_modules/cacache/get.js

@@ -0,0 +1,237 @@
+'use strict'
+
+const Collect = require('minipass-collect')
+const Minipass = require('minipass')
+const Pipeline = require('minipass-pipeline')
+const fs = require('fs')
+const util = require('util')
+
+const index = require('./lib/entry-index')
+const memo = require('./lib/memoization')
+const read = require('./lib/content/read')
+
+const writeFile = util.promisify(fs.writeFile)
+
+function getData (cache, key, opts = {}) {
+  const { integrity, memoize, size } = opts
+  const memoized = memo.get(cache, key, opts)
+  if (memoized && memoize !== false) {
+    return Promise.resolve({
+      metadata: memoized.entry.metadata,
+      data: memoized.data,
+      integrity: memoized.entry.integrity,
+      size: memoized.entry.size,
+    })
+  }
+
+  return index.find(cache, key, opts).then((entry) => {
+    if (!entry)
+      throw new index.NotFoundError(cache, key)
+
+    return read(cache, entry.integrity, { integrity, size }).then((data) => {
+      if (memoize)
+        memo.put(cache, entry, data, opts)
+
+      return {
+        data,
+        metadata: entry.metadata,
+        size: entry.size,
+        integrity: entry.integrity,
+      }
+    })
+  })
+}
+module.exports = getData
+
+function getDataByDigest (cache, key, opts = {}) {
+  const { integrity, memoize, size } = opts
+  const memoized = memo.get.byDigest(cache, key, opts)
+  if (memoized && memoize !== false)
+    return Promise.resolve(memoized)
+
+  return read(cache, key, { integrity, size }).then((res) => {
+    if (memoize)
+      memo.put.byDigest(cache, key, res, opts)
+    return res
+  })
+}
+module.exports.byDigest = getDataByDigest
+
+function getDataSync (cache, key, opts = {}) {
+  const { integrity, memoize, size } = opts
+  const memoized = memo.get(cache, key, opts)
+
+  if (memoized && memoize !== false) {
+    return {
+      metadata: memoized.entry.metadata,
+      data: memoized.data,
+      integrity: memoized.entry.integrity,
+      size: memoized.entry.size,
+    }
+  }
+  const entry = index.find.sync(cache, key, opts)
+  if (!entry)
+    throw new index.NotFoundError(cache, key)
+  const data = read.sync(cache, entry.integrity, {
+    integrity: integrity,
+    size: size,
+  })
+  const res = {
+    metadata: entry.metadata,
+    data: data,
+    size: entry.size,
+    integrity: entry.integrity,
+  }
+  if (memoize)
+    memo.put(cache, entry, res.data, opts)
+
+  return res
+}
+
+module.exports.sync = getDataSync
+
+function getDataByDigestSync (cache, digest, opts = {}) {
+  const { integrity, memoize, size } = opts
+  const memoized = memo.get.byDigest(cache, digest, opts)
+
+  if (memoized && memoize !== false)
+    return memoized
+
+  const res = read.sync(cache, digest, {
+    integrity: integrity,
+    size: size,
+  })
+  if (memoize)
+    memo.put.byDigest(cache, digest, res, opts)
+
+  return res
+}
+module.exports.sync.byDigest = getDataByDigestSync
+
+const getMemoizedStream = (memoized) => {
+  const stream = new Minipass()
+  stream.on('newListener', function (ev, cb) {
+    ev === 'metadata' && cb(memoized.entry.metadata)
+    ev === 'integrity' && cb(memoized.entry.integrity)
+    ev === 'size' && cb(memoized.entry.size)
+  })
+  stream.end(memoized.data)
+  return stream
+}
+
+function getStream (cache, key, opts = {}) {
+  const { memoize, size } = opts
+  const memoized = memo.get(cache, key, opts)
+  if (memoized && memoize !== false)
+    return getMemoizedStream(memoized)
+
+  const stream = new Pipeline()
+  index
+    .find(cache, key)
+    .then((entry) => {
+      if (!entry)
+        throw new index.NotFoundError(cache, key)
+
+      stream.emit('metadata', entry.metadata)
+      stream.emit('integrity', entry.integrity)
+      stream.emit('size', entry.size)
+      stream.on('newListener', function (ev, cb) {
+        ev === 'metadata' && cb(entry.metadata)
+        ev === 'integrity' && cb(entry.integrity)
+        ev === 'size' && cb(entry.size)
+      })
+
+      const src = read.readStream(
+        cache,
+        entry.integrity,
+        { ...opts, size: typeof size !== 'number' ? entry.size : size }
+      )
+
+      if (memoize) {
+        const memoStream = new Collect.PassThrough()
+        memoStream.on('collect', data => memo.put(cache, entry, data, opts))
+        stream.unshift(memoStream)
+      }
+      stream.unshift(src)
+    })
+    .catch((err) => stream.emit('error', err))
+
+  return stream
+}
+
+module.exports.stream = getStream
+
+function getStreamDigest (cache, integrity, opts = {}) {
+  const { memoize } = opts
+  const memoized = memo.get.byDigest(cache, integrity, opts)
+  if (memoized && memoize !== false) {
+    const stream = new Minipass()
+    stream.end(memoized)
+    return stream
+  } else {
+    const stream = read.readStream(cache, integrity, opts)
+    if (!memoize)
+      return stream
+
+    const memoStream = new Collect.PassThrough()
+    memoStream.on('collect', data => memo.put.byDigest(
+      cache,
+      integrity,
+      data,
+      opts
+    ))
+    return new Pipeline(stream, memoStream)
+  }
+}
+
+module.exports.stream.byDigest = getStreamDigest
+
+function info (cache, key, opts = {}) {
+  const { memoize } = opts
+  const memoized = memo.get(cache, key, opts)
+  if (memoized && memoize !== false)
+    return Promise.resolve(memoized.entry)
+  else
+    return index.find(cache, key)
+}
+module.exports.info = info
+
+function copy (cache, key, dest, opts = {}) {
+  if (read.copy) {
+    return index.find(cache, key, opts).then((entry) => {
+      if (!entry)
+        throw new index.NotFoundError(cache, key)
+      return read.copy(cache, entry.integrity, dest, opts)
+        .then(() => {
+          return {
+            metadata: entry.metadata,
+            size: entry.size,
+            integrity: entry.integrity,
+          }
+        })
+    })
+  }
+
+  return getData(cache, key, opts).then((res) => {
+    return writeFile(dest, res.data).then(() => {
+      return {
+        metadata: res.metadata,
+        size: res.size,
+        integrity: res.integrity,
+      }
+    })
+  })
+}
+module.exports.copy = copy
+
+function copyByDigest (cache, key, dest, opts = {}) {
+  if (read.copy)
+    return read.copy(cache, key, dest, opts).then(() => key)
+
+  return getDataByDigest(cache, key, opts).then((res) => {
+    return writeFile(dest, res).then(() => key)
+  })
+}
+module.exports.copy.byDigest = copyByDigest
+
+module.exports.hasContent = read.hasContent

+ 46 - 0
node_modules/cacache/index.js

@@ -0,0 +1,46 @@
+'use strict'
+
+const ls = require('./ls.js')
+const get = require('./get.js')
+const put = require('./put.js')
+const rm = require('./rm.js')
+const verify = require('./verify.js')
+const { clearMemoized } = require('./lib/memoization.js')
+const tmp = require('./lib/util/tmp.js')
+const index = require('./lib/entry-index.js')
+
+module.exports.index = {}
+module.exports.index.compact = index.compact
+module.exports.index.insert = index.insert
+
+module.exports.ls = ls
+module.exports.ls.stream = ls.stream
+
+module.exports.get = get
+module.exports.get.byDigest = get.byDigest
+module.exports.get.sync = get.sync
+module.exports.get.sync.byDigest = get.sync.byDigest
+module.exports.get.stream = get.stream
+module.exports.get.stream.byDigest = get.stream.byDigest
+module.exports.get.copy = get.copy
+module.exports.get.copy.byDigest = get.copy.byDigest
+module.exports.get.info = get.info
+module.exports.get.hasContent = get.hasContent
+module.exports.get.hasContent.sync = get.hasContent.sync
+
+module.exports.put = put
+module.exports.put.stream = put.stream
+
+module.exports.rm = rm.entry
+module.exports.rm.all = rm.all
+module.exports.rm.entry = module.exports.rm
+module.exports.rm.content = rm.content
+
+module.exports.clearMemoized = clearMemoized
+
+module.exports.tmp = {}
+module.exports.tmp.mkdir = tmp.mkdir
+module.exports.tmp.withTmp = tmp.withTmp
+
+module.exports.verify = verify
+module.exports.verify.lastRun = verify.lastRun

+ 29 - 0
node_modules/cacache/lib/content/path.js

@@ -0,0 +1,29 @@
+'use strict'
+
+const contentVer = require('../../package.json')['cache-version'].content
+const hashToSegments = require('../util/hash-to-segments')
+const path = require('path')
+const ssri = require('ssri')
+
+// Current format of content file path:
+//
+// sha512-BaSE64Hex= ->
+// ~/.my-cache/content-v2/sha512/ba/da/55deadbeefc0ffee
+//
+module.exports = contentPath
+
+function contentPath (cache, integrity) {
+  const sri = ssri.parse(integrity, { single: true })
+  // contentPath is the *strongest* algo given
+  return path.join(
+    contentDir(cache),
+    sri.algorithm,
+    ...hashToSegments(sri.hexDigest())
+  )
+}
+
+module.exports.contentDir = contentDir
+
+function contentDir (cache) {
+  return path.join(cache, `content-v${contentVer}`)
+}

+ 244 - 0
node_modules/cacache/lib/content/read.js

@@ -0,0 +1,244 @@
+'use strict'
+
+const util = require('util')
+
+const fs = require('fs')
+const fsm = require('fs-minipass')
+const ssri = require('ssri')
+const contentPath = require('./path')
+const Pipeline = require('minipass-pipeline')
+
+const lstat = util.promisify(fs.lstat)
+const readFile = util.promisify(fs.readFile)
+
+module.exports = read
+
+const MAX_SINGLE_READ_SIZE = 64 * 1024 * 1024
+function read (cache, integrity, opts = {}) {
+  const { size } = opts
+  return withContentSri(cache, integrity, (cpath, sri) => {
+    // get size
+    return lstat(cpath).then(stat => ({ stat, cpath, sri }))
+  }).then(({ stat, cpath, sri }) => {
+    if (typeof size === 'number' && stat.size !== size)
+      throw sizeError(size, stat.size)
+
+    if (stat.size > MAX_SINGLE_READ_SIZE)
+      return readPipeline(cpath, stat.size, sri, new Pipeline()).concat()
+
+    return readFile(cpath, null).then((data) => {
+      if (!ssri.checkData(data, sri))
+        throw integrityError(sri, cpath)
+
+      return data
+    })
+  })
+}
+
+const readPipeline = (cpath, size, sri, stream) => {
+  stream.push(
+    new fsm.ReadStream(cpath, {
+      size,
+      readSize: MAX_SINGLE_READ_SIZE,
+    }),
+    ssri.integrityStream({
+      integrity: sri,
+      size,
+    })
+  )
+  return stream
+}
+
+module.exports.sync = readSync
+
+function readSync (cache, integrity, opts = {}) {
+  const { size } = opts
+  return withContentSriSync(cache, integrity, (cpath, sri) => {
+    const data = fs.readFileSync(cpath)
+    if (typeof size === 'number' && size !== data.length)
+      throw sizeError(size, data.length)
+
+    if (ssri.checkData(data, sri))
+      return data
+
+    throw integrityError(sri, cpath)
+  })
+}
+
+module.exports.stream = readStream
+module.exports.readStream = readStream
+
+function readStream (cache, integrity, opts = {}) {
+  const { size } = opts
+  const stream = new Pipeline()
+  withContentSri(cache, integrity, (cpath, sri) => {
+    // just lstat to ensure it exists
+    return lstat(cpath).then((stat) => ({ stat, cpath, sri }))
+  }).then(({ stat, cpath, sri }) => {
+    if (typeof size === 'number' && size !== stat.size)
+      return stream.emit('error', sizeError(size, stat.size))
+
+    readPipeline(cpath, stat.size, sri, stream)
+  }, er => stream.emit('error', er))
+
+  return stream
+}
+
+let copyFile
+if (fs.copyFile) {
+  module.exports.copy = copy
+  module.exports.copy.sync = copySync
+  copyFile = util.promisify(fs.copyFile)
+}
+
+function copy (cache, integrity, dest) {
+  return withContentSri(cache, integrity, (cpath, sri) => {
+    return copyFile(cpath, dest)
+  })
+}
+
+function copySync (cache, integrity, dest) {
+  return withContentSriSync(cache, integrity, (cpath, sri) => {
+    return fs.copyFileSync(cpath, dest)
+  })
+}
+
+module.exports.hasContent = hasContent
+
+function hasContent (cache, integrity) {
+  if (!integrity)
+    return Promise.resolve(false)
+
+  return withContentSri(cache, integrity, (cpath, sri) => {
+    return lstat(cpath).then((stat) => ({ size: stat.size, sri, stat }))
+  }).catch((err) => {
+    if (err.code === 'ENOENT')
+      return false
+
+    if (err.code === 'EPERM') {
+      /* istanbul ignore else */
+      if (process.platform !== 'win32')
+        throw err
+      else
+        return false
+    }
+  })
+}
+
+module.exports.hasContent.sync = hasContentSync
+
+function hasContentSync (cache, integrity) {
+  if (!integrity)
+    return false
+
+  return withContentSriSync(cache, integrity, (cpath, sri) => {
+    try {
+      const stat = fs.lstatSync(cpath)
+      return { size: stat.size, sri, stat }
+    } catch (err) {
+      if (err.code === 'ENOENT')
+        return false
+
+      if (err.code === 'EPERM') {
+        /* istanbul ignore else */
+        if (process.platform !== 'win32')
+          throw err
+        else
+          return false
+      }
+    }
+  })
+}
+
+function withContentSri (cache, integrity, fn) {
+  const tryFn = () => {
+    const sri = ssri.parse(integrity)
+    // If `integrity` has multiple entries, pick the first digest
+    // with available local data.
+    const algo = sri.pickAlgorithm()
+    const digests = sri[algo]
+
+    if (digests.length <= 1) {
+      const cpath = contentPath(cache, digests[0])
+      return fn(cpath, digests[0])
+    } else {
+      // Can't use race here because a generic error can happen before
+      // a ENOENT error, and can happen before a valid result
+      return Promise
+        .all(digests.map((meta) => {
+          return withContentSri(cache, meta, fn)
+            .catch((err) => {
+              if (err.code === 'ENOENT') {
+                return Object.assign(
+                  new Error('No matching content found for ' + sri.toString()),
+                  { code: 'ENOENT' }
+                )
+              }
+              return err
+            })
+        }))
+        .then((results) => {
+          // Return the first non error if it is found
+          const result = results.find((r) => !(r instanceof Error))
+          if (result)
+            return result
+
+          // Throw the No matching content found error
+          const enoentError = results.find((r) => r.code === 'ENOENT')
+          if (enoentError)
+            throw enoentError
+
+          // Throw generic error
+          throw results.find((r) => r instanceof Error)
+        })
+    }
+  }
+
+  return new Promise((resolve, reject) => {
+    try {
+      tryFn()
+        .then(resolve)
+        .catch(reject)
+    } catch (err) {
+      reject(err)
+    }
+  })
+}
+
+function withContentSriSync (cache, integrity, fn) {
+  const sri = ssri.parse(integrity)
+  // If `integrity` has multiple entries, pick the first digest
+  // with available local data.
+  const algo = sri.pickAlgorithm()
+  const digests = sri[algo]
+  if (digests.length <= 1) {
+    const cpath = contentPath(cache, digests[0])
+    return fn(cpath, digests[0])
+  } else {
+    let lastErr = null
+    for (const meta of digests) {
+      try {
+        return withContentSriSync(cache, meta, fn)
+      } catch (err) {
+        lastErr = err
+      }
+    }
+    throw lastErr
+  }
+}
+
+function sizeError (expected, found) {
+  const err = new Error(`Bad data size: expected inserted data to be ${expected} bytes, but got ${found} instead`)
+  err.expected = expected
+  err.found = found
+  err.code = 'EBADSIZE'
+  return err
+}
+
+function integrityError (sri, path) {
+  const err = new Error(`Integrity verification failed for ${sri} (${path})`)
+  err.code = 'EINTEGRITY'
+  err.sri = sri
+  err.path = path
+  return err
+}

+ 19 - 0
node_modules/cacache/lib/content/rm.js

@@ -0,0 +1,19 @@
+'use strict'
+
+const util = require('util')
+
+const contentPath = require('./path')
+const { hasContent } = require('./read')
+const rimraf = util.promisify(require('rimraf'))
+
+module.exports = rm
+
+function rm (cache, integrity) {
+  return hasContent(cache, integrity).then((content) => {
+    // ~pretty~ sure we can't end up with a content lacking sri, but be safe
+    if (content && content.sri)
+      return rimraf(contentPath(cache, content.sri)).then(() => true)
+    else
+      return false
+  })
+}

+ 189 - 0
node_modules/cacache/lib/content/write.js

@@ -0,0 +1,189 @@
+'use strict'
+
+const util = require('util')
+
+const contentPath = require('./path')
+const fixOwner = require('../util/fix-owner')
+const fs = require('fs')
+const moveFile = require('../util/move-file')
+const Minipass = require('minipass')
+const Pipeline = require('minipass-pipeline')
+const Flush = require('minipass-flush')
+const path = require('path')
+const rimraf = util.promisify(require('rimraf'))
+const ssri = require('ssri')
+const uniqueFilename = require('unique-filename')
+const { disposer } = require('./../util/disposer')
+const fsm = require('fs-minipass')
+
+const writeFile = util.promisify(fs.writeFile)
+
+module.exports = write
+
+function write (cache, data, opts = {}) {
+  const { algorithms, size, integrity } = opts
+  if (algorithms && algorithms.length > 1)
+    throw new Error('opts.algorithms only supports a single algorithm for now')
+
+  if (typeof size === 'number' && data.length !== size)
+    return Promise.reject(sizeError(size, data.length))
+
+  const sri = ssri.fromData(data, algorithms ? { algorithms } : {})
+  if (integrity && !ssri.checkData(data, integrity, opts))
+    return Promise.reject(checksumError(integrity, sri))
+
+  return disposer(makeTmp(cache, opts), makeTmpDisposer,
+    (tmp) => {
+      return writeFile(tmp.target, data, { flag: 'wx' })
+        .then(() => moveToDestination(tmp, cache, sri, opts))
+    })
+    .then(() => ({ integrity: sri, size: data.length }))
+}
+
+module.exports.stream = writeStream
+
+// writes proxied to the 'inputStream' that is passed to the Promise
+// 'end' is deferred until content is handled.
+class CacacheWriteStream extends Flush {
+  constructor (cache, opts) {
+    super()
+    this.opts = opts
+    this.cache = cache
+    this.inputStream = new Minipass()
+    this.inputStream.on('error', er => this.emit('error', er))
+    this.inputStream.on('drain', () => this.emit('drain'))
+    this.handleContentP = null
+  }
+
+  write (chunk, encoding, cb) {
+    if (!this.handleContentP) {
+      this.handleContentP = handleContent(
+        this.inputStream,
+        this.cache,
+        this.opts
+      )
+    }
+    return this.inputStream.write(chunk, encoding, cb)
+  }
+
+  flush (cb) {
+    this.inputStream.end(() => {
+      if (!this.handleContentP) {
+        const e = new Error('Cache input stream was empty')
+        e.code = 'ENODATA'
+        // empty streams are probably emitting end right away.
+        // defer this one tick by rejecting a promise on it.
+        return Promise.reject(e).catch(cb)
+      }
+      this.handleContentP.then(
+        (res) => {
+          res.integrity && this.emit('integrity', res.integrity)
+          res.size !== null && this.emit('size', res.size)
+          cb()
+        },
+        (er) => cb(er)
+      )
+    })
+  }
+}
+
+function writeStream (cache, opts = {}) {
+  return new CacacheWriteStream(cache, opts)
+}
+
+function handleContent (inputStream, cache, opts) {
+  return disposer(makeTmp(cache, opts), makeTmpDisposer, (tmp) => {
+    return pipeToTmp(inputStream, cache, tmp.target, opts)
+      .then((res) => {
+        return moveToDestination(
+          tmp,
+          cache,
+          res.integrity,
+          opts
+        ).then(() => res)
+      })
+  })
+}
+
+function pipeToTmp (inputStream, cache, tmpTarget, opts) {
+  let integrity
+  let size
+  const hashStream = ssri.integrityStream({
+    integrity: opts.integrity,
+    algorithms: opts.algorithms,
+    size: opts.size,
+  })
+  hashStream.on('integrity', i => {
+    integrity = i
+  })
+  hashStream.on('size', s => {
+    size = s
+  })
+
+  const outStream = new fsm.WriteStream(tmpTarget, {
+    flags: 'wx',
+  })
+
+  // NB: this can throw if the hashStream has a problem with
+  // it, and the data is fully written.  but pipeToTmp is only
+  // called in promisory contexts where that is handled.
+  const pipeline = new Pipeline(
+    inputStream,
+    hashStream,
+    outStream
+  )
+
+  return pipeline.promise()
+    .then(() => ({ integrity, size }))
+    .catch(er => rimraf(tmpTarget).then(() => {
+      throw er
+    }))
+}
+
+function makeTmp (cache, opts) {
+  const tmpTarget = uniqueFilename(path.join(cache, 'tmp'), opts.tmpPrefix)
+  return fixOwner.mkdirfix(cache, path.dirname(tmpTarget)).then(() => ({
+    target: tmpTarget,
+    moved: false,
+  }))
+}
+
+function makeTmpDisposer (tmp) {
+  if (tmp.moved)
+    return Promise.resolve()
+
+  return rimraf(tmp.target)
+}
+
+function moveToDestination (tmp, cache, sri, opts) {
+  const destination = contentPath(cache, sri)
+  const destDir = path.dirname(destination)
+
+  return fixOwner
+    .mkdirfix(cache, destDir)
+    .then(() => {
+      return moveFile(tmp.target, destination)
+    })
+    .then(() => {
+      tmp.moved = true
+      return fixOwner.chownr(cache, destination)
+    })
+}
+
+function sizeError (expected, found) {
+  const err = new Error(`Bad data size: expected inserted data to be ${expected} bytes, but got ${found} instead`)
+  err.expected = expected
+  err.found = found
+  err.code = 'EBADSIZE'
+  return err
+}
+
+function checksumError (expected, found) {
+  const err = new Error(`Integrity check failed:
+  Wanted: ${expected}
+   Found: ${found}`)
+  err.code = 'EINTEGRITY'
+  err.expected = expected
+  err.found = found
+  return err
+}

+ 394 - 0
node_modules/cacache/lib/entry-index.js

@@ -0,0 +1,394 @@
+'use strict'
+
+const util = require('util')
+const crypto = require('crypto')
+const fs = require('fs')
+const Minipass = require('minipass')
+const path = require('path')
+const ssri = require('ssri')
+const uniqueFilename = require('unique-filename')
+
+const { disposer } = require('./util/disposer')
+const contentPath = require('./content/path')
+const fixOwner = require('./util/fix-owner')
+const hashToSegments = require('./util/hash-to-segments')
+const indexV = require('../package.json')['cache-version'].index
+const moveFile = require('@npmcli/move-file')
+const _rimraf = require('rimraf')
+const rimraf = util.promisify(_rimraf)
+rimraf.sync = _rimraf.sync
+
+const appendFile = util.promisify(fs.appendFile)
+const readFile = util.promisify(fs.readFile)
+const readdir = util.promisify(fs.readdir)
+const writeFile = util.promisify(fs.writeFile)
+
// Error thrown/returned when a key has no entry in the index.
// Uses the fs-style 'ENOENT' code so callers can handle cache misses
// and missing files uniformly.
module.exports.NotFoundError = class NotFoundError extends Error {
  constructor (cache, key) {
    super(`No cache entry for ${key} found in ${cache}`)
    this.code = 'ENOENT'
    this.cache = cache
    this.key = key
  }
}
+
+module.exports.compact = compact
+
// Rewrite the index bucket for `key`, deduplicating entries.
// The bucket file is append-only, so the last line is the newest entry;
// this walks newest-to-oldest, keeps the first entry of each
// matchFn-equivalence class, writes the compacted bucket atomically via
// a temp file, and returns the surviving entries newest-first.
async function compact (cache, key, matchFn, opts = {}) {
  const bucket = bucketPath(cache, key)
  const entries = await bucketEntries(bucket)
  const newEntries = []
  // we loop backwards because the bottom-most result is the newest
  // since we add new entries with appendFile
  for (let i = entries.length - 1; i >= 0; --i) {
    const entry = entries[i]
    // a null integrity could mean either a delete was appended
    // or the user has simply stored an index that does not map
    // to any content. we determine if the user wants to keep the
    // null integrity based on the validateEntry function passed in options.
    // if the integrity is null and no validateEntry is provided, we break
    // as we consider the null integrity to be a deletion of everything
    // that came before it.
    if (entry.integrity === null && !opts.validateEntry)
      break

    // if this entry is valid, and it is either the first entry or
    // the newEntries array doesn't already include an entry that
    // matches this one based on the provided matchFn, then we add
    // it to the beginning of our list
    if ((!opts.validateEntry || opts.validateEntry(entry) === true) &&
      (newEntries.length === 0 ||
        !newEntries.find((oldEntry) => matchFn(oldEntry, entry))))
      newEntries.unshift(entry)
  }

  // Re-serialize each surviving entry with its integrity line-hash,
  // in the same "<hash>\t<json>" format used by insert().
  const newIndex = '\n' + newEntries.map((entry) => {
    const stringified = JSON.stringify(entry)
    const hash = hashEntry(stringified)
    return `${hash}\t${stringified}`
  }).join('\n')

  const setup = async () => {
    const target = uniqueFilename(path.join(cache, 'tmp'), opts.tmpPrefix)
    await fixOwner.mkdirfix(cache, path.dirname(target))
    return {
      target,
      moved: false,
    }
  }

  const teardown = async (tmp) => {
    if (!tmp.moved)
      return rimraf(tmp.target)
  }

  const write = async (tmp) => {
    await writeFile(tmp.target, newIndex, { flag: 'wx' })
    await fixOwner.mkdirfix(cache, path.dirname(bucket))
    // we use @npmcli/move-file directly here because we
    // want to overwrite the existing file
    await moveFile(tmp.target, bucket)
    tmp.moved = true
    try {
      await fixOwner.chownr(cache, bucket)
    } catch (err) {
      // ENOENT here means the bucket vanished in a race; acceptable.
      if (err.code !== 'ENOENT')
        throw err
    }
  }

  // write the file atomically
  await disposer(setup(), teardown, write)

  // we reverse the list we generated such that the newest
  // entries come first in order to make looping through them easier
  // the true passed to formatEntry tells it to keep null
  // integrity values, if they made it this far it's because
  // validateEntry returned true, and as such we should return it
  return newEntries.reverse().map((entry) => formatEntry(cache, entry, true))
}
+
+module.exports.insert = insert
+
// Append an index entry for `key` -> `integrity` to its bucket file.
// Each line is "<sha1-of-json>\t<json>"; the hash lets readers discard
// lines corrupted by concurrent/partial writes. Resolves to the
// formatted entry. A null `integrity` acts as a deletion tombstone.
function insert (cache, key, integrity, opts = {}) {
  const { metadata, size } = opts
  const bucket = bucketPath(cache, key)
  const entry = {
    key,
    integrity: integrity && ssri.stringify(integrity),
    time: Date.now(),
    size,
    metadata,
  }
  return fixOwner
    .mkdirfix(cache, path.dirname(bucket))
    .then(() => {
      const stringified = JSON.stringify(entry)
      // NOTE - Cleverness ahoy!
      //
      // This works because it's tremendously unlikely for an entry to corrupt
      // another while still preserving the string length of the JSON in
      // question. So, we just slap the length in there and verify it on read.
      //
      // Thanks to @isaacs for the whiteboarding session that ended up with
      // this.
      return appendFile(bucket, `\n${hashEntry(stringified)}\t${stringified}`)
    })
    .then(() => fixOwner.chownr(cache, bucket))
    .catch((err) => {
      // Deliberate swallow: ENOENT is treated as success and the entry
      // is still formatted/returned below.
      if (err.code === 'ENOENT')
        return undefined

      throw err
      // There's a class of race conditions that happen when things get deleted
      // during fixOwner, or between the two mkdirfix/chownr calls.
      //
      // It's perfectly fine to just not bother in those cases and lie
      // that the index entry was written. Because it's a cache.
    })
    .then(() => {
      return formatEntry(cache, entry)
    })
}
+
+module.exports.insert.sync = insertSync
+
// Synchronous variant of insert(): same bucket line format, same
// tolerance of ENOENT during the ownership fix. Returns the formatted
// entry.
function insertSync (cache, key, integrity, opts = {}) {
  const { metadata, size } = opts
  const bucket = bucketPath(cache, key)
  const entry = {
    key,
    integrity: integrity && ssri.stringify(integrity),
    time: Date.now(),
    size,
    metadata,
  }
  fixOwner.mkdirfix.sync(cache, path.dirname(bucket))
  const stringified = JSON.stringify(entry)
  fs.appendFileSync(bucket, `\n${hashEntry(stringified)}\t${stringified}`)
  try {
    fixOwner.chownr.sync(cache, bucket)
  } catch (err) {
    // ENOENT = lost a race with a delete; the cache tolerates this.
    if (err.code !== 'ENOENT')
      throw err
  }
  return formatEntry(cache, entry)
}
+
+module.exports.find = find
+
// Look up the newest entry for `key`. The bucket is append-only, so
// the last matching line wins. Resolves to the formatted entry, or
// null when the bucket is missing or holds no match.
async function find (cache, key) {
  const bucket = bucketPath(cache, key)
  try {
    const entries = await bucketEntries(bucket)
    let latest = null
    for (const entry of entries) {
      if (entry && entry.key === key)
        latest = formatEntry(cache, entry)
    }
    return latest
  } catch (err) {
    if (err.code === 'ENOENT')
      return null
    throw err
  }
}
+
+module.exports.find.sync = findSync
+
// Synchronous variant of find(): newest matching bucket line wins;
// null when the bucket is missing or has no entry for `key`.
function findSync (cache, key) {
  const bucket = bucketPath(cache, key)
  try {
    let latest = null
    for (const entry of bucketEntriesSync(bucket)) {
      if (entry && entry.key === key)
        latest = formatEntry(cache, entry)
    }
    return latest
  } catch (err) {
    if (err.code === 'ENOENT')
      return null
    throw err
  }
}
+
+module.exports.delete = del
+
// Delete `key`. By default this appends a null-integrity tombstone
// entry; with opts.removeFully the whole bucket file is removed.
function del (cache, key, opts = {}) {
  if (opts.removeFully)
    return rimraf(bucketPath(cache, key))
  return insert(cache, key, null, opts)
}
+
+module.exports.delete.sync = delSync
+
// Synchronous variant of del(): tombstone by default, full bucket
// removal with opts.removeFully.
function delSync (cache, key, opts = {}) {
  if (opts.removeFully)
    return rimraf.sync(bucketPath(cache, key))
  return insertSync(cache, key, null, opts)
}
+
+module.exports.lsStream = lsStream
+
// Stream every live index entry as objects. Walks the two-level bucket
// directory tree, reads each bucket file, dedupes by key (newest line
// wins via the Map), and writes formatted entries to a Minipass object
// stream. ENOENT on individual buckets is ignored; any other error is
// emitted on the stream.
function lsStream (cache) {
  const indexDir = bucketDir(cache)
  const stream = new Minipass({ objectMode: true })

  readdirOrEmpty(indexDir).then(buckets => Promise.all(
    buckets.map(bucket => {
      // NOTE: this local `bucketPath` shadows the module-level
      // bucketPath() helper; it is just a joined directory path here.
      const bucketPath = path.join(indexDir, bucket)
      return readdirOrEmpty(bucketPath).then(subbuckets => Promise.all(
        subbuckets.map(subbucket => {
          const subbucketPath = path.join(bucketPath, subbucket)

          // "/cachename/<bucket 0xFF>/<bucket 0xFF>./*"
          return readdirOrEmpty(subbucketPath).then(entries => Promise.all(
            entries.map(entry => {
              const entryPath = path.join(subbucketPath, entry)
              return bucketEntries(entryPath).then(entries =>
                // using a Map here prevents duplicate keys from
                // showing up twice, I guess?
                entries.reduce((acc, entry) => {
                  acc.set(entry.key, entry)
                  return acc
                }, new Map())
              ).then(reduced => {
                // reduced is a map of key => entry
                for (const entry of reduced.values()) {
                  const formatted = formatEntry(cache, entry)
                  if (formatted)
                    stream.write(formatted)
                }
              }).catch(err => {
                // A bucket removed mid-walk is not an error.
                if (err.code === 'ENOENT')
                  return undefined
                throw err
              })
            })
          ))
        })
      ))
    })
  ))
    .then(
      () => stream.end(),
      err => stream.emit('error', err)
    )

  return stream
}
+
+module.exports.ls = ls
+
// Collect all index entries into a single { key: entry } object.
function ls (cache) {
  return lsStream(cache)
    .collect()
    .then((entries) => {
      const byKey = {}
      for (const entry of entries)
        byKey[entry.key] = entry
      return byKey
    })
}
+
+module.exports.bucketEntries = bucketEntries
+
// Read a bucket file and parse its valid entry lines.
async function bucketEntries (bucket, filter) {
  const data = await readFile(bucket, 'utf8')
  return _bucketEntries(data, filter)
}
+
+module.exports.bucketEntries.sync = bucketEntriesSync
+
// Synchronous variant of bucketEntries().
function bucketEntriesSync (bucket, filter) {
  return _bucketEntries(fs.readFileSync(bucket, 'utf8'), filter)
}
+
// Parse raw bucket-file text into entry objects. Each line is
// "<sha1>\t<json>"; lines that are empty, fail the checksum, or hold
// unparseable JSON are silently skipped (partial/corrupt appends).
// NOTE: the `filter` parameter is accepted but unused in this version.
function _bucketEntries (data, filter) {
  const entries = []
  for (const line of data.split('\n')) {
    if (!line)
      continue

    const [checksum, json] = line.split('\t')
    if (!json || hashEntry(json) !== checksum) {
      // Hash mismatch: corruption or tampering — discard the line.
      continue
    }

    let obj
    try {
      obj = JSON.parse(json)
    } catch (e) {
      // Entry is corrupted!
      continue
    }
    if (obj)
      entries.push(obj)
  }
  return entries
}
+
+module.exports.bucketDir = bucketDir
+
// Root of the index tree; versioned so incompatible index formats
// written by other cacache versions never collide.
function bucketDir (cache) {
  return path.join(cache, 'index-v' + indexV)
}
+
+module.exports.bucketPath = bucketPath
+
// Full path of the bucket file for `key`: the sha256 of the key is
// split into two 2-char directory levels plus a filename segment.
function bucketPath (cache, key) {
  return path.join(bucketDir(cache), ...hashToSegments(hashKey(key)))
}
+
+module.exports.hashKey = hashKey
+
// Hash a cache key for bucket addressing (sha256 hex).
function hashKey (key) {
  return hash(key, 'sha256')
}
+
+module.exports.hashEntry = hashEntry
+
// Per-line checksum of a serialized entry (sha1 hex) used to detect
// corrupted bucket lines on read.
function hashEntry (str) {
  return hash(str, 'sha1')
}
+
// Hex digest of `str` using the given hash algorithm name.
function hash (str, digest) {
  const hasher = crypto.createHash(digest)
  hasher.update(str)
  return hasher.digest('hex')
}
+
// Convert a raw index entry into the public shape, resolving the
// on-disk content path. Null-integrity entries are deletion
// tombstones and normally collapse to null; `keepAll` preserves them.
function formatEntry (cache, entry, keepAll) {
  // Treat null digests as deletions. They'll shadow any previous entries.
  if (!entry.integrity && !keepAll)
    return null

  const { key, integrity, size, time, metadata } = entry
  return {
    key,
    integrity,
    path: integrity ? contentPath(cache, integrity) : undefined,
    size,
    time,
    metadata,
  }
}
+
// readdir() that resolves to [] for missing or non-directory paths
// instead of rejecting; other errors still propagate.
function readdirOrEmpty (dir) {
  return readdir(dir).catch((err) => {
    if (err.code !== 'ENOENT' && err.code !== 'ENOTDIR')
      throw err
    return []
  })
}

+ 73 - 0
node_modules/cacache/lib/memoization.js

@@ -0,0 +1,73 @@
+'use strict'
+
+const LRU = require('lru-cache')
+
+const MAX_SIZE = 50 * 1024 * 1024 // 50MB
+const MAX_AGE = 3 * 60 * 1000
+
+const MEMOIZED = new LRU({
+  max: MAX_SIZE,
+  maxAge: MAX_AGE,
+  length: (entry, key) => key.startsWith('key:') ? entry.data.length : entry.length,
+})
+
+module.exports.clearMemoized = clearMemoized
+
// Empty the global memoization cache. Returns a snapshot object of
// everything that was evicted so callers can inspect it.
function clearMemoized () {
  const old = {}
  MEMOIZED.forEach((v, k) => {
    old[k] = v
  })
  MEMOIZED.reset()
  return old
}
+
+module.exports.put = put
+
// Memoize a full entry under "key:<cache>:<key>" (entry + data), and
// also index the raw data by its integrity digest.
function put (cache, entry, data, opts) {
  pickMem(opts).set(`key:${cache}:${entry.key}`, { entry, data })
  putDigest(cache, entry.integrity, data, opts)
}
+
+module.exports.put.byDigest = putDigest
+
// Memoize raw content under "digest:<cache>:<integrity>".
function putDigest (cache, integrity, data, opts) {
  pickMem(opts).set(`digest:${cache}:${integrity}`, data)
}
+
+module.exports.get = get
+
// Fetch a memoized { entry, data } pair by key, or undefined.
function get (cache, key, opts) {
  return pickMem(opts).get(`key:${cache}:${key}`)
}
+
+module.exports.get.byDigest = getDigest
+
// Fetch memoized raw content by integrity digest, or undefined.
function getDigest (cache, integrity, opts) {
  return pickMem(opts).get(`digest:${cache}:${integrity}`)
}
+
// Adapts a plain object to the get/set interface pickMem expects,
// so callers may pass `{ memoize: someObject }` and have entries
// stored as properties on that object.
class ObjProxy {
  constructor (obj) {
    this.obj = obj
  }

  get (key) {
    return this.obj[key]
  }

  set (key, val) {
    this.obj[key] = val
  }
}
+
// Choose where to memoize: the caller's cache-like object if it has
// get/set, a property-backed proxy for any other object, otherwise
// the shared module-level LRU.
function pickMem (opts) {
  const memoize = opts && opts.memoize
  if (!memoize)
    return MEMOIZED
  if (memoize.get && memoize.set)
    return memoize
  if (typeof memoize === 'object')
    return new ObjProxy(memoize)
  return MEMOIZED
}

+ 30 - 0
node_modules/cacache/lib/util/disposer.js

@@ -0,0 +1,30 @@
+'use strict'
+
+module.exports.disposer = disposer
+
// Bluebird-style disposer: await the resource (creatorFn is a
// promise), run fn with it, then ALWAYS run disposerFn. The original
// result or error is propagated after disposal; a disposer failure
// replaces it with the disposer's error.
async function disposer (creatorFn, disposerFn, fn) {
  const resource = await creatorFn

  let failed = false
  let outcome
  try {
    // fn(resource) may throw synchronously; the await/try handles both.
    outcome = await fn(resource)
  } catch (err) {
    failed = true
    outcome = err
  }

  // If disposal rejects, that rejection wins (crash the operation).
  await disposerFn(resource)

  if (failed)
    throw outcome
  return outcome
}

+ 142 - 0
node_modules/cacache/lib/util/fix-owner.js

@@ -0,0 +1,142 @@
+'use strict'
+
+const util = require('util')
+
+const chownr = util.promisify(require('chownr'))
+const mkdirp = require('mkdirp')
+const inflight = require('promise-inflight')
+const inferOwner = require('infer-owner')
+
+// Memoize getuid()/getgid() calls.
+// patch process.setuid/setgid to invalidate cached value on change
// Memoize getuid()/getgid() calls.
// patch process.setuid/setgid to invalidate cached value on change
// NOTE(review): the patched setter restores the original
// process.setuid/setgid before delegating, so only the FIRST
// setuid/setgid call after caching invalidates `self` — confirm this
// one-shot behavior is intended.
const self = { uid: null, gid: null }
const getSelf = () => {
  if (typeof self.uid !== 'number') {
    self.uid = process.getuid()
    const setuid = process.setuid
    process.setuid = (uid) => {
      self.uid = null
      process.setuid = setuid
      return process.setuid(uid)
    }
  }
  if (typeof self.gid !== 'number') {
    self.gid = process.getgid()
    const setgid = process.setgid
    process.setgid = (gid) => {
      self.gid = null
      process.setgid = setgid
      return process.setgid(gid)
    }
  }
}
+
+module.exports.chownr = fixOwner
+
// Recursively chown `filepath` to the inferred owner of the cache
// directory. No-op on platforms without getuid, or when not running
// as root (uid 0). ENOENT during chown is swallowed (races with
// deletion are fine for a cache); concurrent fixes on the same path
// are deduplicated via promise-inflight.
function fixOwner (cache, filepath) {
  if (!process.getuid) {
    // This platform doesn't need ownership fixing
    return Promise.resolve()
  }

  getSelf()
  if (self.uid !== 0) {
    // almost certainly can't chown anyway
    return Promise.resolve()
  }

  return Promise.resolve(inferOwner(cache)).then((owner) => {
    const { uid, gid } = owner

    // No need to override if it's already what we used.
    if (self.uid === uid && self.gid === gid)
      return

    return inflight('fixOwner: fixing ownership on ' + filepath, () =>
      chownr(
        filepath,
        // Fall back to our own ids when the inferred owner is missing.
        typeof uid === 'number' ? uid : self.uid,
        typeof gid === 'number' ? gid : self.gid
      ).catch((err) => {
        if (err.code === 'ENOENT')
          return null

        throw err
      })
    )
  })
}
+
+module.exports.chownr.sync = fixOwnerSync
+
// Synchronous variant of fixOwner(): chown `filepath` to the cache
// directory's inferred owner; no-op off-POSIX or when not root;
// ENOENT is swallowed.
function fixOwnerSync (cache, filepath) {
  if (!process.getuid) {
    // This platform doesn't need ownership fixing
    return
  }
  const { uid, gid } = inferOwner.sync(cache)
  getSelf()
  if (self.uid !== 0) {
    // almost certainly can't chown anyway
    return
  }

  if (self.uid === uid && self.gid === gid) {
    // No need to override if it's already what we used.
    return
  }
  try {
    chownr.sync(
      filepath,
      // Fall back to our own ids when the inferred owner is missing.
      typeof uid === 'number' ? uid : self.uid,
      typeof gid === 'number' ? gid : self.gid
    )
  } catch (err) {
    // only catch ENOENT, any other error is a problem.
    if (err.code === 'ENOENT')
      return null

    throw err
  }
}
+
+module.exports.mkdirfix = mkdirfix
+
// Create directory `p` (mkdirp) and fix ownership of whatever was
// newly created. Resolves to the topmost created directory, or null
// when it already existed (EEXIST -> just re-fix ownership of `p`).
//
// Fix: dropped the trailing `cb` parameter — it was never referenced
// in the body (a leftover from a callback-style API) and the function
// is promise-returning. Removing an unused trailing parameter is
// backward compatible for all callers.
function mkdirfix (cache, p) {
  // we have to infer the owner _before_ making the directory, even though
  // we aren't going to use the results, since the cache itself might not
  // exist yet.  If we mkdirp it, then our current uid/gid will be assumed
  // to be correct if it creates the cache folder in the process.
  return Promise.resolve(inferOwner(cache)).then(() => {
    return mkdirp(p)
      .then((made) => {
        if (made)
          return fixOwner(cache, made).then(() => made)
      })
      .catch((err) => {
        if (err.code === 'EEXIST')
          return fixOwner(cache, p).then(() => null)

        throw err
      })
  })
}
+
+module.exports.mkdirfix.sync = mkdirfixSync
+
// Synchronous variant of mkdirfix(): create `p`, fix ownership of the
// newly-created root, return it; on EEXIST re-fix ownership of `p`
// and return null. Returns undefined when mkdirp.sync made nothing
// and did not throw.
function mkdirfixSync (cache, p) {
  try {
    // inferOwner.sync primes the owner cache before the dir exists;
    // see the comment in mkdirfix() above.
    inferOwner.sync(cache)
    const made = mkdirp.sync(p)
    if (made) {
      fixOwnerSync(cache, made)
      return made
    }
  } catch (err) {
    if (err.code === 'EEXIST') {
      fixOwnerSync(cache, p)
      return null
    } else
      throw err
  }
}

+ 7 - 0
node_modules/cacache/lib/util/hash-to-segments.js

@@ -0,0 +1,7 @@
+'use strict'
+
+module.exports = hashToSegments
+
// Split a hex hash into [2, 2, rest] path segments so content files
// fan out across two directory levels instead of one huge directory.
function hashToSegments (hash) {
  const first = hash.slice(0, 2)
  const second = hash.slice(2, 4)
  const rest = hash.slice(4)
  return [first, second, rest]
}

+ 67 - 0
node_modules/cacache/lib/util/move-file.js

@@ -0,0 +1,67 @@
+'use strict'
+
+const fs = require('fs')
+const util = require('util')
+const chmod = util.promisify(fs.chmod)
+const unlink = util.promisify(fs.unlink)
+const stat = util.promisify(fs.stat)
+const move = require('@npmcli/move-file')
+const pinflight = require('promise-inflight')
+
+module.exports = moveFile
+
// Move `src` into place at `dest` atomically: hardlink dest to src,
// then unlink src and mark dest read-only. "Already exists"-style
// errors are treated as success (content-addressed files are
// interchangeable). On link failure the fallback stat/rename path is
// deduplicated per-destination via promise-inflight.
function moveFile (src, dest) {
  const isWindows = global.__CACACHE_TEST_FAKE_WINDOWS__ ||
    process.platform === 'win32'

  // This isn't quite an fs.rename -- the assumption is that
  // if `dest` already exists, and we get certain errors while
  // trying to move it, we should just not bother.
  //
  // In the case of cache corruption, users will receive an
  // EINTEGRITY error elsewhere, and can remove the offending
  // content their own way.
  //
  // Note that, as the name suggests, this strictly only supports file moves.
  return new Promise((resolve, reject) => {
    fs.link(src, dest, (err) => {
      if (err) {
        if (isWindows && err.code === 'EPERM') {
          // XXX This is a really weird way to handle this situation, as it
          // results in the src file being deleted even though the dest
          // might not exist.  Since we pretty much always write files to
          // deterministic locations based on content hash, this is likely
          // ok (or at worst, just ends in a future cache miss).  But it would
          // be worth investigating at some time in the future if this is
          // really what we want to do here.
          return resolve()
        } else if (err.code === 'EEXIST' || err.code === 'EBUSY') {
          // file already exists, so whatever
          return resolve()
        } else
          return reject(err)
      } else
        return resolve()
    })
  })
    .then(() => {
      // content should never change for any reason, so make it read-only
      return Promise.all([
        unlink(src),
        !isWindows && chmod(dest, '0444'),
      ])
    })
    .catch(() => {
      // Link or cleanup failed; fall back to checking whether dest
      // already exists, and only rename/copy when it doesn't.
      return pinflight('cacache-move-file:' + dest, () => {
        return stat(dest).catch((err) => {
          if (err.code !== 'ENOENT') {
            // Something else is wrong here. Bail bail bail
            throw err
          }
          // file doesn't already exist! let's try a rename -> copy fallback
          // only delete if it successfully copies
          return move(src, dest)
        })
      })
    })
}

+ 35 - 0
node_modules/cacache/lib/util/tmp.js

@@ -0,0 +1,35 @@
+'use strict'
+
+const fs = require('@npmcli/fs')
+
+const fixOwner = require('./fix-owner')
+const path = require('path')
+
+module.exports.mkdir = mktmpdir
+
// Create a fresh unique temp directory under <cache>/tmp, inheriting
// ownership from the parent. Resolves to the created path.
function mktmpdir (cache, opts = {}) {
  const tmpDir = path.join(cache, 'tmp')
  return fs.mkdir(tmpDir, { recursive: true, owner: 'inherit' }).then(() => {
    // do not use path.join() here: it would drop the trailing
    // separator when tmpPrefix is unset
    const template = `${tmpDir}${path.sep}${opts.tmpPrefix || ''}`
    return fs.mkdtemp(template, { owner: 'inherit' })
  })
}
+
+module.exports.withTmp = withTmp
+
// Run `cb` with a temp dir under <cache>/tmp that is cleaned up when
// the returned promise settles. Supports both withTmp(cache, cb) and
// withTmp(cache, opts, cb) call shapes.
function withTmp (cache, opts, cb) {
  if (!cb) {
    // Two-argument form: shift opts into cb.
    cb = opts
    opts = {}
  }
  return fs.withTempDir(path.join(cache, 'tmp'), cb, opts)
}
+
+module.exports.fix = fixtmpdir
+
// Fix ownership of the <cache>/tmp directory.
//
// Fix: `fixOwner` here is the module object from require('./fix-owner'),
// which only attaches named exports (`.chownr`, `.mkdirfix`, ...) to
// `module.exports` — the module itself is not callable, so the original
// `fixOwner(cache, ...)` threw "fixOwner is not a function". Invoke the
// ownership-fixing export explicitly.
function fixtmpdir (cache) {
  return fixOwner.chownr(cache, path.join(cache, 'tmp'))
}

+ 287 - 0
node_modules/cacache/lib/verify.js

@@ -0,0 +1,287 @@
+'use strict'
+
+const util = require('util')
+
+const pMap = require('p-map')
+const contentPath = require('./content/path')
+const fixOwner = require('./util/fix-owner')
+const fs = require('fs')
+const fsm = require('fs-minipass')
+const glob = util.promisify(require('glob'))
+const index = require('./entry-index')
+const path = require('path')
+const rimraf = util.promisify(require('rimraf'))
+const ssri = require('ssri')
+
+const hasOwnProperty = (obj, key) =>
+  Object.prototype.hasOwnProperty.call(obj, key)
+
+const stat = util.promisify(fs.stat)
+const truncate = util.promisify(fs.truncate)
+const writeFile = util.promisify(fs.writeFile)
+const readFile = util.promisify(fs.readFile)
+
// Normalize verify() options: modest default parallelism and a no-op
// "silly"-level logger; caller-supplied options take precedence.
function verifyOpts (opts) {
  return {
    concurrency: 20,
    log: { silly () {} },
    ...opts,
  }
}
+
+module.exports = verify
+
// Run the full cache-verification pipeline. Steps execute strictly in
// sequence; each returns a partial stats object that is merged into a
// cumulative stats accumulator, and each step's wall-clock duration is
// recorded under stats.runTime[<step name>]. Resolves to the final
// stats (including runTime.total).
function verify (cache, opts) {
  opts = verifyOpts(opts)
  opts.log.silly('verify', 'verifying cache at', cache)

  const steps = [
    markStartTime,
    fixPerms,
    garbageCollect,
    rebuildIndex,
    cleanTmp,
    writeVerifile,
    markEndTime,
  ]

  return steps
    .reduce((promise, step, i) => {
      const label = step.name
      const start = new Date()
      return promise.then((stats) => {
        return step(cache, opts).then((s) => {
          // Merge the step's partial stats (if any) into the accumulator.
          s &&
            Object.keys(s).forEach((k) => {
              stats[k] = s[k]
            })
          const end = new Date()
          if (!stats.runTime)
            stats.runTime = {}

          stats.runTime[label] = end - start
          return Promise.resolve(stats)
        })
      })
    }, Promise.resolve({}))
    .then((stats) => {
      // startTime/endTime come from the markStartTime/markEndTime steps.
      stats.runTime.total = stats.endTime - stats.startTime
      opts.log.silly(
        'verify',
        'verification finished for',
        cache,
        'in',
        `${stats.runTime.total}ms`
      )
      return stats
    })
}
+
// Verification step: contributes `startTime` to the stats object.
async function markStartTime (cache, opts) {
  return { startTime: new Date() }
}
+
// Verification step: contributes `endTime` to the stats object.
async function markEndTime (cache, opts) {
  return { endTime: new Date() }
}
+
// Verification step: ensure the cache root exists, then recursively
// fix its ownership. Contributes no stats (resolves null).
function fixPerms (cache, opts) {
  opts.log.silly('verify', 'fixing cache permissions')
  // TODO - fix file permissions too
  return fixOwner
    .mkdirfix(cache, cache)
    .then(() => fixOwner.chownr(cache, cache))
    .then(() => null)
}
+
+// Implements a naive mark-and-sweep tracing garbage collector.
+//
+// The algorithm is basically as follows:
+// 1. Read (and filter) all index entries ("pointers")
+// 2. Mark each integrity value as "live"
+// 3. Read entire filesystem tree in `content-vX/` dir
+// 4. If content is live, verify its checksum and delete it if it fails
+// 5. If content is not marked as live, rimraf it.
+//
// Implements a naive mark-and-sweep tracing garbage collector.
//
// The algorithm is basically as follows:
// 1. Read (and filter) all index entries ("pointers")
// 2. Mark each integrity value as "live"
// 3. Read entire filesystem tree in `content-vX/` dir
// 4. If content is live, verify its checksum and delete it if it fails
// 5. If content is not marked as live, rimraf it.
//
// Resolves to counters: verifiedContent, reclaimedCount, reclaimedSize,
// badContentCount, keptSize.
function garbageCollect (cache, opts) {
  opts.log.silly('verify', 'garbage collecting content')
  const indexStream = index.lsStream(cache)
  const liveContent = new Set()
  indexStream.on('data', (entry) => {
    // opts.filter lets callers exclude entries from the "live" set.
    if (opts.filter && !opts.filter(entry))
      return

    liveContent.add(entry.integrity.toString())
  })
  return new Promise((resolve, reject) => {
    indexStream.on('end', resolve).on('error', reject)
  }).then(() => {
    const contentDir = contentPath.contentDir(cache)
    return glob(path.join(contentDir, '**'), {
      follow: false,
      nodir: true,
      nosort: true,
    }).then((files) => {
      return Promise.resolve({
        verifiedContent: 0,
        reclaimedCount: 0,
        reclaimedSize: 0,
        badContentCount: 0,
        keptSize: 0,
      }).then((stats) =>
        pMap(
          files,
          (f) => {
            // Reconstruct the integrity value from the on-disk layout:
            // .../content-vX/<algo>/<aa>/<bb>/<rest-of-hex>
            const split = f.split(/[/\\]/)
            const digest = split.slice(split.length - 3).join('')
            const algo = split[split.length - 4]
            const integrity = ssri.fromHex(digest, algo)
            if (liveContent.has(integrity.toString())) {
              return verifyContent(f, integrity).then((info) => {
                if (!info.valid) {
                  stats.reclaimedCount++
                  stats.badContentCount++
                  stats.reclaimedSize += info.size
                } else {
                  stats.verifiedContent++
                  stats.keptSize += info.size
                }
                return stats
              })
            } else {
              // No entries refer to this content. We can delete.
              stats.reclaimedCount++
              return stat(f).then((s) => {
                return rimraf(f).then(() => {
                  stats.reclaimedSize += s.size
                  return stats
                })
              })
            }
          },
          { concurrency: opts.concurrency }
        ).then(() => stats)
      )
    })
  })
}
+
// Check one content file against its expected integrity. On checksum
// mismatch the file is DELETED and { valid: false } returned; a file
// missing entirely yields { size: 0, valid: false }. Other errors
// propagate.
function verifyContent (filepath, sri) {
  return stat(filepath)
    .then((s) => {
      const contentInfo = {
        size: s.size,
        valid: true,
      }
      return ssri
        .checkStream(new fsm.ReadStream(filepath), sri)
        .catch((err) => {
          if (err.code !== 'EINTEGRITY')
            throw err

          // Corrupt content: remove it so it can be re-fetched later.
          return rimraf(filepath).then(() => {
            contentInfo.valid = false
          })
        })
        .then(() => contentInfo)
    })
    .catch((err) => {
      if (err.code === 'ENOENT')
        return { size: 0, valid: false }

      throw err
    })
}
+
// Rewrite every index bucket from the deduped ls() snapshot, dropping
// entries rejected by opts.filter. Entries are grouped per bucket file
// (keys can collide into the same bucket); each group carries its
// bucket path on the non-enumerable-by-convention `_path` property.
// Resolves to { missingContent, rejectedEntries, totalEntries }.
function rebuildIndex (cache, opts) {
  opts.log.silly('verify', 'rebuilding index')
  return index.ls(cache).then((entries) => {
    const stats = {
      missingContent: 0,
      rejectedEntries: 0,
      totalEntries: 0,
    }
    const buckets = {}
    for (const k in entries) {
      /* istanbul ignore else */
      if (hasOwnProperty(entries, k)) {
        const hashed = index.hashKey(k)
        const entry = entries[k]
        const excluded = opts.filter && !opts.filter(entry)
        excluded && stats.rejectedEntries++
        if (buckets[hashed] && !excluded)
          buckets[hashed].push(entry)
        else if (buckets[hashed] && excluded) {
          // skip
        } else if (excluded) {
          // Excluded but bucket unseen: record an empty bucket so the
          // stale file still gets truncated in rebuildBucket.
          buckets[hashed] = []
          buckets[hashed]._path = index.bucketPath(cache, k)
        } else {
          buckets[hashed] = [entry]
          buckets[hashed]._path = index.bucketPath(cache, k)
        }
      }
    }
    return pMap(
      Object.keys(buckets),
      (key) => {
        return rebuildBucket(cache, buckets[key], stats, opts)
      },
      { concurrency: opts.concurrency }
    ).then(() => stats)
  })
}
+
// Truncate one bucket file and re-insert each surviving entry whose
// content still exists on disk. Entries with missing content are
// counted as rejected/missing instead of re-inserted.
function rebuildBucket (cache, bucket, stats, opts) {
  return truncate(bucket._path).then(() => {
    // This needs to be serialized because cacache explicitly
    // lets very racy bucket conflicts clobber each other.
    return bucket.reduce((promise, entry) => {
      return promise.then(() => {
        const content = contentPath(cache, entry.integrity)
        // stat() confirms the content file exists before re-indexing.
        return stat(content)
          .then(() => {
            return index
              .insert(cache, entry.key, entry.integrity, {
                metadata: entry.metadata,
                size: entry.size,
              })
              .then(() => {
                stats.totalEntries++
              })
          })
          .catch((err) => {
            if (err.code === 'ENOENT') {
              stats.rejectedEntries++
              stats.missingContent++
              return
            }
            throw err
          })
      })
    }, Promise.resolve())
  })
}
+
// Verification step: blow away the <cache>/tmp scratch directory.
function cleanTmp (cache, opts) {
  opts.log.silly('verify', 'cleaning tmp directory')
  return rimraf(path.join(cache, 'tmp'))
}
+
// Record the verification timestamp in <cache>/_lastverified
// (epoch-milliseconds as text), then fix its ownership.
//
// Fix: the original did `try { return writeFile(...) } finally { ... }`,
// which returned the PENDING promise from inside try — the finally
// block's chownr.sync ran immediately, before the async write had
// completed (or even created the file). Awaiting the write keeps the
// chown ordered after it while still returning a promise to the
// verify() pipeline.
async function writeVerifile (cache, opts) {
  const verifile = path.join(cache, '_lastverified')
  opts.log.silly('verify', 'writing verifile to ' + verifile)
  try {
    await writeFile(verifile, '' + +new Date())
  } finally {
    fixOwner.chownr.sync(cache, verifile)
  }
}
+
+module.exports.lastRun = lastRun
+
// Read back the timestamp written by writeVerifile() as a Date.
async function lastRun (cache) {
  const data = await readFile(path.join(cache, '_lastverified'), 'utf8')
  return new Date(+data)
}

+ 6 - 0
node_modules/cacache/ls.js

@@ -0,0 +1,6 @@
+'use strict'
+
+const index = require('./lib/entry-index')
+
+module.exports = index.ls
+module.exports.stream = index.lsStream

+ 112 - 0
node_modules/cacache/package.json

@@ -0,0 +1,112 @@
+{
+  "_from": "cacache@^15.0.5",
+  "_id": "cacache@15.3.0",
+  "_inBundle": false,
+  "_integrity": "sha512-VVdYzXEn+cnbXpFgWs5hTT7OScegHVmLhJIR8Ufqk3iFD6A6j5iSX1KuBTfNEv4tdJWE2PzA6IVFtcLC7fN9wQ==",
+  "_location": "/cacache",
+  "_phantomChildren": {},
+  "_requested": {
+    "type": "range",
+    "registry": true,
+    "raw": "cacache@^15.0.5",
+    "name": "cacache",
+    "escapedName": "cacache",
+    "rawSpec": "^15.0.5",
+    "saveSpec": null,
+    "fetchSpec": "^15.0.5"
+  },
+  "_requiredBy": [
+    "/compression-webpack-plugin"
+  ],
+  "_resolved": "https://registry.npmmirror.com/cacache/-/cacache-15.3.0.tgz",
+  "_shasum": "dc85380fb2f556fe3dda4c719bfa0ec875a7f1eb",
+  "_spec": "cacache@^15.0.5",
+  "_where": "E:\\psy_web_share\\node_modules\\compression-webpack-plugin",
+  "bugs": {
+    "url": "https://github.com/npm/cacache/issues"
+  },
+  "bundleDependencies": false,
+  "cache-version": {
+    "content": "2",
+    "index": "5"
+  },
+  "dependencies": {
+    "@npmcli/fs": "^1.0.0",
+    "@npmcli/move-file": "^1.0.1",
+    "chownr": "^2.0.0",
+    "fs-minipass": "^2.0.0",
+    "glob": "^7.1.4",
+    "infer-owner": "^1.0.4",
+    "lru-cache": "^6.0.0",
+    "minipass": "^3.1.1",
+    "minipass-collect": "^1.0.2",
+    "minipass-flush": "^1.0.5",
+    "minipass-pipeline": "^1.2.2",
+    "mkdirp": "^1.0.3",
+    "p-map": "^4.0.0",
+    "promise-inflight": "^1.0.1",
+    "rimraf": "^3.0.2",
+    "ssri": "^8.0.1",
+    "tar": "^6.0.2",
+    "unique-filename": "^1.1.1"
+  },
+  "deprecated": false,
+  "description": "Fast, fault-tolerant, cross-platform, disk-based, data-agnostic, content-addressable cache.",
+  "devDependencies": {
+    "@npmcli/lint": "^1.0.1",
+    "benchmark": "^2.1.4",
+    "chalk": "^4.0.0",
+    "require-inject": "^1.4.4",
+    "tacks": "^1.3.0",
+    "tap": "^15.0.9"
+  },
+  "engines": {
+    "node": ">= 10"
+  },
+  "files": [
+    "*.js",
+    "lib"
+  ],
+  "homepage": "https://github.com/npm/cacache#readme",
+  "keywords": [
+    "cache",
+    "caching",
+    "content-addressable",
+    "sri",
+    "sri hash",
+    "subresource integrity",
+    "cache",
+    "storage",
+    "store",
+    "file store",
+    "filesystem",
+    "disk cache",
+    "disk storage"
+  ],
+  "license": "ISC",
+  "main": "index.js",
+  "name": "cacache",
+  "repository": {
+    "type": "git",
+    "url": "git+https://github.com/npm/cacache.git"
+  },
+  "scripts": {
+    "benchmarks": "node test/benchmarks",
+    "coverage": "tap",
+    "lint": "npm run npmclilint -- \"*.*js\" \"lib/**/*.*js\" \"test/**/*.*js\"",
+    "lintfix": "npm run lint -- --fix",
+    "npmclilint": "npmcli-lint",
+    "postsnap": "npm run lintfix --",
+    "postversion": "npm publish",
+    "prepublishOnly": "git push origin --follow-tags",
+    "preversion": "npm test",
+    "snap": "tap",
+    "test": "tap",
+    "test-docker": "docker run -it --rm --name pacotest -v \"$PWD\":/tmp -w /tmp node:latest npm test"
+  },
+  "tap": {
+    "100": true,
+    "test-regex": "test/[^/]*.js"
+  },
+  "version": "15.3.0"
+}

+ 83 - 0
node_modules/cacache/put.js

@@ -0,0 +1,83 @@
+'use strict'
+
+const index = require('./lib/entry-index')
+const memo = require('./lib/memoization')
+const write = require('./lib/content/write')
+const Flush = require('minipass-flush')
+const { PassThrough } = require('minipass-collect')
+const Pipeline = require('minipass-pipeline')
+
+const putOpts = (opts) => ({
+  algorithms: ['sha512'],
+  ...opts,
+})
+
+module.exports = putData
+
+function putData (cache, key, data, opts = {}) {
+  const { memoize } = opts
+  opts = putOpts(opts)
+  return write(cache, data, opts).then((res) => {
+    return index
+      .insert(cache, key, res.integrity, { ...opts, size: res.size })
+      .then((entry) => {
+        if (memoize)
+          memo.put(cache, entry, data, opts)
+
+        return res.integrity
+      })
+  })
+}
+
+module.exports.stream = putStream
+
+function putStream (cache, key, opts = {}) {
+  const { memoize } = opts
+  opts = putOpts(opts)
+  let integrity
+  let size
+
+  let memoData
+  const pipeline = new Pipeline()
+  // first item in the pipeline is the memoizer, because we need
+  // that to end first and get the collected data.
+  if (memoize) {
+    const memoizer = new PassThrough().on('collect', data => {
+      memoData = data
+    })
+    pipeline.push(memoizer)
+  }
+
+  // contentStream is a write-only, not a passthrough
+  // no data comes out of it.
+  const contentStream = write.stream(cache, opts)
+    .on('integrity', (int) => {
+      integrity = int
+    })
+    .on('size', (s) => {
+      size = s
+    })
+
+  pipeline.push(contentStream)
+
+  // last but not least, we write the index and emit hash and size,
+  // and memoize if we're doing that
+  pipeline.push(new Flush({
+    flush () {
+      return index
+        .insert(cache, key, integrity, { ...opts, size })
+        .then((entry) => {
+          if (memoize && memoData)
+            memo.put(cache, entry, memoData, opts)
+
+          if (integrity)
+            pipeline.emit('integrity', integrity)
+
+          if (size)
+            pipeline.emit('size', size)
+        })
+    },
+  }))
+
+  return pipeline
+}

+ 31 - 0
node_modules/cacache/rm.js

@@ -0,0 +1,31 @@
+'use strict'
+
+const util = require('util')
+
+const index = require('./lib/entry-index')
+const memo = require('./lib/memoization')
+const path = require('path')
+const rimraf = util.promisify(require('rimraf'))
+const rmContent = require('./lib/content/rm')
+
+module.exports = entry
+module.exports.entry = entry
+
+function entry (cache, key, opts) {
+  memo.clearMemoized()
+  return index.delete(cache, key, opts)
+}
+
+module.exports.content = content
+
+function content (cache, integrity) {
+  memo.clearMemoized()
+  return rmContent(cache, integrity)
+}
+
+module.exports.all = all
+
+function all (cache) {
+  memo.clearMemoized()
+  return rimraf(path.join(cache, '*(content-*|index-*)'))
+}

+ 3 - 0
node_modules/cacache/verify.js

@@ -0,0 +1,3 @@
+'use strict'
+
+module.exports = require('./lib/verify')

+ 15 - 0
node_modules/chownr/LICENSE

@@ -0,0 +1,15 @@
+The ISC License
+
+Copyright (c) Isaac Z. Schlueter and Contributors
+
+Permission to use, copy, modify, and/or distribute this software for any
+purpose with or without fee is hereby granted, provided that the above
+copyright notice and this permission notice appear in all copies.
+
+THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
+IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.

+ 3 - 0
node_modules/chownr/README.md

@@ -0,0 +1,3 @@
+Like `chown -R`.
+
+Takes the same arguments as `fs.chown()`

+ 167 - 0
node_modules/chownr/chownr.js

@@ -0,0 +1,167 @@
+'use strict'
+const fs = require('fs')
+const path = require('path')
+
+/* istanbul ignore next */
+const LCHOWN = fs.lchown ? 'lchown' : 'chown'
+/* istanbul ignore next */
+const LCHOWNSYNC = fs.lchownSync ? 'lchownSync' : 'chownSync'
+
+/* istanbul ignore next */
+const needEISDIRHandled = fs.lchown &&
+  !process.version.match(/v1[1-9]+\./) &&
+  !process.version.match(/v10\.[6-9]/)
+
+const lchownSync = (path, uid, gid) => {
+  try {
+    return fs[LCHOWNSYNC](path, uid, gid)
+  } catch (er) {
+    if (er.code !== 'ENOENT')
+      throw er
+  }
+}
+
+/* istanbul ignore next */
+const chownSync = (path, uid, gid) => {
+  try {
+    return fs.chownSync(path, uid, gid)
+  } catch (er) {
+    if (er.code !== 'ENOENT')
+      throw er
+  }
+}
+
+/* istanbul ignore next */
+const handleEISDIR =
+  needEISDIRHandled ? (path, uid, gid, cb) => er => {
+    // Node prior to v10 had a very questionable implementation of
+    // fs.lchown, which would always try to call fs.open on a directory
+    // Fall back to fs.chown in those cases.
+    if (!er || er.code !== 'EISDIR')
+      cb(er)
+    else
+      fs.chown(path, uid, gid, cb)
+  }
+  : (_, __, ___, cb) => cb
+
+/* istanbul ignore next */
+const handleEISDirSync =
+  needEISDIRHandled ? (path, uid, gid) => {
+    try {
+      return lchownSync(path, uid, gid)
+    } catch (er) {
+      if (er.code !== 'EISDIR')
+        throw er
+      chownSync(path, uid, gid)
+    }
+  }
+  : (path, uid, gid) => lchownSync(path, uid, gid)
+
+// fs.readdir could only accept an options object as of node v6
+const nodeVersion = process.version
+let readdir = (path, options, cb) => fs.readdir(path, options, cb)
+let readdirSync = (path, options) => fs.readdirSync(path, options)
+/* istanbul ignore next */
+if (/^v4\./.test(nodeVersion))
+  readdir = (path, options, cb) => fs.readdir(path, cb)
+
+const chown = (cpath, uid, gid, cb) => {
+  fs[LCHOWN](cpath, uid, gid, handleEISDIR(cpath, uid, gid, er => {
+    // Skip ENOENT error
+    cb(er && er.code !== 'ENOENT' ? er : null)
+  }))
+}
+
+const chownrKid = (p, child, uid, gid, cb) => {
+  if (typeof child === 'string')
+    return fs.lstat(path.resolve(p, child), (er, stats) => {
+      // Skip ENOENT error
+      if (er)
+        return cb(er.code !== 'ENOENT' ? er : null)
+      stats.name = child
+      chownrKid(p, stats, uid, gid, cb)
+    })
+
+  if (child.isDirectory()) {
+    chownr(path.resolve(p, child.name), uid, gid, er => {
+      if (er)
+        return cb(er)
+      const cpath = path.resolve(p, child.name)
+      chown(cpath, uid, gid, cb)
+    })
+  } else {
+    const cpath = path.resolve(p, child.name)
+    chown(cpath, uid, gid, cb)
+  }
+}
+
+
+const chownr = (p, uid, gid, cb) => {
+  readdir(p, { withFileTypes: true }, (er, children) => {
+    // any error other than ENOTDIR or ENOTSUP means it's not readable,
+    // or doesn't exist.  give up.
+    if (er) {
+      if (er.code === 'ENOENT')
+        return cb()
+      else if (er.code !== 'ENOTDIR' && er.code !== 'ENOTSUP')
+        return cb(er)
+    }
+    if (er || !children.length)
+      return chown(p, uid, gid, cb)
+
+    let len = children.length
+    let errState = null
+    const then = er => {
+      if (errState)
+        return
+      if (er)
+        return cb(errState = er)
+      if (-- len === 0)
+        return chown(p, uid, gid, cb)
+    }
+
+    children.forEach(child => chownrKid(p, child, uid, gid, then))
+  })
+}
+
+const chownrKidSync = (p, child, uid, gid) => {
+  if (typeof child === 'string') {
+    try {
+      const stats = fs.lstatSync(path.resolve(p, child))
+      stats.name = child
+      child = stats
+    } catch (er) {
+      if (er.code === 'ENOENT')
+        return
+      else
+        throw er
+    }
+  }
+
+  if (child.isDirectory())
+    chownrSync(path.resolve(p, child.name), uid, gid)
+
+  handleEISDirSync(path.resolve(p, child.name), uid, gid)
+}
+
+const chownrSync = (p, uid, gid) => {
+  let children
+  try {
+    children = readdirSync(p, { withFileTypes: true })
+  } catch (er) {
+    if (er.code === 'ENOENT')
+      return
+    else if (er.code === 'ENOTDIR' || er.code === 'ENOTSUP')
+      return handleEISDirSync(p, uid, gid)
+    else
+      throw er
+  }
+
+  if (children && children.length)
+    children.forEach(child => chownrKidSync(p, child, uid, gid))
+
+  return handleEISDirSync(p, uid, gid)
+}
+
+module.exports = chownr
+chownr.sync = chownrSync

+ 66 - 0
node_modules/chownr/package.json

@@ -0,0 +1,66 @@
+{
+  "_from": "chownr@^2.0.0",
+  "_id": "chownr@2.0.0",
+  "_inBundle": false,
+  "_integrity": "sha512-bIomtDF5KGpdogkLd9VspvFzk9KfpyyGlS8YFVZl7TGPBHL5snIOnxeshwVgPteQ9b4Eydl+pVbIyE1DcvCWgQ==",
+  "_location": "/chownr",
+  "_phantomChildren": {},
+  "_requested": {
+    "type": "range",
+    "registry": true,
+    "raw": "chownr@^2.0.0",
+    "name": "chownr",
+    "escapedName": "chownr",
+    "rawSpec": "^2.0.0",
+    "saveSpec": null,
+    "fetchSpec": "^2.0.0"
+  },
+  "_requiredBy": [
+    "/cacache",
+    "/tar"
+  ],
+  "_resolved": "https://registry.npmmirror.com/chownr/-/chownr-2.0.0.tgz",
+  "_shasum": "15bfbe53d2eab4cf70f18a8cd68ebe5b3cb1dece",
+  "_spec": "chownr@^2.0.0",
+  "_where": "E:\\psy_web_share\\node_modules\\cacache",
+  "author": {
+    "name": "Isaac Z. Schlueter",
+    "email": "i@izs.me",
+    "url": "http://blog.izs.me/"
+  },
+  "bugs": {
+    "url": "https://github.com/isaacs/chownr/issues"
+  },
+  "bundleDependencies": false,
+  "deprecated": false,
+  "description": "like `chown -R`",
+  "devDependencies": {
+    "mkdirp": "0.3",
+    "rimraf": "^2.7.1",
+    "tap": "^14.10.6"
+  },
+  "engines": {
+    "node": ">=10"
+  },
+  "files": [
+    "chownr.js"
+  ],
+  "homepage": "https://github.com/isaacs/chownr#readme",
+  "license": "ISC",
+  "main": "chownr.js",
+  "name": "chownr",
+  "repository": {
+    "type": "git",
+    "url": "git://github.com/isaacs/chownr.git"
+  },
+  "scripts": {
+    "postversion": "npm publish",
+    "prepublishOnly": "git push origin --follow-tags",
+    "preversion": "npm test",
+    "test": "tap"
+  },
+  "tap": {
+    "check-coverage": true
+  },
+  "version": "2.0.0"
+}

+ 47 - 0
node_modules/clean-stack/index.d.ts

@@ -0,0 +1,47 @@
+declare namespace cleanStack {
+	interface Options {
+		/**
+		Prettify the file paths in the stack:
+
+		`/Users/sindresorhus/dev/clean-stack/unicorn.js:2:15` → `~/dev/clean-stack/unicorn.js:2:15`
+
+		@default false
+		*/
+		readonly pretty?: boolean;
+	}
+}
+
+/**
+Clean up error stack traces. Removes the mostly unhelpful internal Node.js entries.
+
+@param stack - The `stack` property of an `Error`.
+
+@example
+```
+import cleanStack = require('clean-stack');
+
+const error = new Error('Missing unicorn');
+
+console.log(error.stack);
+
+// Error: Missing unicorn
+//     at Object.<anonymous> (/Users/sindresorhus/dev/clean-stack/unicorn.js:2:15)
+//     at Module._compile (module.js:409:26)
+//     at Object.Module._extensions..js (module.js:416:10)
+//     at Module.load (module.js:343:32)
+//     at Function.Module._load (module.js:300:12)
+//     at Function.Module.runMain (module.js:441:10)
+//     at startup (node.js:139:18)
+
+console.log(cleanStack(error.stack));
+
+// Error: Missing unicorn
+//     at Object.<anonymous> (/Users/sindresorhus/dev/clean-stack/unicorn.js:2:15)
+```
+*/
+declare function cleanStack(
+	stack: string,
+	options?: cleanStack.Options
+): string;
+
+export = cleanStack;

+ 40 - 0
node_modules/clean-stack/index.js

@@ -0,0 +1,40 @@
+'use strict';
+const os = require('os');
+
+const extractPathRegex = /\s+at.*(?:\(|\s)(.*)\)?/;
+const pathRegex = /^(?:(?:(?:node|(?:internal\/[\w/]*|.*node_modules\/(?:babel-polyfill|pirates)\/.*)?\w+)\.js:\d+:\d+)|native)/;
+const homeDir = typeof os.homedir === 'undefined' ? '' : os.homedir();
+
+module.exports = (stack, options) => {
+	options = Object.assign({pretty: false}, options);
+
+	return stack.replace(/\\/g, '/')
+		.split('\n')
+		.filter(line => {
+			const pathMatches = line.match(extractPathRegex);
+			if (pathMatches === null || !pathMatches[1]) {
+				return true;
+			}
+
+			const match = pathMatches[1];
+
+			// Electron
+			if (
+				match.includes('.app/Contents/Resources/electron.asar') ||
+				match.includes('.app/Contents/Resources/default_app.asar')
+			) {
+				return false;
+			}
+
+			return !pathRegex.test(match);
+		})
+		.filter(line => line.trim() !== '')
+		.map(line => {
+			if (options.pretty) {
+				return line.replace(extractPathRegex, (m, p1) => m.replace(p1, p1.replace(homeDir, '~')));
+			}
+
+			return line;
+		})
+		.join('\n');
+};

+ 9 - 0
node_modules/clean-stack/license

@@ -0,0 +1,9 @@
+MIT License
+
+Copyright (c) Sindre Sorhus <sindresorhus@gmail.com> (sindresorhus.com)
+
+Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

+ 71 - 0
node_modules/clean-stack/package.json

@@ -0,0 +1,71 @@
+{
+  "_from": "clean-stack@^2.0.0",
+  "_id": "clean-stack@2.2.0",
+  "_inBundle": false,
+  "_integrity": "sha512-4diC9HaTE+KRAMWhDhrGOECgWZxoevMc5TlkObMqNSsVU62PYzXZ/SMTjzyGAFF1YusgxGcSWTEXBhp0CPwQ1A==",
+  "_location": "/clean-stack",
+  "_phantomChildren": {},
+  "_requested": {
+    "type": "range",
+    "registry": true,
+    "raw": "clean-stack@^2.0.0",
+    "name": "clean-stack",
+    "escapedName": "clean-stack",
+    "rawSpec": "^2.0.0",
+    "saveSpec": null,
+    "fetchSpec": "^2.0.0"
+  },
+  "_requiredBy": [
+    "/aggregate-error"
+  ],
+  "_resolved": "https://registry.npmmirror.com/clean-stack/-/clean-stack-2.2.0.tgz",
+  "_shasum": "ee8472dbb129e727b31e8a10a427dee9dfe4008b",
+  "_spec": "clean-stack@^2.0.0",
+  "_where": "E:\\psy_web_share\\node_modules\\aggregate-error",
+  "author": {
+    "name": "Sindre Sorhus",
+    "email": "sindresorhus@gmail.com",
+    "url": "sindresorhus.com"
+  },
+  "browser": {
+    "os": false
+  },
+  "bugs": {
+    "url": "https://github.com/sindresorhus/clean-stack/issues"
+  },
+  "bundleDependencies": false,
+  "deprecated": false,
+  "description": "Clean up error stack traces",
+  "devDependencies": {
+    "ava": "^1.4.1",
+    "tsd": "^0.7.2",
+    "xo": "^0.24.0"
+  },
+  "engines": {
+    "node": ">=6"
+  },
+  "files": [
+    "index.js",
+    "index.d.ts"
+  ],
+  "homepage": "https://github.com/sindresorhus/clean-stack#readme",
+  "keywords": [
+    "clean",
+    "stack",
+    "trace",
+    "traces",
+    "error",
+    "err",
+    "electron"
+  ],
+  "license": "MIT",
+  "name": "clean-stack",
+  "repository": {
+    "type": "git",
+    "url": "git+https://github.com/sindresorhus/clean-stack.git"
+  },
+  "scripts": {
+    "test": "xo && ava && tsd"
+  },
+  "version": "2.2.0"
+}

+ 76 - 0
node_modules/clean-stack/readme.md

@@ -0,0 +1,76 @@
+# clean-stack [![Build Status](https://travis-ci.org/sindresorhus/clean-stack.svg?branch=master)](https://travis-ci.org/sindresorhus/clean-stack)
+
+> Clean up error stack traces
+
+Removes the mostly unhelpful internal Node.js entries.
+
+Also works in Electron.
+
+
+## Install
+
+```
+$ npm install clean-stack
+```
+
+
+## Usage
+
+```js
+const cleanStack = require('clean-stack');
+
+const error = new Error('Missing unicorn');
+
+console.log(error.stack);
+/*
+Error: Missing unicorn
+    at Object.<anonymous> (/Users/sindresorhus/dev/clean-stack/unicorn.js:2:15)
+    at Module._compile (module.js:409:26)
+    at Object.Module._extensions..js (module.js:416:10)
+    at Module.load (module.js:343:32)
+    at Function.Module._load (module.js:300:12)
+    at Function.Module.runMain (module.js:441:10)
+    at startup (node.js:139:18)
+*/
+
+console.log(cleanStack(error.stack));
+/*
+Error: Missing unicorn
+    at Object.<anonymous> (/Users/sindresorhus/dev/clean-stack/unicorn.js:2:15)
+*/
+```
+
+
+## API
+
+### cleanStack(stack, [options])
+
+#### stack
+
+Type: `string`
+
+The `stack` property of an `Error`.
+
+#### options
+
+Type: `Object`
+
+##### pretty
+
+Type: `boolean`<br>
+Default: `false`
+
+Prettify the file paths in the stack:
+
+`/Users/sindresorhus/dev/clean-stack/unicorn.js:2:15` → `~/dev/clean-stack/unicorn.js:2:15`
+
+
+## Related
+
+- [extrack-stack](https://github.com/sindresorhus/extract-stack) - Extract the actual stack of an error
+- [stack-utils](https://github.com/tapjs/stack-utils) - Captures and cleans stack traces
+
+
+## License
+
+MIT © [Sindre Sorhus](https://sindresorhus.com)

+ 24 - 0
node_modules/commondir/LICENSE

@@ -0,0 +1,24 @@
+The MIT License
+
+Copyright (c) 2013 James Halliday (mail@substack.net)
+
+Permission is hereby granted, free of charge, 
+to any person obtaining a copy of this software and 
+associated documentation files (the "Software"), to 
+deal in the Software without restriction, including 
+without limitation the rights to use, copy, modify, 
+merge, publish, distribute, sublicense, and/or sell 
+copies of the Software, and to permit persons to whom 
+the Software is furnished to do so, 
+subject to the following conditions:
+
+The above copyright notice and this permission notice 
+shall be included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, 
+EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES 
+OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
+IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR 
+ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, 
+TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE 
+SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

+ 3 - 0
node_modules/commondir/example/dir.js

@@ -0,0 +1,3 @@
+var commondir = require('../');
+var dir = commondir(process.argv.slice(2));
+console.log(dir);

+ 29 - 0
node_modules/commondir/index.js

@@ -0,0 +1,29 @@
+var path = require('path');
+
+module.exports = function (basedir, relfiles) {
+    if (relfiles) {
+        var files = relfiles.map(function (r) {
+            return path.resolve(basedir, r);
+        });
+    }
+    else {
+        var files = basedir;
+    }
+    
+    var res = files.slice(1).reduce(function (ps, file) {
+        if (!file.match(/^([A-Za-z]:)?\/|\\/)) {
+            throw new Error('relative path without a basedir');
+        }
+        
+        var xs = file.split(/\/+|\\+/);
+        for (
+            var i = 0;
+            ps[i] === xs[i] && i < Math.min(ps.length, xs.length);
+            i++
+        );
+        return ps.slice(0, i);
+    }, files[0].split(/\/+|\\+/));
+    
+    // Windows correctly handles paths with forward-slashes
+    return res.length > 1 ? res.join('/') : '/'
+};

Деякі файли не було показано, через те що забагато файлів було змінено