# train.prototxt — SSD-style single-shot detector training network (Caffe prototxt).
# NOTE(review): the original file was scraped from a web viewer; a filename/size
# header and a blob of concatenated line numbers were removed from this position.
  1. layer {
  2. name: "data"
  3. type: "AnnotatedData"
  4. top: "data"
  5. top: "label"
  6. include {
  7. phase: TRAIN
  8. }
  9. transform_param {
  10. mirror: true
  11. mean_value: 104
  12. mean_value: 117
  13. mean_value: 123
  14. resize_param {
  15. prob: 1
  16. resize_mode: WARP
  17. height: 300
  18. width: 300
  19. interp_mode: LINEAR
  20. interp_mode: AREA
  21. interp_mode: NEAREST
  22. interp_mode: CUBIC
  23. interp_mode: LANCZOS4
  24. }
  25. emit_constraint {
  26. emit_type: CENTER
  27. }
  28. distort_param {
  29. brightness_prob: 0.5
  30. brightness_delta: 32
  31. contrast_prob: 0.5
  32. contrast_lower: 0.5
  33. contrast_upper: 1.5
  34. hue_prob: 0.5
  35. hue_delta: 18
  36. saturation_prob: 0.5
  37. saturation_lower: 0.5
  38. saturation_upper: 1.5
  39. random_order_prob: 0.0
  40. }
  41. expand_param {
  42. prob: 0.5
  43. max_expand_ratio: 4.0
  44. }
  45. }
  46. data_param {
  47. source: "train_lmdb/"
  48. batch_size: 16
  49. backend: LMDB
  50. }
  51. annotated_data_param {
  52. batch_sampler {
  53. max_sample: 1
  54. max_trials: 1
  55. }
  56. batch_sampler {
  57. sampler {
  58. min_scale: 0.3
  59. max_scale: 1.0
  60. min_aspect_ratio: 0.5
  61. max_aspect_ratio: 2.0
  62. }
  63. sample_constraint {
  64. min_jaccard_overlap: 0.1
  65. }
  66. max_sample: 1
  67. max_trials: 50
  68. }
  69. batch_sampler {
  70. sampler {
  71. min_scale: 0.3
  72. max_scale: 1.0
  73. min_aspect_ratio: 0.5
  74. max_aspect_ratio: 2.0
  75. }
  76. sample_constraint {
  77. min_jaccard_overlap: 0.3
  78. }
  79. max_sample: 1
  80. max_trials: 50
  81. }
  82. batch_sampler {
  83. sampler {
  84. min_scale: 0.3
  85. max_scale: 1.0
  86. min_aspect_ratio: 0.5
  87. max_aspect_ratio: 2.0
  88. }
  89. sample_constraint {
  90. min_jaccard_overlap: 0.5
  91. }
  92. max_sample: 1
  93. max_trials: 50
  94. }
  95. batch_sampler {
  96. sampler {
  97. min_scale: 0.3
  98. max_scale: 1.0
  99. min_aspect_ratio: 0.5
  100. max_aspect_ratio: 2.0
  101. }
  102. sample_constraint {
  103. min_jaccard_overlap: 0.7
  104. }
  105. max_sample: 1
  106. max_trials: 50
  107. }
  108. batch_sampler {
  109. sampler {
  110. min_scale: 0.3
  111. max_scale: 1.0
  112. min_aspect_ratio: 0.5
  113. max_aspect_ratio: 2.0
  114. }
  115. sample_constraint {
  116. min_jaccard_overlap: 0.9
  117. }
  118. max_sample: 1
  119. max_trials: 50
  120. }
  121. batch_sampler {
  122. sampler {
  123. min_scale: 0.3
  124. max_scale: 1.0
  125. min_aspect_ratio: 0.5
  126. max_aspect_ratio: 2.0
  127. }
  128. sample_constraint {
  129. max_jaccard_overlap: 1.0
  130. }
  131. max_sample: 1
  132. max_trials: 50
  133. }
  134. }
  135. }
  136. layer {
  137. name: "data_bn"
  138. type: "BatchNorm"
  139. bottom: "data"
  140. top: "data_bn"
  141. param {
  142. lr_mult: 0.0
  143. }
  144. param {
  145. lr_mult: 0.0
  146. }
  147. param {
  148. lr_mult: 0.0
  149. }
  150. }
  151. layer {
  152. name: "data_scale"
  153. type: "Scale"
  154. bottom: "data_bn"
  155. top: "data_bn"
  156. param {
  157. lr_mult: 1.0
  158. decay_mult: 1.0
  159. }
  160. param {
  161. lr_mult: 2.0
  162. decay_mult: 1.0
  163. }
  164. scale_param {
  165. bias_term: true
  166. }
  167. }
  168. layer {
  169. name: "conv1_h"
  170. type: "Convolution"
  171. bottom: "data_bn"
  172. top: "conv1_h"
  173. param {
  174. lr_mult: 1.0
  175. decay_mult: 1.0
  176. }
  177. param {
  178. lr_mult: 2.0
  179. decay_mult: 1.0
  180. }
  181. convolution_param {
  182. num_output: 32
  183. pad: 3
  184. kernel_size: 7
  185. stride: 2
  186. weight_filler {
  187. type: "msra"
  188. variance_norm: FAN_OUT
  189. }
  190. bias_filler {
  191. type: "constant"
  192. value: 0.0
  193. }
  194. }
  195. }
  196. layer {
  197. name: "conv1_bn_h"
  198. type: "BatchNorm"
  199. bottom: "conv1_h"
  200. top: "conv1_h"
  201. param {
  202. lr_mult: 0.0
  203. }
  204. param {
  205. lr_mult: 0.0
  206. }
  207. param {
  208. lr_mult: 0.0
  209. }
  210. }
  211. layer {
  212. name: "conv1_scale_h"
  213. type: "Scale"
  214. bottom: "conv1_h"
  215. top: "conv1_h"
  216. param {
  217. lr_mult: 1.0
  218. decay_mult: 1.0
  219. }
  220. param {
  221. lr_mult: 2.0
  222. decay_mult: 1.0
  223. }
  224. scale_param {
  225. bias_term: true
  226. }
  227. }
  228. layer {
  229. name: "conv1_relu"
  230. type: "ReLU"
  231. bottom: "conv1_h"
  232. top: "conv1_h"
  233. }
  234. layer {
  235. name: "conv1_pool"
  236. type: "Pooling"
  237. bottom: "conv1_h"
  238. top: "conv1_pool"
  239. pooling_param {
  240. kernel_size: 3
  241. stride: 2
  242. }
  243. }
  244. layer {
  245. name: "layer_64_1_conv1_h"
  246. type: "Convolution"
  247. bottom: "conv1_pool"
  248. top: "layer_64_1_conv1_h"
  249. param {
  250. lr_mult: 1.0
  251. decay_mult: 1.0
  252. }
  253. convolution_param {
  254. num_output: 32
  255. bias_term: false
  256. pad: 1
  257. kernel_size: 3
  258. stride: 1
  259. weight_filler {
  260. type: "msra"
  261. }
  262. bias_filler {
  263. type: "constant"
  264. value: 0.0
  265. }
  266. }
  267. }
  268. layer {
  269. name: "layer_64_1_bn2_h"
  270. type: "BatchNorm"
  271. bottom: "layer_64_1_conv1_h"
  272. top: "layer_64_1_conv1_h"
  273. param {
  274. lr_mult: 0.0
  275. }
  276. param {
  277. lr_mult: 0.0
  278. }
  279. param {
  280. lr_mult: 0.0
  281. }
  282. }
  283. layer {
  284. name: "layer_64_1_scale2_h"
  285. type: "Scale"
  286. bottom: "layer_64_1_conv1_h"
  287. top: "layer_64_1_conv1_h"
  288. param {
  289. lr_mult: 1.0
  290. decay_mult: 1.0
  291. }
  292. param {
  293. lr_mult: 2.0
  294. decay_mult: 1.0
  295. }
  296. scale_param {
  297. bias_term: true
  298. }
  299. }
  300. layer {
  301. name: "layer_64_1_relu2"
  302. type: "ReLU"
  303. bottom: "layer_64_1_conv1_h"
  304. top: "layer_64_1_conv1_h"
  305. }
  306. layer {
  307. name: "layer_64_1_conv2_h"
  308. type: "Convolution"
  309. bottom: "layer_64_1_conv1_h"
  310. top: "layer_64_1_conv2_h"
  311. param {
  312. lr_mult: 1.0
  313. decay_mult: 1.0
  314. }
  315. convolution_param {
  316. num_output: 32
  317. bias_term: false
  318. pad: 1
  319. kernel_size: 3
  320. stride: 1
  321. weight_filler {
  322. type: "msra"
  323. }
  324. bias_filler {
  325. type: "constant"
  326. value: 0.0
  327. }
  328. }
  329. }
  330. layer {
  331. name: "layer_64_1_sum"
  332. type: "Eltwise"
  333. bottom: "layer_64_1_conv2_h"
  334. bottom: "conv1_pool"
  335. top: "layer_64_1_sum"
  336. }
  337. layer {
  338. name: "layer_128_1_bn1_h"
  339. type: "BatchNorm"
  340. bottom: "layer_64_1_sum"
  341. top: "layer_128_1_bn1_h"
  342. param {
  343. lr_mult: 0.0
  344. }
  345. param {
  346. lr_mult: 0.0
  347. }
  348. param {
  349. lr_mult: 0.0
  350. }
  351. }
  352. layer {
  353. name: "layer_128_1_scale1_h"
  354. type: "Scale"
  355. bottom: "layer_128_1_bn1_h"
  356. top: "layer_128_1_bn1_h"
  357. param {
  358. lr_mult: 1.0
  359. decay_mult: 1.0
  360. }
  361. param {
  362. lr_mult: 2.0
  363. decay_mult: 1.0
  364. }
  365. scale_param {
  366. bias_term: true
  367. }
  368. }
  369. layer {
  370. name: "layer_128_1_relu1"
  371. type: "ReLU"
  372. bottom: "layer_128_1_bn1_h"
  373. top: "layer_128_1_bn1_h"
  374. }
  375. layer {
  376. name: "layer_128_1_conv1_h"
  377. type: "Convolution"
  378. bottom: "layer_128_1_bn1_h"
  379. top: "layer_128_1_conv1_h"
  380. param {
  381. lr_mult: 1.0
  382. decay_mult: 1.0
  383. }
  384. convolution_param {
  385. num_output: 128
  386. bias_term: false
  387. pad: 1
  388. kernel_size: 3
  389. stride: 2
  390. weight_filler {
  391. type: "msra"
  392. }
  393. bias_filler {
  394. type: "constant"
  395. value: 0.0
  396. }
  397. }
  398. }
  399. layer {
  400. name: "layer_128_1_bn2"
  401. type: "BatchNorm"
  402. bottom: "layer_128_1_conv1_h"
  403. top: "layer_128_1_conv1_h"
  404. param {
  405. lr_mult: 0.0
  406. }
  407. param {
  408. lr_mult: 0.0
  409. }
  410. param {
  411. lr_mult: 0.0
  412. }
  413. }
  414. layer {
  415. name: "layer_128_1_scale2"
  416. type: "Scale"
  417. bottom: "layer_128_1_conv1_h"
  418. top: "layer_128_1_conv1_h"
  419. param {
  420. lr_mult: 1.0
  421. decay_mult: 1.0
  422. }
  423. param {
  424. lr_mult: 2.0
  425. decay_mult: 1.0
  426. }
  427. scale_param {
  428. bias_term: true
  429. }
  430. }
  431. layer {
  432. name: "layer_128_1_relu2"
  433. type: "ReLU"
  434. bottom: "layer_128_1_conv1_h"
  435. top: "layer_128_1_conv1_h"
  436. }
  437. layer {
  438. name: "layer_128_1_conv2"
  439. type: "Convolution"
  440. bottom: "layer_128_1_conv1_h"
  441. top: "layer_128_1_conv2"
  442. param {
  443. lr_mult: 1.0
  444. decay_mult: 1.0
  445. }
  446. convolution_param {
  447. num_output: 128
  448. bias_term: false
  449. pad: 1
  450. kernel_size: 3
  451. stride: 1
  452. weight_filler {
  453. type: "msra"
  454. }
  455. bias_filler {
  456. type: "constant"
  457. value: 0.0
  458. }
  459. }
  460. }
  461. layer {
  462. name: "layer_128_1_conv_expand_h"
  463. type: "Convolution"
  464. bottom: "layer_128_1_bn1_h"
  465. top: "layer_128_1_conv_expand_h"
  466. param {
  467. lr_mult: 1.0
  468. decay_mult: 1.0
  469. }
  470. convolution_param {
  471. num_output: 128
  472. bias_term: false
  473. pad: 0
  474. kernel_size: 1
  475. stride: 2
  476. weight_filler {
  477. type: "msra"
  478. }
  479. bias_filler {
  480. type: "constant"
  481. value: 0.0
  482. }
  483. }
  484. }
  485. layer {
  486. name: "layer_128_1_sum"
  487. type: "Eltwise"
  488. bottom: "layer_128_1_conv2"
  489. bottom: "layer_128_1_conv_expand_h"
  490. top: "layer_128_1_sum"
  491. }
  492. layer {
  493. name: "layer_256_1_bn1"
  494. type: "BatchNorm"
  495. bottom: "layer_128_1_sum"
  496. top: "layer_256_1_bn1"
  497. param {
  498. lr_mult: 0.0
  499. }
  500. param {
  501. lr_mult: 0.0
  502. }
  503. param {
  504. lr_mult: 0.0
  505. }
  506. }
  507. layer {
  508. name: "layer_256_1_scale1"
  509. type: "Scale"
  510. bottom: "layer_256_1_bn1"
  511. top: "layer_256_1_bn1"
  512. param {
  513. lr_mult: 1.0
  514. decay_mult: 1.0
  515. }
  516. param {
  517. lr_mult: 2.0
  518. decay_mult: 1.0
  519. }
  520. scale_param {
  521. bias_term: true
  522. }
  523. }
  524. layer {
  525. name: "layer_256_1_relu1"
  526. type: "ReLU"
  527. bottom: "layer_256_1_bn1"
  528. top: "layer_256_1_bn1"
  529. }
  530. layer {
  531. name: "layer_256_1_conv1"
  532. type: "Convolution"
  533. bottom: "layer_256_1_bn1"
  534. top: "layer_256_1_conv1"
  535. param {
  536. lr_mult: 1.0
  537. decay_mult: 1.0
  538. }
  539. convolution_param {
  540. num_output: 256
  541. bias_term: false
  542. pad: 1
  543. kernel_size: 3
  544. stride: 2
  545. weight_filler {
  546. type: "msra"
  547. }
  548. bias_filler {
  549. type: "constant"
  550. value: 0.0
  551. }
  552. }
  553. }
  554. layer {
  555. name: "layer_256_1_bn2"
  556. type: "BatchNorm"
  557. bottom: "layer_256_1_conv1"
  558. top: "layer_256_1_conv1"
  559. param {
  560. lr_mult: 0.0
  561. }
  562. param {
  563. lr_mult: 0.0
  564. }
  565. param {
  566. lr_mult: 0.0
  567. }
  568. }
  569. layer {
  570. name: "layer_256_1_scale2"
  571. type: "Scale"
  572. bottom: "layer_256_1_conv1"
  573. top: "layer_256_1_conv1"
  574. param {
  575. lr_mult: 1.0
  576. decay_mult: 1.0
  577. }
  578. param {
  579. lr_mult: 2.0
  580. decay_mult: 1.0
  581. }
  582. scale_param {
  583. bias_term: true
  584. }
  585. }
  586. layer {
  587. name: "layer_256_1_relu2"
  588. type: "ReLU"
  589. bottom: "layer_256_1_conv1"
  590. top: "layer_256_1_conv1"
  591. }
  592. layer {
  593. name: "layer_256_1_conv2"
  594. type: "Convolution"
  595. bottom: "layer_256_1_conv1"
  596. top: "layer_256_1_conv2"
  597. param {
  598. lr_mult: 1.0
  599. decay_mult: 1.0
  600. }
  601. convolution_param {
  602. num_output: 256
  603. bias_term: false
  604. pad: 1
  605. kernel_size: 3
  606. stride: 1
  607. weight_filler {
  608. type: "msra"
  609. }
  610. bias_filler {
  611. type: "constant"
  612. value: 0.0
  613. }
  614. }
  615. }
  616. layer {
  617. name: "layer_256_1_conv_expand"
  618. type: "Convolution"
  619. bottom: "layer_256_1_bn1"
  620. top: "layer_256_1_conv_expand"
  621. param {
  622. lr_mult: 1.0
  623. decay_mult: 1.0
  624. }
  625. convolution_param {
  626. num_output: 256
  627. bias_term: false
  628. pad: 0
  629. kernel_size: 1
  630. stride: 2
  631. weight_filler {
  632. type: "msra"
  633. }
  634. bias_filler {
  635. type: "constant"
  636. value: 0.0
  637. }
  638. }
  639. }
  640. layer {
  641. name: "layer_256_1_sum"
  642. type: "Eltwise"
  643. bottom: "layer_256_1_conv2"
  644. bottom: "layer_256_1_conv_expand"
  645. top: "layer_256_1_sum"
  646. }
  647. layer {
  648. name: "layer_512_1_bn1"
  649. type: "BatchNorm"
  650. bottom: "layer_256_1_sum"
  651. top: "layer_512_1_bn1"
  652. param {
  653. lr_mult: 0.0
  654. }
  655. param {
  656. lr_mult: 0.0
  657. }
  658. param {
  659. lr_mult: 0.0
  660. }
  661. }
  662. layer {
  663. name: "layer_512_1_scale1"
  664. type: "Scale"
  665. bottom: "layer_512_1_bn1"
  666. top: "layer_512_1_bn1"
  667. param {
  668. lr_mult: 1.0
  669. decay_mult: 1.0
  670. }
  671. param {
  672. lr_mult: 2.0
  673. decay_mult: 1.0
  674. }
  675. scale_param {
  676. bias_term: true
  677. }
  678. }
  679. layer {
  680. name: "layer_512_1_relu1"
  681. type: "ReLU"
  682. bottom: "layer_512_1_bn1"
  683. top: "layer_512_1_bn1"
  684. }
  685. layer {
  686. name: "layer_512_1_conv1_h"
  687. type: "Convolution"
  688. bottom: "layer_512_1_bn1"
  689. top: "layer_512_1_conv1_h"
  690. param {
  691. lr_mult: 1.0
  692. decay_mult: 1.0
  693. }
  694. convolution_param {
  695. num_output: 128
  696. bias_term: false
  697. pad: 1
  698. kernel_size: 3
  699. stride: 1 # 2
  700. weight_filler {
  701. type: "msra"
  702. }
  703. bias_filler {
  704. type: "constant"
  705. value: 0.0
  706. }
  707. }
  708. }
  709. layer {
  710. name: "layer_512_1_bn2_h"
  711. type: "BatchNorm"
  712. bottom: "layer_512_1_conv1_h"
  713. top: "layer_512_1_conv1_h"
  714. param {
  715. lr_mult: 0.0
  716. }
  717. param {
  718. lr_mult: 0.0
  719. }
  720. param {
  721. lr_mult: 0.0
  722. }
  723. }
  724. layer {
  725. name: "layer_512_1_scale2_h"
  726. type: "Scale"
  727. bottom: "layer_512_1_conv1_h"
  728. top: "layer_512_1_conv1_h"
  729. param {
  730. lr_mult: 1.0
  731. decay_mult: 1.0
  732. }
  733. param {
  734. lr_mult: 2.0
  735. decay_mult: 1.0
  736. }
  737. scale_param {
  738. bias_term: true
  739. }
  740. }
  741. layer {
  742. name: "layer_512_1_relu2"
  743. type: "ReLU"
  744. bottom: "layer_512_1_conv1_h"
  745. top: "layer_512_1_conv1_h"
  746. }
  747. layer {
  748. name: "layer_512_1_conv2_h"
  749. type: "Convolution"
  750. bottom: "layer_512_1_conv1_h"
  751. top: "layer_512_1_conv2_h"
  752. param {
  753. lr_mult: 1.0
  754. decay_mult: 1.0
  755. }
  756. convolution_param {
  757. num_output: 256
  758. bias_term: false
  759. pad: 2 # 1
  760. kernel_size: 3
  761. stride: 1
  762. dilation: 2
  763. weight_filler {
  764. type: "msra"
  765. }
  766. bias_filler {
  767. type: "constant"
  768. value: 0.0
  769. }
  770. }
  771. }
  772. layer {
  773. name: "layer_512_1_conv_expand_h"
  774. type: "Convolution"
  775. bottom: "layer_512_1_bn1"
  776. top: "layer_512_1_conv_expand_h"
  777. param {
  778. lr_mult: 1.0
  779. decay_mult: 1.0
  780. }
  781. convolution_param {
  782. num_output: 256
  783. bias_term: false
  784. pad: 0
  785. kernel_size: 1
  786. stride: 1 # 2
  787. weight_filler {
  788. type: "msra"
  789. }
  790. bias_filler {
  791. type: "constant"
  792. value: 0.0
  793. }
  794. }
  795. }
  796. layer {
  797. name: "layer_512_1_sum"
  798. type: "Eltwise"
  799. bottom: "layer_512_1_conv2_h"
  800. bottom: "layer_512_1_conv_expand_h"
  801. top: "layer_512_1_sum"
  802. }
  803. layer {
  804. name: "last_bn_h"
  805. type: "BatchNorm"
  806. bottom: "layer_512_1_sum"
  807. top: "layer_512_1_sum"
  808. param {
  809. lr_mult: 0.0
  810. }
  811. param {
  812. lr_mult: 0.0
  813. }
  814. param {
  815. lr_mult: 0.0
  816. }
  817. }
  818. layer {
  819. name: "last_scale_h"
  820. type: "Scale"
  821. bottom: "layer_512_1_sum"
  822. top: "layer_512_1_sum"
  823. param {
  824. lr_mult: 1.0
  825. decay_mult: 1.0
  826. }
  827. param {
  828. lr_mult: 2.0
  829. decay_mult: 1.0
  830. }
  831. scale_param {
  832. bias_term: true
  833. }
  834. }
  835. layer {
  836. name: "last_relu"
  837. type: "ReLU"
  838. bottom: "layer_512_1_sum"
  839. top: "fc7"
  840. }
  841. layer {
  842. name: "conv6_1_h"
  843. type: "Convolution"
  844. bottom: "fc7"
  845. top: "conv6_1_h"
  846. param {
  847. lr_mult: 1
  848. decay_mult: 1
  849. }
  850. param {
  851. lr_mult: 2
  852. decay_mult: 0
  853. }
  854. convolution_param {
  855. num_output: 128
  856. pad: 0
  857. kernel_size: 1
  858. stride: 1
  859. weight_filler {
  860. type: "xavier"
  861. }
  862. bias_filler {
  863. type: "constant"
  864. value: 0
  865. }
  866. }
  867. }
  868. layer {
  869. name: "conv6_1_relu"
  870. type: "ReLU"
  871. bottom: "conv6_1_h"
  872. top: "conv6_1_h"
  873. }
  874. layer {
  875. name: "conv6_2_h"
  876. type: "Convolution"
  877. bottom: "conv6_1_h"
  878. top: "conv6_2_h"
  879. param {
  880. lr_mult: 1
  881. decay_mult: 1
  882. }
  883. param {
  884. lr_mult: 2
  885. decay_mult: 0
  886. }
  887. convolution_param {
  888. num_output: 256
  889. pad: 1
  890. kernel_size: 3
  891. stride: 2
  892. weight_filler {
  893. type: "xavier"
  894. }
  895. bias_filler {
  896. type: "constant"
  897. value: 0
  898. }
  899. }
  900. }
  901. layer {
  902. name: "conv6_2_relu"
  903. type: "ReLU"
  904. bottom: "conv6_2_h"
  905. top: "conv6_2_h"
  906. }
  907. layer {
  908. name: "conv7_1_h"
  909. type: "Convolution"
  910. bottom: "conv6_2_h"
  911. top: "conv7_1_h"
  912. param {
  913. lr_mult: 1
  914. decay_mult: 1
  915. }
  916. param {
  917. lr_mult: 2
  918. decay_mult: 0
  919. }
  920. convolution_param {
  921. num_output: 64
  922. pad: 0
  923. kernel_size: 1
  924. stride: 1
  925. weight_filler {
  926. type: "xavier"
  927. }
  928. bias_filler {
  929. type: "constant"
  930. value: 0
  931. }
  932. }
  933. }
  934. layer {
  935. name: "conv7_1_relu"
  936. type: "ReLU"
  937. bottom: "conv7_1_h"
  938. top: "conv7_1_h"
  939. }
  940. layer {
  941. name: "conv7_2_h"
  942. type: "Convolution"
  943. bottom: "conv7_1_h"
  944. top: "conv7_2_h"
  945. param {
  946. lr_mult: 1
  947. decay_mult: 1
  948. }
  949. param {
  950. lr_mult: 2
  951. decay_mult: 0
  952. }
  953. convolution_param {
  954. num_output: 128
  955. pad: 1
  956. kernel_size: 3
  957. stride: 2
  958. weight_filler {
  959. type: "xavier"
  960. }
  961. bias_filler {
  962. type: "constant"
  963. value: 0
  964. }
  965. }
  966. }
  967. layer {
  968. name: "conv7_2_relu"
  969. type: "ReLU"
  970. bottom: "conv7_2_h"
  971. top: "conv7_2_h"
  972. }
  973. layer {
  974. name: "conv8_1_h"
  975. type: "Convolution"
  976. bottom: "conv7_2_h"
  977. top: "conv8_1_h"
  978. param {
  979. lr_mult: 1
  980. decay_mult: 1
  981. }
  982. param {
  983. lr_mult: 2
  984. decay_mult: 0
  985. }
  986. convolution_param {
  987. num_output: 64
  988. pad: 0
  989. kernel_size: 1
  990. stride: 1
  991. weight_filler {
  992. type: "xavier"
  993. }
  994. bias_filler {
  995. type: "constant"
  996. value: 0
  997. }
  998. }
  999. }
  1000. layer {
  1001. name: "conv8_1_relu"
  1002. type: "ReLU"
  1003. bottom: "conv8_1_h"
  1004. top: "conv8_1_h"
  1005. }
  1006. layer {
  1007. name: "conv8_2_h"
  1008. type: "Convolution"
  1009. bottom: "conv8_1_h"
  1010. top: "conv8_2_h"
  1011. param {
  1012. lr_mult: 1
  1013. decay_mult: 1
  1014. }
  1015. param {
  1016. lr_mult: 2
  1017. decay_mult: 0
  1018. }
  1019. convolution_param {
  1020. num_output: 128
  1021. pad: 0
  1022. kernel_size: 3
  1023. stride: 1
  1024. weight_filler {
  1025. type: "xavier"
  1026. }
  1027. bias_filler {
  1028. type: "constant"
  1029. value: 0
  1030. }
  1031. }
  1032. }
  1033. layer {
  1034. name: "conv8_2_relu"
  1035. type: "ReLU"
  1036. bottom: "conv8_2_h"
  1037. top: "conv8_2_h"
  1038. }
  1039. layer {
  1040. name: "conv9_1_h"
  1041. type: "Convolution"
  1042. bottom: "conv8_2_h"
  1043. top: "conv9_1_h"
  1044. param {
  1045. lr_mult: 1
  1046. decay_mult: 1
  1047. }
  1048. param {
  1049. lr_mult: 2
  1050. decay_mult: 0
  1051. }
  1052. convolution_param {
  1053. num_output: 64
  1054. pad: 0
  1055. kernel_size: 1
  1056. stride: 1
  1057. weight_filler {
  1058. type: "xavier"
  1059. }
  1060. bias_filler {
  1061. type: "constant"
  1062. value: 0
  1063. }
  1064. }
  1065. }
  1066. layer {
  1067. name: "conv9_1_relu"
  1068. type: "ReLU"
  1069. bottom: "conv9_1_h"
  1070. top: "conv9_1_h"
  1071. }
  1072. layer {
  1073. name: "conv9_2_h"
  1074. type: "Convolution"
  1075. bottom: "conv9_1_h"
  1076. top: "conv9_2_h"
  1077. param {
  1078. lr_mult: 1
  1079. decay_mult: 1
  1080. }
  1081. param {
  1082. lr_mult: 2
  1083. decay_mult: 0
  1084. }
  1085. convolution_param {
  1086. num_output: 128
  1087. pad: 0
  1088. kernel_size: 3
  1089. stride: 1
  1090. weight_filler {
  1091. type: "xavier"
  1092. }
  1093. bias_filler {
  1094. type: "constant"
  1095. value: 0
  1096. }
  1097. }
  1098. }
  1099. layer {
  1100. name: "conv9_2_relu"
  1101. type: "ReLU"
  1102. bottom: "conv9_2_h"
  1103. top: "conv9_2_h"
  1104. }
  1105. layer {
  1106. name: "conv4_3_norm"
  1107. type: "Normalize"
  1108. bottom: "layer_256_1_bn1"
  1109. top: "conv4_3_norm"
  1110. norm_param {
  1111. across_spatial: false
  1112. scale_filler {
  1113. type: "constant"
  1114. value: 20
  1115. }
  1116. channel_shared: false
  1117. }
  1118. }
  1119. layer {
  1120. name: "conv4_3_norm_mbox_loc"
  1121. type: "Convolution"
  1122. bottom: "conv4_3_norm"
  1123. top: "conv4_3_norm_mbox_loc"
  1124. param {
  1125. lr_mult: 1
  1126. decay_mult: 1
  1127. }
  1128. param {
  1129. lr_mult: 2
  1130. decay_mult: 0
  1131. }
  1132. convolution_param {
  1133. num_output: 16
  1134. pad: 1
  1135. kernel_size: 3
  1136. stride: 1
  1137. weight_filler {
  1138. type: "xavier"
  1139. }
  1140. bias_filler {
  1141. type: "constant"
  1142. value: 0
  1143. }
  1144. }
  1145. }
  1146. layer {
  1147. name: "conv4_3_norm_mbox_loc_perm"
  1148. type: "Permute"
  1149. bottom: "conv4_3_norm_mbox_loc"
  1150. top: "conv4_3_norm_mbox_loc_perm"
  1151. permute_param {
  1152. order: 0
  1153. order: 2
  1154. order: 3
  1155. order: 1
  1156. }
  1157. }
  1158. layer {
  1159. name: "conv4_3_norm_mbox_loc_flat"
  1160. type: "Flatten"
  1161. bottom: "conv4_3_norm_mbox_loc_perm"
  1162. top: "conv4_3_norm_mbox_loc_flat"
  1163. flatten_param {
  1164. axis: 1
  1165. }
  1166. }
  1167. layer {
  1168. name: "conv4_3_norm_mbox_conf"
  1169. type: "Convolution"
  1170. bottom: "conv4_3_norm"
  1171. top: "conv4_3_norm_mbox_conf"
  1172. param {
  1173. lr_mult: 1
  1174. decay_mult: 1
  1175. }
  1176. param {
  1177. lr_mult: 2
  1178. decay_mult: 0
  1179. }
  1180. convolution_param {
  1181. num_output: 8 # 84
  1182. pad: 1
  1183. kernel_size: 3
  1184. stride: 1
  1185. weight_filler {
  1186. type: "xavier"
  1187. }
  1188. bias_filler {
  1189. type: "constant"
  1190. value: 0
  1191. }
  1192. }
  1193. }
  1194. layer {
  1195. name: "conv4_3_norm_mbox_conf_perm"
  1196. type: "Permute"
  1197. bottom: "conv4_3_norm_mbox_conf"
  1198. top: "conv4_3_norm_mbox_conf_perm"
  1199. permute_param {
  1200. order: 0
  1201. order: 2
  1202. order: 3
  1203. order: 1
  1204. }
  1205. }
  1206. layer {
  1207. name: "conv4_3_norm_mbox_conf_flat"
  1208. type: "Flatten"
  1209. bottom: "conv4_3_norm_mbox_conf_perm"
  1210. top: "conv4_3_norm_mbox_conf_flat"
  1211. flatten_param {
  1212. axis: 1
  1213. }
  1214. }
  1215. layer {
  1216. name: "conv4_3_norm_mbox_priorbox"
  1217. type: "PriorBox"
  1218. bottom: "conv4_3_norm"
  1219. bottom: "data"
  1220. top: "conv4_3_norm_mbox_priorbox"
  1221. prior_box_param {
  1222. min_size: 30.0
  1223. max_size: 60.0
  1224. aspect_ratio: 2
  1225. flip: true
  1226. clip: false
  1227. variance: 0.1
  1228. variance: 0.1
  1229. variance: 0.2
  1230. variance: 0.2
  1231. step: 8
  1232. offset: 0.5
  1233. }
  1234. }
  1235. layer {
  1236. name: "fc7_mbox_loc"
  1237. type: "Convolution"
  1238. bottom: "fc7"
  1239. top: "fc7_mbox_loc"
  1240. param {
  1241. lr_mult: 1
  1242. decay_mult: 1
  1243. }
  1244. param {
  1245. lr_mult: 2
  1246. decay_mult: 0
  1247. }
  1248. convolution_param {
  1249. num_output: 24
  1250. pad: 1
  1251. kernel_size: 3
  1252. stride: 1
  1253. weight_filler {
  1254. type: "xavier"
  1255. }
  1256. bias_filler {
  1257. type: "constant"
  1258. value: 0
  1259. }
  1260. }
  1261. }
  1262. layer {
  1263. name: "fc7_mbox_loc_perm"
  1264. type: "Permute"
  1265. bottom: "fc7_mbox_loc"
  1266. top: "fc7_mbox_loc_perm"
  1267. permute_param {
  1268. order: 0
  1269. order: 2
  1270. order: 3
  1271. order: 1
  1272. }
  1273. }
  1274. layer {
  1275. name: "fc7_mbox_loc_flat"
  1276. type: "Flatten"
  1277. bottom: "fc7_mbox_loc_perm"
  1278. top: "fc7_mbox_loc_flat"
  1279. flatten_param {
  1280. axis: 1
  1281. }
  1282. }
  1283. layer {
  1284. name: "fc7_mbox_conf"
  1285. type: "Convolution"
  1286. bottom: "fc7"
  1287. top: "fc7_mbox_conf"
  1288. param {
  1289. lr_mult: 1
  1290. decay_mult: 1
  1291. }
  1292. param {
  1293. lr_mult: 2
  1294. decay_mult: 0
  1295. }
  1296. convolution_param {
  1297. num_output: 12 # 126
  1298. pad: 1
  1299. kernel_size: 3
  1300. stride: 1
  1301. weight_filler {
  1302. type: "xavier"
  1303. }
  1304. bias_filler {
  1305. type: "constant"
  1306. value: 0
  1307. }
  1308. }
  1309. }
  1310. layer {
  1311. name: "fc7_mbox_conf_perm"
  1312. type: "Permute"
  1313. bottom: "fc7_mbox_conf"
  1314. top: "fc7_mbox_conf_perm"
  1315. permute_param {
  1316. order: 0
  1317. order: 2
  1318. order: 3
  1319. order: 1
  1320. }
  1321. }
  1322. layer {
  1323. name: "fc7_mbox_conf_flat"
  1324. type: "Flatten"
  1325. bottom: "fc7_mbox_conf_perm"
  1326. top: "fc7_mbox_conf_flat"
  1327. flatten_param {
  1328. axis: 1
  1329. }
  1330. }
  1331. layer {
  1332. name: "fc7_mbox_priorbox"
  1333. type: "PriorBox"
  1334. bottom: "fc7"
  1335. bottom: "data"
  1336. top: "fc7_mbox_priorbox"
  1337. prior_box_param {
  1338. min_size: 60.0
  1339. max_size: 111.0
  1340. aspect_ratio: 2
  1341. aspect_ratio: 3
  1342. flip: true
  1343. clip: false
  1344. variance: 0.1
  1345. variance: 0.1
  1346. variance: 0.2
  1347. variance: 0.2
  1348. step: 16
  1349. offset: 0.5
  1350. }
  1351. }
  1352. layer {
  1353. name: "conv6_2_mbox_loc"
  1354. type: "Convolution"
  1355. bottom: "conv6_2_h"
  1356. top: "conv6_2_mbox_loc"
  1357. param {
  1358. lr_mult: 1
  1359. decay_mult: 1
  1360. }
  1361. param {
  1362. lr_mult: 2
  1363. decay_mult: 0
  1364. }
  1365. convolution_param {
  1366. num_output: 24
  1367. pad: 1
  1368. kernel_size: 3
  1369. stride: 1
  1370. weight_filler {
  1371. type: "xavier"
  1372. }
  1373. bias_filler {
  1374. type: "constant"
  1375. value: 0
  1376. }
  1377. }
  1378. }
  1379. layer {
  1380. name: "conv6_2_mbox_loc_perm"
  1381. type: "Permute"
  1382. bottom: "conv6_2_mbox_loc"
  1383. top: "conv6_2_mbox_loc_perm"
  1384. permute_param {
  1385. order: 0
  1386. order: 2
  1387. order: 3
  1388. order: 1
  1389. }
  1390. }
  1391. layer {
  1392. name: "conv6_2_mbox_loc_flat"
  1393. type: "Flatten"
  1394. bottom: "conv6_2_mbox_loc_perm"
  1395. top: "conv6_2_mbox_loc_flat"
  1396. flatten_param {
  1397. axis: 1
  1398. }
  1399. }
  1400. layer {
  1401. name: "conv6_2_mbox_conf"
  1402. type: "Convolution"
  1403. bottom: "conv6_2_h"
  1404. top: "conv6_2_mbox_conf"
  1405. param {
  1406. lr_mult: 1
  1407. decay_mult: 1
  1408. }
  1409. param {
  1410. lr_mult: 2
  1411. decay_mult: 0
  1412. }
  1413. convolution_param {
  1414. num_output: 12 # 126
  1415. pad: 1
  1416. kernel_size: 3
  1417. stride: 1
  1418. weight_filler {
  1419. type: "xavier"
  1420. }
  1421. bias_filler {
  1422. type: "constant"
  1423. value: 0
  1424. }
  1425. }
  1426. }
  1427. layer {
  1428. name: "conv6_2_mbox_conf_perm"
  1429. type: "Permute"
  1430. bottom: "conv6_2_mbox_conf"
  1431. top: "conv6_2_mbox_conf_perm"
  1432. permute_param {
  1433. order: 0
  1434. order: 2
  1435. order: 3
  1436. order: 1
  1437. }
  1438. }
  1439. layer {
  1440. name: "conv6_2_mbox_conf_flat"
  1441. type: "Flatten"
  1442. bottom: "conv6_2_mbox_conf_perm"
  1443. top: "conv6_2_mbox_conf_flat"
  1444. flatten_param {
  1445. axis: 1
  1446. }
  1447. }
  1448. layer {
  1449. name: "conv6_2_mbox_priorbox"
  1450. type: "PriorBox"
  1451. bottom: "conv6_2_h"
  1452. bottom: "data"
  1453. top: "conv6_2_mbox_priorbox"
  1454. prior_box_param {
  1455. min_size: 111.0
  1456. max_size: 162.0
  1457. aspect_ratio: 2
  1458. aspect_ratio: 3
  1459. flip: true
  1460. clip: false
  1461. variance: 0.1
  1462. variance: 0.1
  1463. variance: 0.2
  1464. variance: 0.2
  1465. step: 32
  1466. offset: 0.5
  1467. }
  1468. }
  1469. layer {
  1470. name: "conv7_2_mbox_loc"
  1471. type: "Convolution"
  1472. bottom: "conv7_2_h"
  1473. top: "conv7_2_mbox_loc"
  1474. param {
  1475. lr_mult: 1
  1476. decay_mult: 1
  1477. }
  1478. param {
  1479. lr_mult: 2
  1480. decay_mult: 0
  1481. }
  1482. convolution_param {
  1483. num_output: 24
  1484. pad: 1
  1485. kernel_size: 3
  1486. stride: 1
  1487. weight_filler {
  1488. type: "xavier"
  1489. }
  1490. bias_filler {
  1491. type: "constant"
  1492. value: 0
  1493. }
  1494. }
  1495. }
  1496. layer {
  1497. name: "conv7_2_mbox_loc_perm"
  1498. type: "Permute"
  1499. bottom: "conv7_2_mbox_loc"
  1500. top: "conv7_2_mbox_loc_perm"
  1501. permute_param {
  1502. order: 0
  1503. order: 2
  1504. order: 3
  1505. order: 1
  1506. }
  1507. }
  1508. layer {
  1509. name: "conv7_2_mbox_loc_flat"
  1510. type: "Flatten"
  1511. bottom: "conv7_2_mbox_loc_perm"
  1512. top: "conv7_2_mbox_loc_flat"
  1513. flatten_param {
  1514. axis: 1
  1515. }
  1516. }
  1517. layer {
  1518. name: "conv7_2_mbox_conf"
  1519. type: "Convolution"
  1520. bottom: "conv7_2_h"
  1521. top: "conv7_2_mbox_conf"
  1522. param {
  1523. lr_mult: 1
  1524. decay_mult: 1
  1525. }
  1526. param {
  1527. lr_mult: 2
  1528. decay_mult: 0
  1529. }
  1530. convolution_param {
  1531. num_output: 12 # 126
  1532. pad: 1
  1533. kernel_size: 3
  1534. stride: 1
  1535. weight_filler {
  1536. type: "xavier"
  1537. }
  1538. bias_filler {
  1539. type: "constant"
  1540. value: 0
  1541. }
  1542. }
  1543. }
  1544. layer {
  1545. name: "conv7_2_mbox_conf_perm"
  1546. type: "Permute"
  1547. bottom: "conv7_2_mbox_conf"
  1548. top: "conv7_2_mbox_conf_perm"
  1549. permute_param {
  1550. order: 0
  1551. order: 2
  1552. order: 3
  1553. order: 1
  1554. }
  1555. }
  1556. layer {
  1557. name: "conv7_2_mbox_conf_flat"
  1558. type: "Flatten"
  1559. bottom: "conv7_2_mbox_conf_perm"
  1560. top: "conv7_2_mbox_conf_flat"
  1561. flatten_param {
  1562. axis: 1
  1563. }
  1564. }
  1565. layer {
  1566. name: "conv7_2_mbox_priorbox"
  1567. type: "PriorBox"
  1568. bottom: "conv7_2_h"
  1569. bottom: "data"
  1570. top: "conv7_2_mbox_priorbox"
  1571. prior_box_param {
  1572. min_size: 162.0
  1573. max_size: 213.0
  1574. aspect_ratio: 2
  1575. aspect_ratio: 3
  1576. flip: true
  1577. clip: false
  1578. variance: 0.1
  1579. variance: 0.1
  1580. variance: 0.2
  1581. variance: 0.2
  1582. step: 64
  1583. offset: 0.5
  1584. }
  1585. }
  1586. layer {
  1587. name: "conv8_2_mbox_loc"
  1588. type: "Convolution"
  1589. bottom: "conv8_2_h"
  1590. top: "conv8_2_mbox_loc"
  1591. param {
  1592. lr_mult: 1
  1593. decay_mult: 1
  1594. }
  1595. param {
  1596. lr_mult: 2
  1597. decay_mult: 0
  1598. }
  1599. convolution_param {
  1600. num_output: 16
  1601. pad: 1
  1602. kernel_size: 3
  1603. stride: 1
  1604. weight_filler {
  1605. type: "xavier"
  1606. }
  1607. bias_filler {
  1608. type: "constant"
  1609. value: 0
  1610. }
  1611. }
  1612. }
  1613. layer {
  1614. name: "conv8_2_mbox_loc_perm"
  1615. type: "Permute"
  1616. bottom: "conv8_2_mbox_loc"
  1617. top: "conv8_2_mbox_loc_perm"
  1618. permute_param {
  1619. order: 0
  1620. order: 2
  1621. order: 3
  1622. order: 1
  1623. }
  1624. }
  1625. layer {
  1626. name: "conv8_2_mbox_loc_flat"
  1627. type: "Flatten"
  1628. bottom: "conv8_2_mbox_loc_perm"
  1629. top: "conv8_2_mbox_loc_flat"
  1630. flatten_param {
  1631. axis: 1
  1632. }
  1633. }
  1634. layer {
  1635. name: "conv8_2_mbox_conf"
  1636. type: "Convolution"
  1637. bottom: "conv8_2_h"
  1638. top: "conv8_2_mbox_conf"
  1639. param {
  1640. lr_mult: 1
  1641. decay_mult: 1
  1642. }
  1643. param {
  1644. lr_mult: 2
  1645. decay_mult: 0
  1646. }
  1647. convolution_param {
  1648. num_output: 8 # 84
  1649. pad: 1
  1650. kernel_size: 3
  1651. stride: 1
  1652. weight_filler {
  1653. type: "xavier"
  1654. }
  1655. bias_filler {
  1656. type: "constant"
  1657. value: 0
  1658. }
  1659. }
  1660. }
  1661. layer {
  1662. name: "conv8_2_mbox_conf_perm"
  1663. type: "Permute"
  1664. bottom: "conv8_2_mbox_conf"
  1665. top: "conv8_2_mbox_conf_perm"
  1666. permute_param {
  1667. order: 0
  1668. order: 2
  1669. order: 3
  1670. order: 1
  1671. }
  1672. }
  1673. layer {
  1674. name: "conv8_2_mbox_conf_flat"
  1675. type: "Flatten"
  1676. bottom: "conv8_2_mbox_conf_perm"
  1677. top: "conv8_2_mbox_conf_flat"
  1678. flatten_param {
  1679. axis: 1
  1680. }
  1681. }
  1682. layer {
  1683. name: "conv8_2_mbox_priorbox"
  1684. type: "PriorBox"
  1685. bottom: "conv8_2_h"
  1686. bottom: "data"
  1687. top: "conv8_2_mbox_priorbox"
  1688. prior_box_param {
  1689. min_size: 213.0
  1690. max_size: 264.0
  1691. aspect_ratio: 2
  1692. flip: true
  1693. clip: false
  1694. variance: 0.1
  1695. variance: 0.1
  1696. variance: 0.2
  1697. variance: 0.2
  1698. step: 100
  1699. offset: 0.5
  1700. }
  1701. }
  1702. layer {
  1703. name: "conv9_2_mbox_loc"
  1704. type: "Convolution"
  1705. bottom: "conv9_2_h"
  1706. top: "conv9_2_mbox_loc"
  1707. param {
  1708. lr_mult: 1
  1709. decay_mult: 1
  1710. }
  1711. param {
  1712. lr_mult: 2
  1713. decay_mult: 0
  1714. }
  1715. convolution_param {
  1716. num_output: 16
  1717. pad: 1
  1718. kernel_size: 3
  1719. stride: 1
  1720. weight_filler {
  1721. type: "xavier"
  1722. }
  1723. bias_filler {
  1724. type: "constant"
  1725. value: 0
  1726. }
  1727. }
  1728. }
  1729. layer {
  1730. name: "conv9_2_mbox_loc_perm"
  1731. type: "Permute"
  1732. bottom: "conv9_2_mbox_loc"
  1733. top: "conv9_2_mbox_loc_perm"
  1734. permute_param {
  1735. order: 0
  1736. order: 2
  1737. order: 3
  1738. order: 1
  1739. }
  1740. }
  1741. layer {
  1742. name: "conv9_2_mbox_loc_flat"
  1743. type: "Flatten"
  1744. bottom: "conv9_2_mbox_loc_perm"
  1745. top: "conv9_2_mbox_loc_flat"
  1746. flatten_param {
  1747. axis: 1
  1748. }
  1749. }
  1750. layer {
  1751. name: "conv9_2_mbox_conf"
  1752. type: "Convolution"
  1753. bottom: "conv9_2_h"
  1754. top: "conv9_2_mbox_conf"
  1755. param {
  1756. lr_mult: 1
  1757. decay_mult: 1
  1758. }
  1759. param {
  1760. lr_mult: 2
  1761. decay_mult: 0
  1762. }
  1763. convolution_param {
  1764. num_output: 8 # 84
  1765. pad: 1
  1766. kernel_size: 3
  1767. stride: 1
  1768. weight_filler {
  1769. type: "xavier"
  1770. }
  1771. bias_filler {
  1772. type: "constant"
  1773. value: 0
  1774. }
  1775. }
  1776. }
  1777. layer {
  1778. name: "conv9_2_mbox_conf_perm"
  1779. type: "Permute"
  1780. bottom: "conv9_2_mbox_conf"
  1781. top: "conv9_2_mbox_conf_perm"
  1782. permute_param {
  1783. order: 0
  1784. order: 2
  1785. order: 3
  1786. order: 1
  1787. }
  1788. }
  1789. layer {
  1790. name: "conv9_2_mbox_conf_flat"
  1791. type: "Flatten"
  1792. bottom: "conv9_2_mbox_conf_perm"
  1793. top: "conv9_2_mbox_conf_flat"
  1794. flatten_param {
  1795. axis: 1
  1796. }
  1797. }
  1798. layer {
  1799. name: "conv9_2_mbox_priorbox"
  1800. type: "PriorBox"
  1801. bottom: "conv9_2_h"
  1802. bottom: "data"
  1803. top: "conv9_2_mbox_priorbox"
  1804. prior_box_param {
  1805. min_size: 264.0
  1806. max_size: 315.0
  1807. aspect_ratio: 2
  1808. flip: true
  1809. clip: false
  1810. variance: 0.1
  1811. variance: 0.1
  1812. variance: 0.2
  1813. variance: 0.2
  1814. step: 300
  1815. offset: 0.5
  1816. }
  1817. }
  1818. layer {
  1819. name: "mbox_loc"
  1820. type: "Concat"
  1821. bottom: "conv4_3_norm_mbox_loc_flat"
  1822. bottom: "fc7_mbox_loc_flat"
  1823. bottom: "conv6_2_mbox_loc_flat"
  1824. bottom: "conv7_2_mbox_loc_flat"
  1825. bottom: "conv8_2_mbox_loc_flat"
  1826. bottom: "conv9_2_mbox_loc_flat"
  1827. top: "mbox_loc"
  1828. concat_param {
  1829. axis: 1
  1830. }
  1831. }
  1832. layer {
  1833. name: "mbox_conf"
  1834. type: "Concat"
  1835. bottom: "conv4_3_norm_mbox_conf_flat"
  1836. bottom: "fc7_mbox_conf_flat"
  1837. bottom: "conv6_2_mbox_conf_flat"
  1838. bottom: "conv7_2_mbox_conf_flat"
  1839. bottom: "conv8_2_mbox_conf_flat"
  1840. bottom: "conv9_2_mbox_conf_flat"
  1841. top: "mbox_conf"
  1842. concat_param {
  1843. axis: 1
  1844. }
  1845. }
  1846. layer {
  1847. name: "mbox_priorbox"
  1848. type: "Concat"
  1849. bottom: "conv4_3_norm_mbox_priorbox"
  1850. bottom: "fc7_mbox_priorbox"
  1851. bottom: "conv6_2_mbox_priorbox"
  1852. bottom: "conv7_2_mbox_priorbox"
  1853. bottom: "conv8_2_mbox_priorbox"
  1854. bottom: "conv9_2_mbox_priorbox"
  1855. top: "mbox_priorbox"
  1856. concat_param {
  1857. axis: 2
  1858. }
  1859. }
  1860. layer {
  1861. name: "mbox_loss"
  1862. type: "MultiBoxLoss"
  1863. bottom: "mbox_loc"
  1864. bottom: "mbox_conf"
  1865. bottom: "mbox_priorbox"
  1866. bottom: "label"
  1867. top: "mbox_loss"
  1868. include {
  1869. phase: TRAIN
  1870. }
  1871. propagate_down: true
  1872. propagate_down: true
  1873. propagate_down: false
  1874. propagate_down: false
  1875. loss_param {
  1876. normalization: VALID
  1877. }
  1878. multibox_loss_param {
  1879. loc_loss_type: SMOOTH_L1
  1880. conf_loss_type: SOFTMAX
  1881. loc_weight: 1.0
  1882. num_classes: 2 # 21
  1883. share_location: true
  1884. match_type: PER_PREDICTION
  1885. overlap_threshold: 0.5
  1886. use_prior_for_matching: true
  1887. background_label_id: 0
  1888. use_difficult_gt: true
  1889. neg_pos_ratio: 3.0
  1890. neg_overlap: 0.5
  1891. code_type: CENTER_SIZE
  1892. ignore_cross_boundary_bbox: false
  1893. mining_type: MAX_NEGATIVE
  1894. }
  1895. }