test.prototxt 28 KB

1234567891011121314151617181920212223242526272829303132333435363738394041424344454647484950515253545556575859606162636465666768697071727374757677787980818283848586878889909192939495969798991001011021031041051061071081091101111121131141151161171181191201211221231241251261271281291301311321331341351361371381391401411421431441451461471481491501511521531541551561571581591601611621631641651661671681691701711721731741751761771781791801811821831841851861871881891901911921931941951961971981992002012022032042052062072082092102112122132142152162172182192202212222232242252262272282292302312322332342352362372382392402412422432442452462472482492502512522532542552562572582592602612622632642652662672682692702712722732742752762772782792802812822832842852862872882892902912922932942952962972982993003013023033043053063073083093103113123133143153163173183193203213223233243253263273283293303313323333343353363373383393403413423433443453463473483493503513523533543553563573583593603613623633643653663673683693703713723733743753763773783793803813823833843853863873883893903913923933943953963973983994004014024034044054064074084094104114124134144154164174184194204214224234244254264274284294304314324334344354364374384394404414424434444454464474484494504514524534544554564574584594604614624634644654664674684694704714724734744754764774784794804814824834844854864874884894904914924934944954964974984995005015025035045055065075085095105115125135145155165175185195205215225235245255265275285295305315325335345355365375385395405415425435445455465475485495505515525535545555565575585595605615625635645655665675685695705715725735745755765775785795805815825835845855865875885895905915925935945955965975985996006016026036046056066076086096106116126136146156166176186196206216226236246256266276286296306316326336346356366376386396406416426436446456466476486496506516526536546556566576586596606616626636646656666676686696706716726736746756766776786796806816826836846856866876886896906916926936946956966976986997007017027037047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105110611071108110911101111111211131114111511161117111811191120112111221123112411251126112711281129113011311132113311341135113611371138113911401141114211431144114511461147114811491150115111521153115411551156115711581159116011611162116311641165116611671168116911701171117211731174117511761177117811791180118111821183118411851186118711881189119011911192119311941195119611971198119912001201120212031204120512061207120812091210121112121213121412151216121712181219122012211222122312241225122612271228122912301231123212331234123512361237123812391240124112421243124412451246124712481249125012511252125312541255125612571258125912601261126212631264126512661267126812691270127112721273127412751276127712781279128012811282128312841285128612871288128912901291129212931294129512961297129812991300130113021303130413051306130713081309131013111312131313141315131613171318131913201321132213231324132513261327132813291330133113321333133413351336133713381339134013411342134313441345134613471348134913501351135213531354135513561357135813591360136113621363136413651366136713681369137013711372137313741375137613771378137913801381138213831384138513861387138813891390139113921393139413951396139713981399140014011402140314041405140614071408140914101411141214131414141514161417141814191420142114221423142414251426142714281429143014311432143314341435143614371438143914401441144214431444144514461447144814491450145114521453145414551456145714581459146014611462146314641465146614671468146914701471147214731474147514761477147814791480148114821483148414851486148714881489149014911492149314941495149614971498149915001501150215031504150515061507150815091510151115121513151415151516151715181519152015211522152315241525152615271528152915301531153215331534153515361537153815391540154115421543154415451546154715481549155015511552155315541555155615571558155915601561156215631564156515661567156815691570157115721573157415751576157715781579158015811582158315841585158615871588158915901591159215931594159515961597159815991600160116021603160416051606160716081609161016111612161316141615161616171618161916201621162216231624162516261627162816291630163116321633163416351636163716381639164016411642164316441645164616471648164916501651165216531654165516561657165816591660166116621663166416651666166716681669167016711672167316741675167616771678167916801681168216831684168516861687168816891690169116921693169416951696169716981699170017011702170317041705170617071708170917101711171217131714171517161717171817191720172117221723172417251726172717281729173017311732173317341735173617371738173917401741174217431744174517461747174817491750175117521753175417551756175717581759176017611762176317641765176617671768176917701771177217731774177517761777177817791780178117821783178417851786178717881789179017911792179317941795179617971798179918001801180218031804180518061807180818091810181118121813181418151816181718181819182018211822182318241825182618271828182918301831
  1. layer {
  2. name: "data"
  3. type: "AnnotatedData"
  4. top: "data"
  5. top: "label"
  6. include {
  7. phase: TEST
  8. }
  9. transform_param {
  10. mean_value: 104
  11. mean_value: 117
  12. mean_value: 123
  13. resize_param {
  14. prob: 1
  15. resize_mode: WARP
  16. height: 300
  17. width: 300
  18. interp_mode: LINEAR
  19. }
  20. emit_constraint {
  21. emit_type: CENTER
  22. }
  23. }
  24. data_param {
  25. source: "val_lmdb/"
  26. batch_size: 1
  27. backend: LMDB
  28. }
  29. annotated_data_param {
  30. label_map_file: "labelmap.prototxt"
  31. }
  32. }
  33. layer {
  34. name: "data_bn"
  35. type: "BatchNorm"
  36. bottom: "data"
  37. top: "data_bn"
  38. param {
  39. lr_mult: 0.0
  40. }
  41. param {
  42. lr_mult: 0.0
  43. }
  44. param {
  45. lr_mult: 0.0
  46. }
  47. }
  48. layer {
  49. name: "data_scale"
  50. type: "Scale"
  51. bottom: "data_bn"
  52. top: "data_bn"
  53. param {
  54. lr_mult: 1.0
  55. decay_mult: 1.0
  56. }
  57. param {
  58. lr_mult: 2.0
  59. decay_mult: 1.0
  60. }
  61. scale_param {
  62. bias_term: true
  63. }
  64. }
  65. layer {
  66. name: "conv1_h"
  67. type: "Convolution"
  68. bottom: "data_bn"
  69. top: "conv1_h"
  70. param {
  71. lr_mult: 1.0
  72. decay_mult: 1.0
  73. }
  74. param {
  75. lr_mult: 2.0
  76. decay_mult: 1.0
  77. }
  78. convolution_param {
  79. num_output: 32
  80. pad: 3
  81. kernel_size: 7
  82. stride: 2
  83. weight_filler {
  84. type: "msra"
  85. variance_norm: FAN_OUT
  86. }
  87. bias_filler {
  88. type: "constant"
  89. value: 0.0
  90. }
  91. }
  92. }
  93. layer {
  94. name: "conv1_bn_h"
  95. type: "BatchNorm"
  96. bottom: "conv1_h"
  97. top: "conv1_h"
  98. param {
  99. lr_mult: 0.0
  100. }
  101. param {
  102. lr_mult: 0.0
  103. }
  104. param {
  105. lr_mult: 0.0
  106. }
  107. }
  108. layer {
  109. name: "conv1_scale_h"
  110. type: "Scale"
  111. bottom: "conv1_h"
  112. top: "conv1_h"
  113. param {
  114. lr_mult: 1.0
  115. decay_mult: 1.0
  116. }
  117. param {
  118. lr_mult: 2.0
  119. decay_mult: 1.0
  120. }
  121. scale_param {
  122. bias_term: true
  123. }
  124. }
  125. layer {
  126. name: "conv1_relu"
  127. type: "ReLU"
  128. bottom: "conv1_h"
  129. top: "conv1_h"
  130. }
  131. layer {
  132. name: "conv1_pool"
  133. type: "Pooling"
  134. bottom: "conv1_h"
  135. top: "conv1_pool"
  136. pooling_param {
  137. kernel_size: 3
  138. stride: 2
  139. }
  140. }
  141. layer {
  142. name: "layer_64_1_conv1_h"
  143. type: "Convolution"
  144. bottom: "conv1_pool"
  145. top: "layer_64_1_conv1_h"
  146. param {
  147. lr_mult: 1.0
  148. decay_mult: 1.0
  149. }
  150. convolution_param {
  151. num_output: 32
  152. bias_term: false
  153. pad: 1
  154. kernel_size: 3
  155. stride: 1
  156. weight_filler {
  157. type: "msra"
  158. }
  159. bias_filler {
  160. type: "constant"
  161. value: 0.0
  162. }
  163. }
  164. }
  165. layer {
  166. name: "layer_64_1_bn2_h"
  167. type: "BatchNorm"
  168. bottom: "layer_64_1_conv1_h"
  169. top: "layer_64_1_conv1_h"
  170. param {
  171. lr_mult: 0.0
  172. }
  173. param {
  174. lr_mult: 0.0
  175. }
  176. param {
  177. lr_mult: 0.0
  178. }
  179. }
  180. layer {
  181. name: "layer_64_1_scale2_h"
  182. type: "Scale"
  183. bottom: "layer_64_1_conv1_h"
  184. top: "layer_64_1_conv1_h"
  185. param {
  186. lr_mult: 1.0
  187. decay_mult: 1.0
  188. }
  189. param {
  190. lr_mult: 2.0
  191. decay_mult: 1.0
  192. }
  193. scale_param {
  194. bias_term: true
  195. }
  196. }
  197. layer {
  198. name: "layer_64_1_relu2"
  199. type: "ReLU"
  200. bottom: "layer_64_1_conv1_h"
  201. top: "layer_64_1_conv1_h"
  202. }
  203. layer {
  204. name: "layer_64_1_conv2_h"
  205. type: "Convolution"
  206. bottom: "layer_64_1_conv1_h"
  207. top: "layer_64_1_conv2_h"
  208. param {
  209. lr_mult: 1.0
  210. decay_mult: 1.0
  211. }
  212. convolution_param {
  213. num_output: 32
  214. bias_term: false
  215. pad: 1
  216. kernel_size: 3
  217. stride: 1
  218. weight_filler {
  219. type: "msra"
  220. }
  221. bias_filler {
  222. type: "constant"
  223. value: 0.0
  224. }
  225. }
  226. }
  227. layer {
  228. name: "layer_64_1_sum"
  229. type: "Eltwise"
  230. bottom: "layer_64_1_conv2_h"
  231. bottom: "conv1_pool"
  232. top: "layer_64_1_sum"
  233. }
  234. layer {
  235. name: "layer_128_1_bn1_h"
  236. type: "BatchNorm"
  237. bottom: "layer_64_1_sum"
  238. top: "layer_128_1_bn1_h"
  239. param {
  240. lr_mult: 0.0
  241. }
  242. param {
  243. lr_mult: 0.0
  244. }
  245. param {
  246. lr_mult: 0.0
  247. }
  248. }
  249. layer {
  250. name: "layer_128_1_scale1_h"
  251. type: "Scale"
  252. bottom: "layer_128_1_bn1_h"
  253. top: "layer_128_1_bn1_h"
  254. param {
  255. lr_mult: 1.0
  256. decay_mult: 1.0
  257. }
  258. param {
  259. lr_mult: 2.0
  260. decay_mult: 1.0
  261. }
  262. scale_param {
  263. bias_term: true
  264. }
  265. }
  266. layer {
  267. name: "layer_128_1_relu1"
  268. type: "ReLU"
  269. bottom: "layer_128_1_bn1_h"
  270. top: "layer_128_1_bn1_h"
  271. }
  272. layer {
  273. name: "layer_128_1_conv1_h"
  274. type: "Convolution"
  275. bottom: "layer_128_1_bn1_h"
  276. top: "layer_128_1_conv1_h"
  277. param {
  278. lr_mult: 1.0
  279. decay_mult: 1.0
  280. }
  281. convolution_param {
  282. num_output: 128
  283. bias_term: false
  284. pad: 1
  285. kernel_size: 3
  286. stride: 2
  287. weight_filler {
  288. type: "msra"
  289. }
  290. bias_filler {
  291. type: "constant"
  292. value: 0.0
  293. }
  294. }
  295. }
  296. layer {
  297. name: "layer_128_1_bn2"
  298. type: "BatchNorm"
  299. bottom: "layer_128_1_conv1_h"
  300. top: "layer_128_1_conv1_h"
  301. param {
  302. lr_mult: 0.0
  303. }
  304. param {
  305. lr_mult: 0.0
  306. }
  307. param {
  308. lr_mult: 0.0
  309. }
  310. }
  311. layer {
  312. name: "layer_128_1_scale2"
  313. type: "Scale"
  314. bottom: "layer_128_1_conv1_h"
  315. top: "layer_128_1_conv1_h"
  316. param {
  317. lr_mult: 1.0
  318. decay_mult: 1.0
  319. }
  320. param {
  321. lr_mult: 2.0
  322. decay_mult: 1.0
  323. }
  324. scale_param {
  325. bias_term: true
  326. }
  327. }
  328. layer {
  329. name: "layer_128_1_relu2"
  330. type: "ReLU"
  331. bottom: "layer_128_1_conv1_h"
  332. top: "layer_128_1_conv1_h"
  333. }
  334. layer {
  335. name: "layer_128_1_conv2"
  336. type: "Convolution"
  337. bottom: "layer_128_1_conv1_h"
  338. top: "layer_128_1_conv2"
  339. param {
  340. lr_mult: 1.0
  341. decay_mult: 1.0
  342. }
  343. convolution_param {
  344. num_output: 128
  345. bias_term: false
  346. pad: 1
  347. kernel_size: 3
  348. stride: 1
  349. weight_filler {
  350. type: "msra"
  351. }
  352. bias_filler {
  353. type: "constant"
  354. value: 0.0
  355. }
  356. }
  357. }
  358. layer {
  359. name: "layer_128_1_conv_expand_h"
  360. type: "Convolution"
  361. bottom: "layer_128_1_bn1_h"
  362. top: "layer_128_1_conv_expand_h"
  363. param {
  364. lr_mult: 1.0
  365. decay_mult: 1.0
  366. }
  367. convolution_param {
  368. num_output: 128
  369. bias_term: false
  370. pad: 0
  371. kernel_size: 1
  372. stride: 2
  373. weight_filler {
  374. type: "msra"
  375. }
  376. bias_filler {
  377. type: "constant"
  378. value: 0.0
  379. }
  380. }
  381. }
  382. layer {
  383. name: "layer_128_1_sum"
  384. type: "Eltwise"
  385. bottom: "layer_128_1_conv2"
  386. bottom: "layer_128_1_conv_expand_h"
  387. top: "layer_128_1_sum"
  388. }
  389. layer {
  390. name: "layer_256_1_bn1"
  391. type: "BatchNorm"
  392. bottom: "layer_128_1_sum"
  393. top: "layer_256_1_bn1"
  394. param {
  395. lr_mult: 0.0
  396. }
  397. param {
  398. lr_mult: 0.0
  399. }
  400. param {
  401. lr_mult: 0.0
  402. }
  403. }
  404. layer {
  405. name: "layer_256_1_scale1"
  406. type: "Scale"
  407. bottom: "layer_256_1_bn1"
  408. top: "layer_256_1_bn1"
  409. param {
  410. lr_mult: 1.0
  411. decay_mult: 1.0
  412. }
  413. param {
  414. lr_mult: 2.0
  415. decay_mult: 1.0
  416. }
  417. scale_param {
  418. bias_term: true
  419. }
  420. }
  421. layer {
  422. name: "layer_256_1_relu1"
  423. type: "ReLU"
  424. bottom: "layer_256_1_bn1"
  425. top: "layer_256_1_bn1"
  426. }
  427. layer {
  428. name: "layer_256_1_conv1"
  429. type: "Convolution"
  430. bottom: "layer_256_1_bn1"
  431. top: "layer_256_1_conv1"
  432. param {
  433. lr_mult: 1.0
  434. decay_mult: 1.0
  435. }
  436. convolution_param {
  437. num_output: 256
  438. bias_term: false
  439. pad: 1
  440. kernel_size: 3
  441. stride: 2
  442. weight_filler {
  443. type: "msra"
  444. }
  445. bias_filler {
  446. type: "constant"
  447. value: 0.0
  448. }
  449. }
  450. }
  451. layer {
  452. name: "layer_256_1_bn2"
  453. type: "BatchNorm"
  454. bottom: "layer_256_1_conv1"
  455. top: "layer_256_1_conv1"
  456. param {
  457. lr_mult: 0.0
  458. }
  459. param {
  460. lr_mult: 0.0
  461. }
  462. param {
  463. lr_mult: 0.0
  464. }
  465. }
  466. layer {
  467. name: "layer_256_1_scale2"
  468. type: "Scale"
  469. bottom: "layer_256_1_conv1"
  470. top: "layer_256_1_conv1"
  471. param {
  472. lr_mult: 1.0
  473. decay_mult: 1.0
  474. }
  475. param {
  476. lr_mult: 2.0
  477. decay_mult: 1.0
  478. }
  479. scale_param {
  480. bias_term: true
  481. }
  482. }
  483. layer {
  484. name: "layer_256_1_relu2"
  485. type: "ReLU"
  486. bottom: "layer_256_1_conv1"
  487. top: "layer_256_1_conv1"
  488. }
  489. layer {
  490. name: "layer_256_1_conv2"
  491. type: "Convolution"
  492. bottom: "layer_256_1_conv1"
  493. top: "layer_256_1_conv2"
  494. param {
  495. lr_mult: 1.0
  496. decay_mult: 1.0
  497. }
  498. convolution_param {
  499. num_output: 256
  500. bias_term: false
  501. pad: 1
  502. kernel_size: 3
  503. stride: 1
  504. weight_filler {
  505. type: "msra"
  506. }
  507. bias_filler {
  508. type: "constant"
  509. value: 0.0
  510. }
  511. }
  512. }
  513. layer {
  514. name: "layer_256_1_conv_expand"
  515. type: "Convolution"
  516. bottom: "layer_256_1_bn1"
  517. top: "layer_256_1_conv_expand"
  518. param {
  519. lr_mult: 1.0
  520. decay_mult: 1.0
  521. }
  522. convolution_param {
  523. num_output: 256
  524. bias_term: false
  525. pad: 0
  526. kernel_size: 1
  527. stride: 2
  528. weight_filler {
  529. type: "msra"
  530. }
  531. bias_filler {
  532. type: "constant"
  533. value: 0.0
  534. }
  535. }
  536. }
  537. layer {
  538. name: "layer_256_1_sum"
  539. type: "Eltwise"
  540. bottom: "layer_256_1_conv2"
  541. bottom: "layer_256_1_conv_expand"
  542. top: "layer_256_1_sum"
  543. }
  544. layer {
  545. name: "layer_512_1_bn1"
  546. type: "BatchNorm"
  547. bottom: "layer_256_1_sum"
  548. top: "layer_512_1_bn1"
  549. param {
  550. lr_mult: 0.0
  551. }
  552. param {
  553. lr_mult: 0.0
  554. }
  555. param {
  556. lr_mult: 0.0
  557. }
  558. }
  559. layer {
  560. name: "layer_512_1_scale1"
  561. type: "Scale"
  562. bottom: "layer_512_1_bn1"
  563. top: "layer_512_1_bn1"
  564. param {
  565. lr_mult: 1.0
  566. decay_mult: 1.0
  567. }
  568. param {
  569. lr_mult: 2.0
  570. decay_mult: 1.0
  571. }
  572. scale_param {
  573. bias_term: true
  574. }
  575. }
  576. layer {
  577. name: "layer_512_1_relu1"
  578. type: "ReLU"
  579. bottom: "layer_512_1_bn1"
  580. top: "layer_512_1_bn1"
  581. }
  582. layer {
  583. name: "layer_512_1_conv1_h"
  584. type: "Convolution"
  585. bottom: "layer_512_1_bn1"
  586. top: "layer_512_1_conv1_h"
  587. param {
  588. lr_mult: 1.0
  589. decay_mult: 1.0
  590. }
  591. convolution_param {
  592. num_output: 128
  593. bias_term: false
  594. pad: 1
  595. kernel_size: 3
  596. stride: 1 # 2
  597. weight_filler {
  598. type: "msra"
  599. }
  600. bias_filler {
  601. type: "constant"
  602. value: 0.0
  603. }
  604. }
  605. }
  606. layer {
  607. name: "layer_512_1_bn2_h"
  608. type: "BatchNorm"
  609. bottom: "layer_512_1_conv1_h"
  610. top: "layer_512_1_conv1_h"
  611. param {
  612. lr_mult: 0.0
  613. }
  614. param {
  615. lr_mult: 0.0
  616. }
  617. param {
  618. lr_mult: 0.0
  619. }
  620. }
  621. layer {
  622. name: "layer_512_1_scale2_h"
  623. type: "Scale"
  624. bottom: "layer_512_1_conv1_h"
  625. top: "layer_512_1_conv1_h"
  626. param {
  627. lr_mult: 1.0
  628. decay_mult: 1.0
  629. }
  630. param {
  631. lr_mult: 2.0
  632. decay_mult: 1.0
  633. }
  634. scale_param {
  635. bias_term: true
  636. }
  637. }
  638. layer {
  639. name: "layer_512_1_relu2"
  640. type: "ReLU"
  641. bottom: "layer_512_1_conv1_h"
  642. top: "layer_512_1_conv1_h"
  643. }
  644. layer {
  645. name: "layer_512_1_conv2_h"
  646. type: "Convolution"
  647. bottom: "layer_512_1_conv1_h"
  648. top: "layer_512_1_conv2_h"
  649. param {
  650. lr_mult: 1.0
  651. decay_mult: 1.0
  652. }
  653. convolution_param {
  654. num_output: 256
  655. bias_term: false
  656. pad: 2 # 1
  657. kernel_size: 3
  658. stride: 1
  659. dilation: 2
  660. weight_filler {
  661. type: "msra"
  662. }
  663. bias_filler {
  664. type: "constant"
  665. value: 0.0
  666. }
  667. }
  668. }
  669. layer {
  670. name: "layer_512_1_conv_expand_h"
  671. type: "Convolution"
  672. bottom: "layer_512_1_bn1"
  673. top: "layer_512_1_conv_expand_h"
  674. param {
  675. lr_mult: 1.0
  676. decay_mult: 1.0
  677. }
  678. convolution_param {
  679. num_output: 256
  680. bias_term: false
  681. pad: 0
  682. kernel_size: 1
  683. stride: 1 # 2
  684. weight_filler {
  685. type: "msra"
  686. }
  687. bias_filler {
  688. type: "constant"
  689. value: 0.0
  690. }
  691. }
  692. }
  693. layer {
  694. name: "layer_512_1_sum"
  695. type: "Eltwise"
  696. bottom: "layer_512_1_conv2_h"
  697. bottom: "layer_512_1_conv_expand_h"
  698. top: "layer_512_1_sum"
  699. }
  700. layer {
  701. name: "last_bn_h"
  702. type: "BatchNorm"
  703. bottom: "layer_512_1_sum"
  704. top: "layer_512_1_sum"
  705. param {
  706. lr_mult: 0.0
  707. }
  708. param {
  709. lr_mult: 0.0
  710. }
  711. param {
  712. lr_mult: 0.0
  713. }
  714. }
  715. layer {
  716. name: "last_scale_h"
  717. type: "Scale"
  718. bottom: "layer_512_1_sum"
  719. top: "layer_512_1_sum"
  720. param {
  721. lr_mult: 1.0
  722. decay_mult: 1.0
  723. }
  724. param {
  725. lr_mult: 2.0
  726. decay_mult: 1.0
  727. }
  728. scale_param {
  729. bias_term: true
  730. }
  731. }
  732. layer {
  733. name: "last_relu"
  734. type: "ReLU"
  735. bottom: "layer_512_1_sum"
  736. top: "fc7"
  737. }
  738. layer {
  739. name: "conv6_1_h"
  740. type: "Convolution"
  741. bottom: "fc7"
  742. top: "conv6_1_h"
  743. param {
  744. lr_mult: 1
  745. decay_mult: 1
  746. }
  747. param {
  748. lr_mult: 2
  749. decay_mult: 0
  750. }
  751. convolution_param {
  752. num_output: 128
  753. pad: 0
  754. kernel_size: 1
  755. stride: 1
  756. weight_filler {
  757. type: "xavier"
  758. }
  759. bias_filler {
  760. type: "constant"
  761. value: 0
  762. }
  763. }
  764. }
  765. layer {
  766. name: "conv6_1_relu"
  767. type: "ReLU"
  768. bottom: "conv6_1_h"
  769. top: "conv6_1_h"
  770. }
  771. layer {
  772. name: "conv6_2_h"
  773. type: "Convolution"
  774. bottom: "conv6_1_h"
  775. top: "conv6_2_h"
  776. param {
  777. lr_mult: 1
  778. decay_mult: 1
  779. }
  780. param {
  781. lr_mult: 2
  782. decay_mult: 0
  783. }
  784. convolution_param {
  785. num_output: 256
  786. pad: 1
  787. kernel_size: 3
  788. stride: 2
  789. weight_filler {
  790. type: "xavier"
  791. }
  792. bias_filler {
  793. type: "constant"
  794. value: 0
  795. }
  796. }
  797. }
  798. layer {
  799. name: "conv6_2_relu"
  800. type: "ReLU"
  801. bottom: "conv6_2_h"
  802. top: "conv6_2_h"
  803. }
  804. layer {
  805. name: "conv7_1_h"
  806. type: "Convolution"
  807. bottom: "conv6_2_h"
  808. top: "conv7_1_h"
  809. param {
  810. lr_mult: 1
  811. decay_mult: 1
  812. }
  813. param {
  814. lr_mult: 2
  815. decay_mult: 0
  816. }
  817. convolution_param {
  818. num_output: 64
  819. pad: 0
  820. kernel_size: 1
  821. stride: 1
  822. weight_filler {
  823. type: "xavier"
  824. }
  825. bias_filler {
  826. type: "constant"
  827. value: 0
  828. }
  829. }
  830. }
  831. layer {
  832. name: "conv7_1_relu"
  833. type: "ReLU"
  834. bottom: "conv7_1_h"
  835. top: "conv7_1_h"
  836. }
  837. layer {
  838. name: "conv7_2_h"
  839. type: "Convolution"
  840. bottom: "conv7_1_h"
  841. top: "conv7_2_h"
  842. param {
  843. lr_mult: 1
  844. decay_mult: 1
  845. }
  846. param {
  847. lr_mult: 2
  848. decay_mult: 0
  849. }
  850. convolution_param {
  851. num_output: 128
  852. pad: 1
  853. kernel_size: 3
  854. stride: 2
  855. weight_filler {
  856. type: "xavier"
  857. }
  858. bias_filler {
  859. type: "constant"
  860. value: 0
  861. }
  862. }
  863. }
  864. layer {
  865. name: "conv7_2_relu"
  866. type: "ReLU"
  867. bottom: "conv7_2_h"
  868. top: "conv7_2_h"
  869. }
  870. layer {
  871. name: "conv8_1_h"
  872. type: "Convolution"
  873. bottom: "conv7_2_h"
  874. top: "conv8_1_h"
  875. param {
  876. lr_mult: 1
  877. decay_mult: 1
  878. }
  879. param {
  880. lr_mult: 2
  881. decay_mult: 0
  882. }
  883. convolution_param {
  884. num_output: 64
  885. pad: 0
  886. kernel_size: 1
  887. stride: 1
  888. weight_filler {
  889. type: "xavier"
  890. }
  891. bias_filler {
  892. type: "constant"
  893. value: 0
  894. }
  895. }
  896. }
  897. layer {
  898. name: "conv8_1_relu"
  899. type: "ReLU"
  900. bottom: "conv8_1_h"
  901. top: "conv8_1_h"
  902. }
  903. layer {
  904. name: "conv8_2_h"
  905. type: "Convolution"
  906. bottom: "conv8_1_h"
  907. top: "conv8_2_h"
  908. param {
  909. lr_mult: 1
  910. decay_mult: 1
  911. }
  912. param {
  913. lr_mult: 2
  914. decay_mult: 0
  915. }
  916. convolution_param {
  917. num_output: 128
  918. pad: 0
  919. kernel_size: 3
  920. stride: 1
  921. weight_filler {
  922. type: "xavier"
  923. }
  924. bias_filler {
  925. type: "constant"
  926. value: 0
  927. }
  928. }
  929. }
  930. layer {
  931. name: "conv8_2_relu"
  932. type: "ReLU"
  933. bottom: "conv8_2_h"
  934. top: "conv8_2_h"
  935. }
  936. layer {
  937. name: "conv9_1_h"
  938. type: "Convolution"
  939. bottom: "conv8_2_h"
  940. top: "conv9_1_h"
  941. param {
  942. lr_mult: 1
  943. decay_mult: 1
  944. }
  945. param {
  946. lr_mult: 2
  947. decay_mult: 0
  948. }
  949. convolution_param {
  950. num_output: 64
  951. pad: 0
  952. kernel_size: 1
  953. stride: 1
  954. weight_filler {
  955. type: "xavier"
  956. }
  957. bias_filler {
  958. type: "constant"
  959. value: 0
  960. }
  961. }
  962. }
  963. layer {
  964. name: "conv9_1_relu"
  965. type: "ReLU"
  966. bottom: "conv9_1_h"
  967. top: "conv9_1_h"
  968. }
  969. layer {
  970. name: "conv9_2_h"
  971. type: "Convolution"
  972. bottom: "conv9_1_h"
  973. top: "conv9_2_h"
  974. param {
  975. lr_mult: 1
  976. decay_mult: 1
  977. }
  978. param {
  979. lr_mult: 2
  980. decay_mult: 0
  981. }
  982. convolution_param {
  983. num_output: 128
  984. pad: 0
  985. kernel_size: 3
  986. stride: 1
  987. weight_filler {
  988. type: "xavier"
  989. }
  990. bias_filler {
  991. type: "constant"
  992. value: 0
  993. }
  994. }
  995. }
  996. layer {
  997. name: "conv9_2_relu"
  998. type: "ReLU"
  999. bottom: "conv9_2_h"
  1000. top: "conv9_2_h"
  1001. }
  1002. layer {
  1003. name: "conv4_3_norm"
  1004. type: "Normalize"
  1005. bottom: "layer_256_1_bn1"
  1006. top: "conv4_3_norm"
  1007. norm_param {
  1008. across_spatial: false
  1009. scale_filler {
  1010. type: "constant"
  1011. value: 20
  1012. }
  1013. channel_shared: false
  1014. }
  1015. }
  1016. layer {
  1017. name: "conv4_3_norm_mbox_loc"
  1018. type: "Convolution"
  1019. bottom: "conv4_3_norm"
  1020. top: "conv4_3_norm_mbox_loc"
  1021. param {
  1022. lr_mult: 1
  1023. decay_mult: 1
  1024. }
  1025. param {
  1026. lr_mult: 2
  1027. decay_mult: 0
  1028. }
  1029. convolution_param {
  1030. num_output: 16
  1031. pad: 1
  1032. kernel_size: 3
  1033. stride: 1
  1034. weight_filler {
  1035. type: "xavier"
  1036. }
  1037. bias_filler {
  1038. type: "constant"
  1039. value: 0
  1040. }
  1041. }
  1042. }
  1043. layer {
  1044. name: "conv4_3_norm_mbox_loc_perm"
  1045. type: "Permute"
  1046. bottom: "conv4_3_norm_mbox_loc"
  1047. top: "conv4_3_norm_mbox_loc_perm"
  1048. permute_param {
  1049. order: 0
  1050. order: 2
  1051. order: 3
  1052. order: 1
  1053. }
  1054. }
  1055. layer {
  1056. name: "conv4_3_norm_mbox_loc_flat"
  1057. type: "Flatten"
  1058. bottom: "conv4_3_norm_mbox_loc_perm"
  1059. top: "conv4_3_norm_mbox_loc_flat"
  1060. flatten_param {
  1061. axis: 1
  1062. }
  1063. }
  1064. layer {
  1065. name: "conv4_3_norm_mbox_conf"
  1066. type: "Convolution"
  1067. bottom: "conv4_3_norm"
  1068. top: "conv4_3_norm_mbox_conf"
  1069. param {
  1070. lr_mult: 1
  1071. decay_mult: 1
  1072. }
  1073. param {
  1074. lr_mult: 2
  1075. decay_mult: 0
  1076. }
  1077. convolution_param {
  1078. num_output: 8 # 84
  1079. pad: 1
  1080. kernel_size: 3
  1081. stride: 1
  1082. weight_filler {
  1083. type: "xavier"
  1084. }
  1085. bias_filler {
  1086. type: "constant"
  1087. value: 0
  1088. }
  1089. }
  1090. }
  1091. layer {
  1092. name: "conv4_3_norm_mbox_conf_perm"
  1093. type: "Permute"
  1094. bottom: "conv4_3_norm_mbox_conf"
  1095. top: "conv4_3_norm_mbox_conf_perm"
  1096. permute_param {
  1097. order: 0
  1098. order: 2
  1099. order: 3
  1100. order: 1
  1101. }
  1102. }
  1103. layer {
  1104. name: "conv4_3_norm_mbox_conf_flat"
  1105. type: "Flatten"
  1106. bottom: "conv4_3_norm_mbox_conf_perm"
  1107. top: "conv4_3_norm_mbox_conf_flat"
  1108. flatten_param {
  1109. axis: 1
  1110. }
  1111. }
  1112. layer {
  1113. name: "conv4_3_norm_mbox_priorbox"
  1114. type: "PriorBox"
  1115. bottom: "conv4_3_norm"
  1116. bottom: "data"
  1117. top: "conv4_3_norm_mbox_priorbox"
  1118. prior_box_param {
  1119. min_size: 30.0
  1120. max_size: 60.0
  1121. aspect_ratio: 2
  1122. flip: true
  1123. clip: false
  1124. variance: 0.1
  1125. variance: 0.1
  1126. variance: 0.2
  1127. variance: 0.2
  1128. step: 8
  1129. offset: 0.5
  1130. }
  1131. }
  1132. layer {
  1133. name: "fc7_mbox_loc"
  1134. type: "Convolution"
  1135. bottom: "fc7"
  1136. top: "fc7_mbox_loc"
  1137. param {
  1138. lr_mult: 1
  1139. decay_mult: 1
  1140. }
  1141. param {
  1142. lr_mult: 2
  1143. decay_mult: 0
  1144. }
  1145. convolution_param {
  1146. num_output: 24
  1147. pad: 1
  1148. kernel_size: 3
  1149. stride: 1
  1150. weight_filler {
  1151. type: "xavier"
  1152. }
  1153. bias_filler {
  1154. type: "constant"
  1155. value: 0
  1156. }
  1157. }
  1158. }
  1159. layer {
  1160. name: "fc7_mbox_loc_perm"
  1161. type: "Permute"
  1162. bottom: "fc7_mbox_loc"
  1163. top: "fc7_mbox_loc_perm"
  1164. permute_param {
  1165. order: 0
  1166. order: 2
  1167. order: 3
  1168. order: 1
  1169. }
  1170. }
  1171. layer {
  1172. name: "fc7_mbox_loc_flat"
  1173. type: "Flatten"
  1174. bottom: "fc7_mbox_loc_perm"
  1175. top: "fc7_mbox_loc_flat"
  1176. flatten_param {
  1177. axis: 1
  1178. }
  1179. }
  1180. layer {
  1181. name: "fc7_mbox_conf"
  1182. type: "Convolution"
  1183. bottom: "fc7"
  1184. top: "fc7_mbox_conf"
  1185. param {
  1186. lr_mult: 1
  1187. decay_mult: 1
  1188. }
  1189. param {
  1190. lr_mult: 2
  1191. decay_mult: 0
  1192. }
  1193. convolution_param {
  1194. num_output: 12 # 126
  1195. pad: 1
  1196. kernel_size: 3
  1197. stride: 1
  1198. weight_filler {
  1199. type: "xavier"
  1200. }
  1201. bias_filler {
  1202. type: "constant"
  1203. value: 0
  1204. }
  1205. }
  1206. }
  1207. layer {
  1208. name: "fc7_mbox_conf_perm"
  1209. type: "Permute"
  1210. bottom: "fc7_mbox_conf"
  1211. top: "fc7_mbox_conf_perm"
  1212. permute_param {
  1213. order: 0
  1214. order: 2
  1215. order: 3
  1216. order: 1
  1217. }
  1218. }
  1219. layer {
  1220. name: "fc7_mbox_conf_flat"
  1221. type: "Flatten"
  1222. bottom: "fc7_mbox_conf_perm"
  1223. top: "fc7_mbox_conf_flat"
  1224. flatten_param {
  1225. axis: 1
  1226. }
  1227. }
  1228. layer {
  1229. name: "fc7_mbox_priorbox"
  1230. type: "PriorBox"
  1231. bottom: "fc7"
  1232. bottom: "data"
  1233. top: "fc7_mbox_priorbox"
  1234. prior_box_param {
  1235. min_size: 60.0
  1236. max_size: 111.0
  1237. aspect_ratio: 2
  1238. aspect_ratio: 3
  1239. flip: true
  1240. clip: false
  1241. variance: 0.1
  1242. variance: 0.1
  1243. variance: 0.2
  1244. variance: 0.2
  1245. step: 16
  1246. offset: 0.5
  1247. }
  1248. }
  1249. layer {
  1250. name: "conv6_2_mbox_loc"
  1251. type: "Convolution"
  1252. bottom: "conv6_2_h"
  1253. top: "conv6_2_mbox_loc"
  1254. param {
  1255. lr_mult: 1
  1256. decay_mult: 1
  1257. }
  1258. param {
  1259. lr_mult: 2
  1260. decay_mult: 0
  1261. }
  1262. convolution_param {
  1263. num_output: 24
  1264. pad: 1
  1265. kernel_size: 3
  1266. stride: 1
  1267. weight_filler {
  1268. type: "xavier"
  1269. }
  1270. bias_filler {
  1271. type: "constant"
  1272. value: 0
  1273. }
  1274. }
  1275. }
  1276. layer {
  1277. name: "conv6_2_mbox_loc_perm"
  1278. type: "Permute"
  1279. bottom: "conv6_2_mbox_loc"
  1280. top: "conv6_2_mbox_loc_perm"
  1281. permute_param {
  1282. order: 0
  1283. order: 2
  1284. order: 3
  1285. order: 1
  1286. }
  1287. }
  1288. layer {
  1289. name: "conv6_2_mbox_loc_flat"
  1290. type: "Flatten"
  1291. bottom: "conv6_2_mbox_loc_perm"
  1292. top: "conv6_2_mbox_loc_flat"
  1293. flatten_param {
  1294. axis: 1
  1295. }
  1296. }
  1297. layer {
  1298. name: "conv6_2_mbox_conf"
  1299. type: "Convolution"
  1300. bottom: "conv6_2_h"
  1301. top: "conv6_2_mbox_conf"
  1302. param {
  1303. lr_mult: 1
  1304. decay_mult: 1
  1305. }
  1306. param {
  1307. lr_mult: 2
  1308. decay_mult: 0
  1309. }
  1310. convolution_param {
  1311. num_output: 12 # 126
  1312. pad: 1
  1313. kernel_size: 3
  1314. stride: 1
  1315. weight_filler {
  1316. type: "xavier"
  1317. }
  1318. bias_filler {
  1319. type: "constant"
  1320. value: 0
  1321. }
  1322. }
  1323. }
  1324. layer {
  1325. name: "conv6_2_mbox_conf_perm"
  1326. type: "Permute"
  1327. bottom: "conv6_2_mbox_conf"
  1328. top: "conv6_2_mbox_conf_perm"
  1329. permute_param {
  1330. order: 0
  1331. order: 2
  1332. order: 3
  1333. order: 1
  1334. }
  1335. }
  1336. layer {
  1337. name: "conv6_2_mbox_conf_flat"
  1338. type: "Flatten"
  1339. bottom: "conv6_2_mbox_conf_perm"
  1340. top: "conv6_2_mbox_conf_flat"
  1341. flatten_param {
  1342. axis: 1
  1343. }
  1344. }
  1345. layer {
  1346. name: "conv6_2_mbox_priorbox"
  1347. type: "PriorBox"
  1348. bottom: "conv6_2_h"
  1349. bottom: "data"
  1350. top: "conv6_2_mbox_priorbox"
  1351. prior_box_param {
  1352. min_size: 111.0
  1353. max_size: 162.0
  1354. aspect_ratio: 2
  1355. aspect_ratio: 3
  1356. flip: true
  1357. clip: false
  1358. variance: 0.1
  1359. variance: 0.1
  1360. variance: 0.2
  1361. variance: 0.2
  1362. step: 32
  1363. offset: 0.5
  1364. }
  1365. }
  1366. layer {
  1367. name: "conv7_2_mbox_loc"
  1368. type: "Convolution"
  1369. bottom: "conv7_2_h"
  1370. top: "conv7_2_mbox_loc"
  1371. param {
  1372. lr_mult: 1
  1373. decay_mult: 1
  1374. }
  1375. param {
  1376. lr_mult: 2
  1377. decay_mult: 0
  1378. }
  1379. convolution_param {
  1380. num_output: 24
  1381. pad: 1
  1382. kernel_size: 3
  1383. stride: 1
  1384. weight_filler {
  1385. type: "xavier"
  1386. }
  1387. bias_filler {
  1388. type: "constant"
  1389. value: 0
  1390. }
  1391. }
  1392. }
  1393. layer {
  1394. name: "conv7_2_mbox_loc_perm"
  1395. type: "Permute"
  1396. bottom: "conv7_2_mbox_loc"
  1397. top: "conv7_2_mbox_loc_perm"
  1398. permute_param {
  1399. order: 0
  1400. order: 2
  1401. order: 3
  1402. order: 1
  1403. }
  1404. }
  1405. layer {
  1406. name: "conv7_2_mbox_loc_flat"
  1407. type: "Flatten"
  1408. bottom: "conv7_2_mbox_loc_perm"
  1409. top: "conv7_2_mbox_loc_flat"
  1410. flatten_param {
  1411. axis: 1
  1412. }
  1413. }
  1414. layer {
  1415. name: "conv7_2_mbox_conf"
  1416. type: "Convolution"
  1417. bottom: "conv7_2_h"
  1418. top: "conv7_2_mbox_conf"
  1419. param {
  1420. lr_mult: 1
  1421. decay_mult: 1
  1422. }
  1423. param {
  1424. lr_mult: 2
  1425. decay_mult: 0
  1426. }
  1427. convolution_param {
  1428. num_output: 12 # 126
  1429. pad: 1
  1430. kernel_size: 3
  1431. stride: 1
  1432. weight_filler {
  1433. type: "xavier"
  1434. }
  1435. bias_filler {
  1436. type: "constant"
  1437. value: 0
  1438. }
  1439. }
  1440. }
  1441. layer {
  1442. name: "conv7_2_mbox_conf_perm"
  1443. type: "Permute"
  1444. bottom: "conv7_2_mbox_conf"
  1445. top: "conv7_2_mbox_conf_perm"
  1446. permute_param {
  1447. order: 0
  1448. order: 2
  1449. order: 3
  1450. order: 1
  1451. }
  1452. }
  1453. layer {
  1454. name: "conv7_2_mbox_conf_flat"
  1455. type: "Flatten"
  1456. bottom: "conv7_2_mbox_conf_perm"
  1457. top: "conv7_2_mbox_conf_flat"
  1458. flatten_param {
  1459. axis: 1
  1460. }
  1461. }
  1462. layer {
  1463. name: "conv7_2_mbox_priorbox"
  1464. type: "PriorBox"
  1465. bottom: "conv7_2_h"
  1466. bottom: "data"
  1467. top: "conv7_2_mbox_priorbox"
  1468. prior_box_param {
  1469. min_size: 162.0
  1470. max_size: 213.0
  1471. aspect_ratio: 2
  1472. aspect_ratio: 3
  1473. flip: true
  1474. clip: false
  1475. variance: 0.1
  1476. variance: 0.1
  1477. variance: 0.2
  1478. variance: 0.2
  1479. step: 64
  1480. offset: 0.5
  1481. }
  1482. }
  1483. layer {
  1484. name: "conv8_2_mbox_loc"
  1485. type: "Convolution"
  1486. bottom: "conv8_2_h"
  1487. top: "conv8_2_mbox_loc"
  1488. param {
  1489. lr_mult: 1
  1490. decay_mult: 1
  1491. }
  1492. param {
  1493. lr_mult: 2
  1494. decay_mult: 0
  1495. }
  1496. convolution_param {
  1497. num_output: 16
  1498. pad: 1
  1499. kernel_size: 3
  1500. stride: 1
  1501. weight_filler {
  1502. type: "xavier"
  1503. }
  1504. bias_filler {
  1505. type: "constant"
  1506. value: 0
  1507. }
  1508. }
  1509. }
  1510. layer {
  1511. name: "conv8_2_mbox_loc_perm"
  1512. type: "Permute"
  1513. bottom: "conv8_2_mbox_loc"
  1514. top: "conv8_2_mbox_loc_perm"
  1515. permute_param {
  1516. order: 0
  1517. order: 2
  1518. order: 3
  1519. order: 1
  1520. }
  1521. }
  1522. layer {
  1523. name: "conv8_2_mbox_loc_flat"
  1524. type: "Flatten"
  1525. bottom: "conv8_2_mbox_loc_perm"
  1526. top: "conv8_2_mbox_loc_flat"
  1527. flatten_param {
  1528. axis: 1
  1529. }
  1530. }
  1531. layer {
  1532. name: "conv8_2_mbox_conf"
  1533. type: "Convolution"
  1534. bottom: "conv8_2_h"
  1535. top: "conv8_2_mbox_conf"
  1536. param {
  1537. lr_mult: 1
  1538. decay_mult: 1
  1539. }
  1540. param {
  1541. lr_mult: 2
  1542. decay_mult: 0
  1543. }
  1544. convolution_param {
  1545. num_output: 8 # 84
  1546. pad: 1
  1547. kernel_size: 3
  1548. stride: 1
  1549. weight_filler {
  1550. type: "xavier"
  1551. }
  1552. bias_filler {
  1553. type: "constant"
  1554. value: 0
  1555. }
  1556. }
  1557. }
  1558. layer {
  1559. name: "conv8_2_mbox_conf_perm"
  1560. type: "Permute"
  1561. bottom: "conv8_2_mbox_conf"
  1562. top: "conv8_2_mbox_conf_perm"
  1563. permute_param {
  1564. order: 0
  1565. order: 2
  1566. order: 3
  1567. order: 1
  1568. }
  1569. }
  1570. layer {
  1571. name: "conv8_2_mbox_conf_flat"
  1572. type: "Flatten"
  1573. bottom: "conv8_2_mbox_conf_perm"
  1574. top: "conv8_2_mbox_conf_flat"
  1575. flatten_param {
  1576. axis: 1
  1577. }
  1578. }
  1579. layer {
  1580. name: "conv8_2_mbox_priorbox"
  1581. type: "PriorBox"
  1582. bottom: "conv8_2_h"
  1583. bottom: "data"
  1584. top: "conv8_2_mbox_priorbox"
  1585. prior_box_param {
  1586. min_size: 213.0
  1587. max_size: 264.0
  1588. aspect_ratio: 2
  1589. flip: true
  1590. clip: false
  1591. variance: 0.1
  1592. variance: 0.1
  1593. variance: 0.2
  1594. variance: 0.2
  1595. step: 100
  1596. offset: 0.5
  1597. }
  1598. }
  1599. layer {
  1600. name: "conv9_2_mbox_loc"
  1601. type: "Convolution"
  1602. bottom: "conv9_2_h"
  1603. top: "conv9_2_mbox_loc"
  1604. param {
  1605. lr_mult: 1
  1606. decay_mult: 1
  1607. }
  1608. param {
  1609. lr_mult: 2
  1610. decay_mult: 0
  1611. }
  1612. convolution_param {
  1613. num_output: 16
  1614. pad: 1
  1615. kernel_size: 3
  1616. stride: 1
  1617. weight_filler {
  1618. type: "xavier"
  1619. }
  1620. bias_filler {
  1621. type: "constant"
  1622. value: 0
  1623. }
  1624. }
  1625. }
  1626. layer {
  1627. name: "conv9_2_mbox_loc_perm"
  1628. type: "Permute"
  1629. bottom: "conv9_2_mbox_loc"
  1630. top: "conv9_2_mbox_loc_perm"
  1631. permute_param {
  1632. order: 0
  1633. order: 2
  1634. order: 3
  1635. order: 1
  1636. }
  1637. }
  1638. layer {
  1639. name: "conv9_2_mbox_loc_flat"
  1640. type: "Flatten"
  1641. bottom: "conv9_2_mbox_loc_perm"
  1642. top: "conv9_2_mbox_loc_flat"
  1643. flatten_param {
  1644. axis: 1
  1645. }
  1646. }
  1647. layer {
  1648. name: "conv9_2_mbox_conf"
  1649. type: "Convolution"
  1650. bottom: "conv9_2_h"
  1651. top: "conv9_2_mbox_conf"
  1652. param {
  1653. lr_mult: 1
  1654. decay_mult: 1
  1655. }
  1656. param {
  1657. lr_mult: 2
  1658. decay_mult: 0
  1659. }
  1660. convolution_param {
  1661. num_output: 8 # 84
  1662. pad: 1
  1663. kernel_size: 3
  1664. stride: 1
  1665. weight_filler {
  1666. type: "xavier"
  1667. }
  1668. bias_filler {
  1669. type: "constant"
  1670. value: 0
  1671. }
  1672. }
  1673. }
  1674. layer {
  1675. name: "conv9_2_mbox_conf_perm"
  1676. type: "Permute"
  1677. bottom: "conv9_2_mbox_conf"
  1678. top: "conv9_2_mbox_conf_perm"
  1679. permute_param {
  1680. order: 0
  1681. order: 2
  1682. order: 3
  1683. order: 1
  1684. }
  1685. }
  1686. layer {
  1687. name: "conv9_2_mbox_conf_flat"
  1688. type: "Flatten"
  1689. bottom: "conv9_2_mbox_conf_perm"
  1690. top: "conv9_2_mbox_conf_flat"
  1691. flatten_param {
  1692. axis: 1
  1693. }
  1694. }
  1695. layer {
  1696. name: "conv9_2_mbox_priorbox"
  1697. type: "PriorBox"
  1698. bottom: "conv9_2_h"
  1699. bottom: "data"
  1700. top: "conv9_2_mbox_priorbox"
  1701. prior_box_param {
  1702. min_size: 264.0
  1703. max_size: 315.0
  1704. aspect_ratio: 2
  1705. flip: true
  1706. clip: false
  1707. variance: 0.1
  1708. variance: 0.1
  1709. variance: 0.2
  1710. variance: 0.2
  1711. step: 300
  1712. offset: 0.5
  1713. }
  1714. }
  1715. layer {
  1716. name: "mbox_loc"
  1717. type: "Concat"
  1718. bottom: "conv4_3_norm_mbox_loc_flat"
  1719. bottom: "fc7_mbox_loc_flat"
  1720. bottom: "conv6_2_mbox_loc_flat"
  1721. bottom: "conv7_2_mbox_loc_flat"
  1722. bottom: "conv8_2_mbox_loc_flat"
  1723. bottom: "conv9_2_mbox_loc_flat"
  1724. top: "mbox_loc"
  1725. concat_param {
  1726. axis: 1
  1727. }
  1728. }
  1729. layer {
  1730. name: "mbox_conf"
  1731. type: "Concat"
  1732. bottom: "conv4_3_norm_mbox_conf_flat"
  1733. bottom: "fc7_mbox_conf_flat"
  1734. bottom: "conv6_2_mbox_conf_flat"
  1735. bottom: "conv7_2_mbox_conf_flat"
  1736. bottom: "conv8_2_mbox_conf_flat"
  1737. bottom: "conv9_2_mbox_conf_flat"
  1738. top: "mbox_conf"
  1739. concat_param {
  1740. axis: 1
  1741. }
  1742. }
  1743. layer {
  1744. name: "mbox_priorbox"
  1745. type: "Concat"
  1746. bottom: "conv4_3_norm_mbox_priorbox"
  1747. bottom: "fc7_mbox_priorbox"
  1748. bottom: "conv6_2_mbox_priorbox"
  1749. bottom: "conv7_2_mbox_priorbox"
  1750. bottom: "conv8_2_mbox_priorbox"
  1751. bottom: "conv9_2_mbox_priorbox"
  1752. top: "mbox_priorbox"
  1753. concat_param {
  1754. axis: 2
  1755. }
  1756. }
  1757. layer {
  1758. name: "mbox_conf_reshape"
  1759. type: "Reshape"
  1760. bottom: "mbox_conf"
  1761. top: "mbox_conf_reshape"
  1762. reshape_param {
  1763. shape {
  1764. dim: 0
  1765. dim: -1
  1766. dim: 2
  1767. }
  1768. }
  1769. }
  1770. layer {
  1771. name: "mbox_conf_softmax"
  1772. type: "Softmax"
  1773. bottom: "mbox_conf_reshape"
  1774. top: "mbox_conf_softmax"
  1775. softmax_param {
  1776. axis: 2
  1777. }
  1778. }
  1779. layer {
  1780. name: "mbox_conf_flatten"
  1781. type: "Flatten"
  1782. bottom: "mbox_conf_softmax"
  1783. top: "mbox_conf_flatten"
  1784. flatten_param {
  1785. axis: 1
  1786. }
  1787. }
  1788. layer {
  1789. name: "detection_out"
  1790. type: "DetectionOutput"
  1791. bottom: "mbox_loc"
  1792. bottom: "mbox_conf_flatten"
  1793. bottom: "mbox_priorbox"
  1794. top: "detection_out"
  1795. include {
  1796. phase: TEST
  1797. }
  1798. detection_output_param {
  1799. num_classes: 2
  1800. share_location: true
  1801. background_label_id: 0
  1802. nms_param {
  1803. nms_threshold: 0.45
  1804. top_k: 400
  1805. }
  1806. code_type: CENTER_SIZE
  1807. keep_top_k: 200
  1808. confidence_threshold: 0.01
  1809. clip: 1
  1810. }
  1811. }
  1812. layer {
  1813. name: "detection_eval"
  1814. type: "DetectionEvaluate"
  1815. bottom: "detection_out"
  1816. bottom: "label"
  1817. top: "detection_eval"
  1818. include {
  1819. phase: TEST
  1820. }
  1821. detection_evaluate_param {
  1822. num_classes: 2
  1823. background_label_id: 0
  1824. overlap_threshold: 0.5
  1825. evaluate_difficult_gt: false
  1826. }
  1827. }