Added classifier_densenet201.cmd

Author: AlexeyAB
Date: 2017-09-14 22:14:16 +03:00
Parent: fe6e694e17
Commit: e8dd9dd877
50 changed files with 83191 additions and 0 deletions

cfg/9k.labels: 9418 lines, new file (diff suppressed because it is too large)

cfg/9k.names: 9418 lines, new file (diff suppressed because it is too large)

cfg/9k.tree: 8714 lines, new file (diff suppressed because one or more lines are too long)

cfg/coco.names: 80 lines, new file
@@ -0,0 +1,80 @@
person
bicycle
car
motorbike
aeroplane
bus
train
truck
boat
traffic light
fire hydrant
stop sign
parking meter
bench
bird
cat
dog
horse
sheep
cow
elephant
bear
zebra
giraffe
backpack
umbrella
handbag
tie
suitcase
frisbee
skis
snowboard
sports ball
kite
baseball bat
baseball glove
skateboard
surfboard
tennis racket
bottle
wine glass
cup
fork
knife
spoon
bowl
banana
apple
sandwich
orange
broccoli
carrot
hot dog
pizza
donut
cake
chair
sofa
pottedplant
bed
diningtable
toilet
tvmonitor
laptop
mouse
remote
keyboard
cell phone
microwave
oven
toaster
sink
refrigerator
book
clock
vase
scissors
teddy bear
hair drier
toothbrush

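coco.names holds one class label per line, and darknet uses the zero-based line index as the class id, so person is class 0 and toothbrush is class 79. As a sketch of how this file gets used, a detector run resolves its labels through a .data file that points at it; the weights file below is illustrative and not part of this commit:

    darknet.exe detector test cfg/coco.data cfg/yolo.cfg yolo.weights data/dog.jpg
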
cfg/coco9k.map: 80 lines, new file
@@ -0,0 +1,80 @@
5177
3768
3802
3800
4107
4072
4071
3797
4097
2645
5150
2644
3257
2523
6527
6866
6912
7342
7255
7271
7217
6858
7343
7233
3704
4374
3641
5001
3899
2999
2631
5141
2015
1133
1935
1930
5144
5143
2371
3916
3745
3640
4749
4736
4735
3678
58
42
771
81
152
141
786
700
218
791
2518
2521
3637
2458
2505
2519
3499
2837
3503
2597
3430
2080
5103
5111
5102
3013
5096
1102
3218
4010
2266
1127
5122
2360

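Each of these 80 numbers appears to map the COCO class at the same position in coco.names onto a node index in the 9418-node WordTree from cfg/9k.tree; YOLO9000-style configs reference such a map from the [region] layer (map = data/coco9k.map) so COCO detection labels can be projected onto the tree. A hypothetical run of a 9k detector, assuming the conventional file names rather than anything confirmed by this diff:

    darknet.exe detector test cfg/combine9k.data cfg/yolo9000.cfg yolo9000.weights data/horses.jpg
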
cfg/densenet201.cfg: 1954 lines, new file (diff suppressed because it is too large)

cfg/imagenet.labels.list: 21842 lines, new file (diff suppressed because it is too large)

cfg/imagenet.shortnames.list: 21842 lines, new file (diff suppressed because it is too large)

@@ -0,0 +1,9 @@
classes=9418
train = data/9k.train.list
valid = /data/imagenet/imagenet1k.valid.list
leaves = data/imagenet1k.labels
backup = /home/pjreddie/backup/
labels = data/9k.labels
names = data/9k.names
top=5

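The nine lines above are darknet's key = value data-file format: classes gives the label count, train and valid point at image list files, labels and names at the 9k label files added in this commit, leaves marks which WordTree nodes are treated as leaves for validation (here the ImageNet-1k labels), and top=5 requests top-5 accuracy. The file's own name is not shown in this diff, so the sketch below uses a placeholder; the weights file is likewise an assumption:

    darknet.exe classifier predict cfg/<this dataset file> cfg/densenet201.cfg densenet201.weights data/dog.jpg
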
cfg/inet9k.map: 200 lines, new file
@@ -0,0 +1,200 @@
2687
4107
8407
7254
42
6797
127
2268
2442
3704
260
1970
58
4443
2661
2043
2039
4858
4007
6858
8408
166
2523
3768
4347
6527
2446
5005
3274
3678
4918
709
4072
8428
7223
2251
3802
3848
7271
2677
8267
2849
2518
2738
3746
5105
3430
3503
2249
1841
2032
2358
122
3984
4865
3246
5095
6912
6878
8467
2741
1973
3057
7217
1872
44
2452
3637
2704
6917
2715
6734
2325
6864
6677
2035
1949
338
2664
5122
1844
784
2223
7188
2719
2670
4830
158
4818
7228
1965
7342
786
2095
8281
8258
7406
3915
8382
2437
2837
82
6871
1876
7447
8285
5007
2740
3463
5103
3755
4910
6809
3800
118
3396
3092
2709
81
7105
4036
2366
1846
5177
2684
64
2041
3919
700
3724
1742
39
807
7184
2256
235
2778
2996
2030
3714
7167
2369
6705
6861
5096
2597
2166
2036
3228
3747
2711
8300
2226
7153
7255
2631
7109
8242
7445
3776
3803
3690
2025
2521
2316
7190
8249
3352
2639
2887
100
4219
3344
5008
7224
3351
2434
2074
2034
8304
5004
6868
5102
2645
4071
2716
2717
7420
3499
3763
5084
2676
2046
5107
5097
3944
4097
7132
3956
7343

cfg/resnet50.cfg: 511 lines, new file
@@ -0,0 +1,511 @@
[net]
# Training
# batch=128
# subdivisions=4
# Testing
batch=1
subdivisions=1
height=256
width=256
max_crop=448
channels=3
momentum=0.9
decay=0.0005
burn_in=1000
learning_rate=0.1
policy=poly
power=4
max_batches=1600000
angle=7
hue=.1
saturation=.75
exposure=.75
aspect=.75

[convolutional]
batch_normalize=1
filters=64
size=7
stride=2
pad=1
activation=leaky

[maxpool]
size=2
stride=2

[convolutional]
batch_normalize=1
filters=64
size=1
stride=1
pad=1
activation=leaky

[convolutional]
batch_normalize=1
filters=64
size=3
stride=1
pad=1
activation=leaky

[convolutional]
batch_normalize=1
filters=256
size=1
stride=1
pad=1
activation=linear

[shortcut]
from=-4
activation=leaky

[convolutional]
batch_normalize=1
filters=64
size=1
stride=1
pad=1
activation=leaky

[convolutional]
batch_normalize=1
filters=64
size=3
stride=1
pad=1
activation=leaky

[convolutional]
batch_normalize=1
filters=256
size=1
stride=1
pad=1
activation=linear

[shortcut]
from=-4
activation=leaky

[convolutional]
batch_normalize=1
filters=64
size=1
stride=1
pad=1
activation=leaky

[convolutional]
batch_normalize=1
filters=64
size=3
stride=1
pad=1
activation=leaky

[convolutional]
batch_normalize=1
filters=256
size=1
stride=1
pad=1
activation=linear

[shortcut]
from=-4
activation=leaky

[convolutional]
batch_normalize=1
filters=128
size=1
stride=1
pad=1
activation=leaky

[convolutional]
batch_normalize=1
filters=128
size=3
stride=2
pad=1
activation=leaky

[convolutional]
batch_normalize=1
filters=512
size=1
stride=1
pad=1
activation=linear

[shortcut]
from=-4
activation=leaky

[convolutional]
batch_normalize=1
filters=128
size=1
stride=1
pad=1
activation=leaky

[convolutional]
batch_normalize=1
filters=128
size=3
stride=1
pad=1
activation=leaky

[convolutional]
batch_normalize=1
filters=512
size=1
stride=1
pad=1
activation=linear

[shortcut]
from=-4
activation=leaky

[convolutional]
batch_normalize=1
filters=128
size=1
stride=1
pad=1
activation=leaky

[convolutional]
batch_normalize=1
filters=128
size=3
stride=1
pad=1
activation=leaky

[convolutional]
batch_normalize=1
filters=512
size=1
stride=1
pad=1
activation=linear

[shortcut]
from=-4
activation=leaky

[convolutional]
batch_normalize=1
filters=128
size=1
stride=1
pad=1
activation=leaky

[convolutional]
batch_normalize=1
filters=128
size=3
stride=1
pad=1
activation=leaky

[convolutional]
batch_normalize=1
filters=512
size=1
stride=1
pad=1
activation=linear

[shortcut]
from=-4
activation=leaky

# Conv 4
[convolutional]
batch_normalize=1
filters=256
size=1
stride=1
pad=1
activation=leaky

[convolutional]
batch_normalize=1
filters=256
size=3
stride=2
pad=1
activation=leaky

[convolutional]
batch_normalize=1
filters=1024
size=1
stride=1
pad=1
activation=linear

[shortcut]
from=-4
activation=leaky

[convolutional]
batch_normalize=1
filters=256
size=1
stride=1
pad=1
activation=leaky

[convolutional]
batch_normalize=1
filters=256
size=3
stride=1
pad=1
activation=leaky

[convolutional]
batch_normalize=1
filters=1024
size=1
stride=1
pad=1
activation=linear

[shortcut]
from=-4
activation=leaky

[convolutional]
batch_normalize=1
filters=256
size=1
stride=1
pad=1
activation=leaky

[convolutional]
batch_normalize=1
filters=256
size=3
stride=1
pad=1
activation=leaky

[convolutional]
batch_normalize=1
filters=1024
size=1
stride=1
pad=1
activation=linear

[shortcut]
from=-4
activation=leaky

[convolutional]
batch_normalize=1
filters=256
size=1
stride=1
pad=1
activation=leaky

[convolutional]
batch_normalize=1
filters=256
size=3
stride=1
pad=1
activation=leaky

[convolutional]
batch_normalize=1
filters=1024
size=1
stride=1
pad=1
activation=linear

[shortcut]
from=-4
activation=leaky

[convolutional]
batch_normalize=1
filters=256
size=1
stride=1
pad=1
activation=leaky

[convolutional]
batch_normalize=1
filters=256
size=3
stride=1
pad=1
activation=leaky

[convolutional]
batch_normalize=1
filters=1024
size=1
stride=1
pad=1
activation=linear

[shortcut]
from=-4
activation=leaky

[convolutional]
batch_normalize=1
filters=256
size=1
stride=1
pad=1
activation=leaky

[convolutional]
batch_normalize=1
filters=256
size=3
stride=1
pad=1
activation=leaky

[convolutional]
batch_normalize=1
filters=1024
size=1
stride=1
pad=1
activation=linear

[shortcut]
from=-4
activation=leaky

# Conv 5
[convolutional]
batch_normalize=1
filters=512
size=1
stride=1
pad=1
activation=leaky

[convolutional]
batch_normalize=1
filters=512
size=3
stride=2
pad=1
activation=leaky

[convolutional]
batch_normalize=1
filters=2048
size=1
stride=1
pad=1
activation=linear

[shortcut]
from=-4
activation=leaky

[convolutional]
batch_normalize=1
filters=512
size=1
stride=1
pad=1
activation=leaky

[convolutional]
batch_normalize=1
filters=512
size=3
stride=1
pad=1
activation=leaky

[convolutional]
batch_normalize=1
filters=2048
size=1
stride=1
pad=1
activation=linear

[shortcut]
from=-4
activation=leaky

[convolutional]
batch_normalize=1
filters=512
size=1
stride=1
pad=1
activation=leaky

[convolutional]
batch_normalize=1
filters=512
size=3
stride=1
pad=1
activation=leaky

[convolutional]
batch_normalize=1
filters=2048
size=1
stride=1
pad=1
activation=linear

[shortcut]
from=-4
activation=leaky

[convolutional]
filters=1000
size=1
stride=1
pad=1
activation=linear

[avgpool]

[softmax]
groups=1

[cost]
type=sse
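
The comments at the top of [net] describe the intended switch: batch=1 with subdivisions=1 for inference, and the commented-out batch=128 with subdivisions=4 for training. As a sketch, assuming the stock cfg/imagenet1k.data that ships with darknet and a conventionally named weights file (neither is confirmed by this diff), training and prediction would look like:

    darknet.exe classifier train cfg/imagenet1k.data cfg/resnet50.cfg
    darknet.exe classifier predict cfg/imagenet1k.data cfg/resnet50.cfg resnet50.weights data/dog.jpg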