Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Get predictions without functions #82

Open
mattiaachilli opened this issue Jul 14, 2020 · 1 comment
Open

Get predictions without functions #82

mattiaachilli opened this issue Jul 14, 2020 · 1 comment

Comments

@mattiaachilli
Copy link

Can I get the predictions directly without using functions? For example, can I modify the last layer to do that?

@imistyrain
Copy link

You should add some layers without parameters. If you use Caffe, the prototxt would be:

# Network input: a single image blob "data" of shape 1x3x260x260 (N,C,H,W).
layer {
  name: "data"
  type: "Input"
  top: "data"
  input_param { shape: { dim: 1 dim: 3 dim: 260 dim: 260 } }
}
# ---------------------------------------------------------------------------
# Backbone: alternating 3x3 conv (+ReLU) and 2x2/stride-2 max-pool stages.
# Channel widths: 32, 64, 64, 64, 128, 128, 64, 64.
# conv2d_3..conv2d_7 are the multi-scale feature maps consumed by the
# cls_*/loc_* heads and the PriorBox layers further below.
# ---------------------------------------------------------------------------
layer {
  name: "conv2d_0"
  type: "Convolution"
  bottom: "data"
  top: "conv2d_0"
  convolution_param {
    num_output: 32
    bias_term: true
    pad: 1
    kernel_size: 3
    stride: 1
  }
}
layer {
  name: "conv2d_0_activation"
  type: "ReLU"
  bottom: "conv2d_0"
  top: "conv2d_0"
}
layer {
  name: "maxpool2d_0"
  type: "Pooling"
  bottom: "conv2d_0"
  top: "maxpool2d_0"
  pooling_param {
    pool: MAX
    kernel_size: 2
    stride: 2
  }
}
layer {
  name: "conv2d_1"
  type: "Convolution"
  bottom: "maxpool2d_0"
  top: "conv2d_1"
  convolution_param {
    num_output: 64
    bias_term: true
    pad: 1
    kernel_size: 3
    stride: 1
  }
}
layer {
  name: "conv2d_1_activation"
  type: "ReLU"
  bottom: "conv2d_1"
  top: "conv2d_1"
}
layer {
  name: "maxpool2d_1"
  type: "Pooling"
  bottom: "conv2d_1"
  top: "maxpool2d_1"
  pooling_param {
    pool: MAX
    kernel_size: 2
    stride: 2
  }
}
layer {
  name: "conv2d_2"
  type: "Convolution"
  bottom: "maxpool2d_1"
  top: "conv2d_2"
  convolution_param {
    num_output: 64
    bias_term: true
    pad: 1
    kernel_size: 3
    stride: 1
  }
}
layer {
  name: "conv2d_2_activation"
  type: "ReLU"
  bottom: "conv2d_2"
  top: "conv2d_2"
}
layer {
  name: "maxpool2d_2"
  type: "Pooling"
  bottom: "conv2d_2"
  top: "maxpool2d_2"
  pooling_param {
    pool: MAX
    kernel_size: 2
    stride: 2
  }
}
# conv2d_3: first detection-scale feature map (feeds cls_0/loc_0 heads and
# conv2d_3_mbox_priorbox below).
layer {
  name: "conv2d_3"
  type: "Convolution"
  bottom: "maxpool2d_2"
  top: "conv2d_3"
  convolution_param {
    num_output: 64
    bias_term: true
    pad: 1
    kernel_size: 3
    stride: 1
  }
}
layer {
  name: "conv2d_3_activation"
  type: "ReLU"
  bottom: "conv2d_3"
  top: "conv2d_3"
}
layer {
  name: "maxpool2d_3"
  type: "Pooling"
  bottom: "conv2d_3"
  top: "maxpool2d_3"
  pooling_param {
    pool: MAX
    kernel_size: 2
    stride: 2
  }
}
layer {
  name: "conv2d_4"
  type: "Convolution"
  bottom: "maxpool2d_3"
  top: "conv2d_4"
  convolution_param {
    num_output: 128
    bias_term: true
    pad: 1
    kernel_size: 3
    stride: 1
  }
}
layer {
  name: "conv2d_4_activation"
  type: "ReLU"
  bottom: "conv2d_4"
  top: "conv2d_4"
}
layer {
  name: "maxpool2d_4"
  type: "Pooling"
  bottom: "conv2d_4"
  top: "maxpool2d_4"
  pooling_param {
    pool: MAX
    kernel_size: 2
    stride: 2
  }
}
layer {
  name: "conv2d_5"
  type: "Convolution"
  bottom: "maxpool2d_4"
  top: "conv2d_5"
  convolution_param {
    num_output: 128
    bias_term: true
    pad: 1
    kernel_size: 3
    stride: 1
  }
}
layer {
  name: "conv2d_5_activation"
  type: "ReLU"
  bottom: "conv2d_5"
  top: "conv2d_5"
}
layer {
  name: "maxpool2d_5"
  type: "Pooling"
  bottom: "conv2d_5"
  top: "maxpool2d_5"
  pooling_param {
    pool: MAX
    kernel_size: 2
    stride: 2
  }
}
layer {
  name: "conv2d_6"
  type: "Convolution"
  bottom: "maxpool2d_5"
  top: "conv2d_6"
  convolution_param {
    num_output: 64
    bias_term: true
    pad: 1
    kernel_size: 3
    stride: 1
  }
}
layer {
  name: "conv2d_6_activation"
  type: "ReLU"
  bottom: "conv2d_6"
  top: "conv2d_6"
}
# conv2d_7: unlike the other convs, no "pad" is set here, so this is a
# "valid" 3x3 conv that shrinks the spatial map by 2 in each dimension —
# presumably intentional to produce the smallest detection scale.
layer {
  name: "conv2d_7"
  type: "Convolution"
  bottom: "conv2d_6"
  top: "conv2d_7"
  convolution_param {
    num_output: 64
    bias_term: true
    kernel_size: 3
    stride: 1
  }
}
layer {
  name: "conv2d_7_activation"
  type: "ReLU"
  bottom: "conv2d_7"
  top: "conv2d_7"
}
# ---------------------------------------------------------------------------
# Per-scale "insert" convs: one 3x3/64-channel conv per detection scale
# (conv2d_3, conv2d_4, conv2d_5, conv2d_6, conv2d_7), duplicated for the
# classification branch (cls_*) and the localization branch (loc_*).
# ---------------------------------------------------------------------------
layer {
  name: "cls_0_insert_conv2d"
  type: "Convolution"
  bottom: "conv2d_3"
  top: "cls_0_insert_conv2d"
  convolution_param {
    num_output: 64
    bias_term: true
    pad: 1
    kernel_size: 3
    stride: 1
  }
}
layer {
  name: "cls_1_insert_conv2d"
  type: "Convolution"
  bottom: "conv2d_4"
  top: "cls_1_insert_conv2d"
  convolution_param {
    num_output: 64
    bias_term: true
    pad: 1
    kernel_size: 3
    stride: 1
  }
}
layer {
  name: "cls_2_insert_conv2d"
  type: "Convolution"
  bottom: "conv2d_5"
  top: "cls_2_insert_conv2d"
  convolution_param {
    num_output: 64
    bias_term: true
    pad: 1
    kernel_size: 3
    stride: 1
  }
}
layer {
  name: "cls_3_insert_conv2d"
  type: "Convolution"
  bottom: "conv2d_6"
  top: "cls_3_insert_conv2d"
  convolution_param {
    num_output: 64
    bias_term: true
    pad: 1
    kernel_size: 3
    stride: 1
  }
}
layer {
  name: "cls_4_insert_conv2d"
  type: "Convolution"
  bottom: "conv2d_7"
  top: "cls_4_insert_conv2d"
  convolution_param {
    num_output: 64
    bias_term: true
    pad: 1
    kernel_size: 3
    stride: 1
  }
}
layer {
  name: "loc_0_insert_conv2d"
  type: "Convolution"
  bottom: "conv2d_3"
  top: "loc_0_insert_conv2d"
  convolution_param {
    num_output: 64
    bias_term: true
    pad: 1
    kernel_size: 3
    stride: 1
  }
}
layer {
  name: "loc_1_insert_conv2d"
  type: "Convolution"
  bottom: "conv2d_4"
  top: "loc_1_insert_conv2d"
  convolution_param {
    num_output: 64
    bias_term: true
    pad: 1
    kernel_size: 3
    stride: 1
  }
}
layer {
  name: "loc_2_insert_conv2d"
  type: "Convolution"
  bottom: "conv2d_5"
  top: "loc_2_insert_conv2d"
  convolution_param {
    num_output: 64
    bias_term: true
    pad: 1
    kernel_size: 3
    stride: 1
  }
}
layer {
  name: "loc_3_insert_conv2d"
  type: "Convolution"
  bottom: "conv2d_6"
  top: "loc_3_insert_conv2d"
  convolution_param {
    num_output: 64
    bias_term: true
    pad: 1
    kernel_size: 3
    stride: 1
  }
}
layer {
  name: "loc_4_insert_conv2d"
  type: "Convolution"
  bottom: "conv2d_7"
  top: "loc_4_insert_conv2d"
  convolution_param {
    num_output: 64
    bias_term: true
    pad: 1
    kernel_size: 3
    stride: 1
  }
}
# In-place ReLU activations for all ten insert convs above.
layer {
  name: "cls_0_insert_conv2d_activation"
  type: "ReLU"
  bottom: "cls_0_insert_conv2d"
  top: "cls_0_insert_conv2d"
}
layer {
  name: "cls_1_insert_conv2d_activation"
  type: "ReLU"
  bottom: "cls_1_insert_conv2d"
  top: "cls_1_insert_conv2d"
}
layer {
  name: "cls_2_insert_conv2d_activation"
  type: "ReLU"
  bottom: "cls_2_insert_conv2d"
  top: "cls_2_insert_conv2d"
}
layer {
  name: "cls_3_insert_conv2d_activation"
  type: "ReLU"
  bottom: "cls_3_insert_conv2d"
  top: "cls_3_insert_conv2d"
}
layer {
  name: "cls_4_insert_conv2d_activation"
  type: "ReLU"
  bottom: "cls_4_insert_conv2d"
  top: "cls_4_insert_conv2d"
}
layer {
  name: "loc_0_insert_conv2d_activation"
  type: "ReLU"
  bottom: "loc_0_insert_conv2d"
  top: "loc_0_insert_conv2d"
}
layer {
  name: "loc_1_insert_conv2d_activation"
  type: "ReLU"
  bottom: "loc_1_insert_conv2d"
  top: "loc_1_insert_conv2d"
}
layer {
  name: "loc_2_insert_conv2d_activation"
  type: "ReLU"
  bottom: "loc_2_insert_conv2d"
  top: "loc_2_insert_conv2d"
}
layer {
  name: "loc_3_insert_conv2d_activation"
  type: "ReLU"
  bottom: "loc_3_insert_conv2d"
  top: "loc_3_insert_conv2d"
}
layer {
  name: "loc_4_insert_conv2d_activation"
  type: "ReLU"
  bottom: "loc_4_insert_conv2d"
  top: "loc_4_insert_conv2d"
}
# ---------------------------------------------------------------------------
# Prediction convs, one per scale:
#   cls_*_conv: 8 channels  = 4 priors/location x 2 classes (see
#               prior_box_param and num_classes: 2 below);
#   loc_*_conv: 16 channels = 4 priors/location x 4 box offsets.
# ---------------------------------------------------------------------------
layer {
  name: "cls_0_conv"
  type: "Convolution"
  bottom: "cls_0_insert_conv2d"
  top: "cls_0_conv"
  convolution_param {
    num_output: 8
    pad: 1
    kernel_size: 3
    stride: 1
  }
}
layer {
  name: "cls_1_conv"
  type: "Convolution"
  bottom: "cls_1_insert_conv2d"
  top: "cls_1_conv"
  convolution_param {
    num_output: 8
    pad: 1
    kernel_size: 3
    stride: 1
  }
}
layer {
  name: "cls_2_conv"
  type: "Convolution"
  bottom: "cls_2_insert_conv2d"
  top: "cls_2_conv"
  convolution_param {
    num_output: 8
    pad: 1
    kernel_size: 3
    stride: 1
  }
}
layer {
  name: "cls_3_conv"
  type: "Convolution"
  bottom: "cls_3_insert_conv2d"
  top: "cls_3_conv"
  convolution_param {
    num_output: 8
    pad: 1
    kernel_size: 3
    stride: 1
  }
}
layer {
  name: "cls_4_conv"
  type: "Convolution"
  bottom: "cls_4_insert_conv2d"
  top: "cls_4_conv"
  convolution_param {
    num_output: 8
    pad: 1
    kernel_size: 3
    stride: 1
  }
}
layer {
  name: "loc_0_conv"
  type: "Convolution"
  bottom: "loc_0_insert_conv2d"
  top: "loc_0_conv"
  convolution_param {
    num_output: 16
    pad: 1
    kernel_size: 3
    stride: 1
  }
}
layer {
  name: "loc_1_conv"
  type: "Convolution"
  bottom: "loc_1_insert_conv2d"
  top: "loc_1_conv"
  convolution_param {
    num_output: 16
    pad: 1
    kernel_size: 3
    stride: 1
  }
}
layer {
  name: "loc_2_conv"
  type: "Convolution"
  bottom: "loc_2_insert_conv2d"
  top: "loc_2_conv"
  convolution_param {
    num_output: 16
    pad: 1
    kernel_size: 3
    stride: 1
  }
}
layer {
  name: "loc_3_conv"
  type: "Convolution"
  bottom: "loc_3_insert_conv2d"
  top: "loc_3_conv"
  convolution_param {
    num_output: 16
    pad: 1
    kernel_size: 3
    stride: 1
  }
}
layer {
  name: "loc_4_conv"
  type: "Convolution"
  bottom: "loc_4_insert_conv2d"
  top: "loc_4_conv"
  convolution_param {
    num_output: 16
    pad: 1
    kernel_size: 3
    stride: 1
  }
}
# ---------------------------------------------------------------------------
# Classification branch reshaping: Permute NCHW -> NHWC (order 0,2,3,1),
# then Reshape to (N, num_priors_at_scale, 2) so the last axis holds the
# two per-class scores.
# ---------------------------------------------------------------------------
layer {
  name: "cls_0_conv_permute"
  type: "Permute"
  bottom: "cls_0_conv"
  top: "cls_0_conv_permute"
  permute_param {
    order: 0
    order: 2
    order: 3
    order: 1
  }
}
layer {
  name: "cls_0_reshape"
  type: "Reshape"
  bottom: "cls_0_conv_permute"
  top: "cls_0_reshape"
  reshape_param {
    shape {
      dim: 0
      dim: -1
      dim: 2
    }
  }
}
layer {
  name: "cls_1_conv_permute"
  type: "Permute"
  bottom: "cls_1_conv"
  top: "cls_1_conv_permute"
  permute_param {
    order: 0
    order: 2
    order: 3
    order: 1
  }
}
layer {
  name: "cls_1_reshape"
  type: "Reshape"
  bottom: "cls_1_conv_permute"
  top: "cls_1_reshape"
  reshape_param {
    shape {
      dim: 0
      dim: -1
      dim: 2
    }
  }
}
layer {
  name: "cls_2_conv_permute"
  type: "Permute"
  bottom: "cls_2_conv"
  top: "cls_2_conv_permute"
  permute_param {
    order: 0
    order: 2
    order: 3
    order: 1
  }
}
layer {
  name: "cls_2_reshape"
  type: "Reshape"
  bottom: "cls_2_conv_permute"
  top: "cls_2_reshape"
  reshape_param {
    shape {
      dim: 0
      dim: -1
      dim: 2
    }
  }
}
layer {
  name: "cls_3_conv_permute"
  type: "Permute"
  bottom: "cls_3_conv"
  top: "cls_3_conv_permute"
  permute_param {
    order: 0
    order: 2
    order: 3
    order: 1
  }
}
layer {
  name: "cls_3_reshape"
  type: "Reshape"
  bottom: "cls_3_conv_permute"
  top: "cls_3_reshape"
  reshape_param {
    shape {
      dim: 0
      dim: -1
      dim: 2
    }
  }
}
layer {
  name: "cls_4_conv_permute"
  type: "Permute"
  bottom: "cls_4_conv"
  top: "cls_4_conv_permute"
  permute_param {
    order: 0
    order: 2
    order: 3
    order: 1
  }
}
layer {
  name: "cls_4_reshape"
  type: "Reshape"
  bottom: "cls_4_conv_permute"
  top: "cls_4_reshape"
  reshape_param {
    shape {
      dim: 0
      dim: -1
      dim: 2
    }
  }
}
# ---------------------------------------------------------------------------
# Localization branch reshaping: Permute NCHW -> NHWC, then Flatten from
# axis 1 to a single per-image vector of box offsets at each scale.
# ---------------------------------------------------------------------------
layer {
  name: "loc_0_conv_permute"
  type: "Permute"
  bottom: "loc_0_conv"
  top: "loc_0_conv_permute"
  permute_param {
    order: 0
    order: 2
    order: 3
    order: 1
  }
}
layer {
  name: "loc_0_flatten"
  type: "Flatten"
  bottom: "loc_0_conv_permute"
  top: "loc_0_flatten"
  flatten_param {
    axis : 1
  }
}
layer {
  name: "loc_1_conv_permute"
  type: "Permute"
  bottom: "loc_1_conv"
  top: "loc_1_conv_permute"
  permute_param {
    order: 0
    order: 2
    order: 3
    order: 1
  }
}
layer {
  name: "loc_1_flatten"
  type: "Flatten"
  bottom: "loc_1_conv_permute"
  top: "loc_1_flatten"
  flatten_param {
    axis : 1
  }
}
layer {
  name: "loc_2_conv_permute"
  type: "Permute"
  bottom: "loc_2_conv"
  top: "loc_2_conv_permute"
  permute_param {
    order: 0
    order: 2
    order: 3
    order: 1
  }
}
layer {
  name: "loc_2_flatten"
  type: "Flatten"
  bottom: "loc_2_conv_permute"
  top: "loc_2_flatten"
  flatten_param {
    axis : 1
  }
}
layer {
  name: "loc_3_conv_permute"
  type: "Permute"
  bottom: "loc_3_conv"
  top: "loc_3_conv_permute"
  permute_param {
    order: 0
    order: 2
    order: 3
    order: 1
  }
}
layer {
  name: "loc_3_flatten"
  type: "Flatten"
  bottom: "loc_3_conv_permute"
  top: "loc_3_flatten"
  flatten_param {
    axis : 1
  }
}
layer {
  name: "loc_4_conv_permute"
  type: "Permute"
  bottom: "loc_4_conv"
  top: "loc_4_conv_permute"
  permute_param {
    order: 0
    order: 2
    order: 3
    order: 1
  }
}
layer {
  name: "loc_4_flatten"
  type: "Flatten"
  bottom: "loc_4_conv_permute"
  top: "loc_4_flatten"
  flatten_param {
    axis : 1
  }
}
# In-place Sigmoid on each per-scale class-score tensor (independent
# per-class probabilities rather than a softmax over classes).
layer {
  name: "cls_0_activation"
  type: "Sigmoid"
  bottom: "cls_0_reshape"
  top: "cls_0_reshape"
}
layer {
  name: "cls_1_activation"
  type: "Sigmoid"
  bottom: "cls_1_reshape"
  top: "cls_1_reshape"
}
layer {
  name: "cls_2_activation"
  type: "Sigmoid"
  bottom: "cls_2_reshape"
  top: "cls_2_reshape"
}
layer {
  name: "cls_3_activation"
  type: "Sigmoid"
  bottom: "cls_3_reshape"
  top: "cls_3_reshape"
}
layer {
  name: "cls_4_activation"
  type: "Sigmoid"
  bottom: "cls_4_reshape"
  top: "cls_4_reshape"
}
# Concatenate the five flattened localization vectors into "mbox_loc"
# (consumed by DetectionOutput below).
layer {
  name: "loc_branch_concat"
  type: "Concat"
  bottom: "loc_0_flatten"
  bottom: "loc_1_flatten"
  bottom: "loc_2_flatten"
  bottom: "loc_3_flatten"
  bottom: "loc_4_flatten"
  top: "mbox_loc"
  concat_param {
    axis: 1
  }
}
# Concatenate the five (N, priors, 2) score tensors along the prior axis.
layer {
  name: "cls_branch_concat"
  type: "Concat"
  bottom: "cls_0_reshape"
  bottom: "cls_1_reshape"
  bottom: "cls_2_reshape"
  bottom: "cls_3_reshape"
  bottom: "cls_4_reshape"
  top: "cls_branch_concat"
  concat_param {
    axis: 1
  }
}
# ---------------------------------------------------------------------------
# PriorBox (anchor) generation, one layer per detection scale. Each layer
# yields 4 priors per feature-map location (min_size, max_size, and two
# aspect ratios with flip off); anchor sizes roughly double per scale,
# from 10.4 px at conv2d_3 up to 166.4 px at conv2d_7.
# ---------------------------------------------------------------------------
layer {
  name: "conv2d_3_mbox_priorbox"
  type: "PriorBox"
  bottom: "conv2d_3"
  bottom: "data"
  top: "conv2d_3_mbox_priorbox"
  prior_box_param {
    min_size: 10.4
    max_size: 14.56
    aspect_ratio: 0.62
    aspect_ratio: 0.42
    flip: false
    clip: true
    variance: 0.1
    variance: 0.1
    variance: 0.2
    variance: 0.2
    offset: 0.5
  }
}
layer {
  name: "conv2d_4_mbox_priorbox"
  type: "PriorBox"
  bottom: "conv2d_4"
  bottom: "data"
  top: "conv2d_4_mbox_priorbox"
  prior_box_param {
    min_size: 20.8
    max_size: 28.6
    aspect_ratio: 0.62
    aspect_ratio: 0.42
    flip: false
    clip: true
    variance: 0.1
    variance: 0.1
    variance: 0.2
    variance: 0.2
    offset: 0.5
  }
}
layer {
  name: "conv2d_5_mbox_priorbox"
  type: "PriorBox"
  bottom: "conv2d_5"
  bottom: "data"
  top: "conv2d_5_mbox_priorbox"
  prior_box_param {
    min_size: 41.6
    max_size: 57.2
    aspect_ratio: 0.62
    aspect_ratio: 0.42
    flip: false
    clip: true
    variance: 0.1
    variance: 0.1
    variance: 0.2
    variance: 0.2
    offset: 0.5
  }
}
layer {
  name: "conv2d_6_mbox_priorbox"
  type: "PriorBox"
  bottom: "conv2d_6"
  bottom: "data"
  top: "conv2d_6_mbox_priorbox"
  prior_box_param {
    min_size: 83.2
    max_size: 117
    aspect_ratio: 0.62
    aspect_ratio: 0.42
    flip: false
    clip: true
    variance: 0.1
    variance: 0.1
    variance: 0.2
    variance: 0.2
    offset: 0.5
  }
}
layer {
  name: "conv2d_7_mbox_priorbox"
  type: "PriorBox"
  bottom: "conv2d_7"
  bottom: "data"
  top: "conv2d_7_mbox_priorbox"
  prior_box_param {
    min_size: 166.4
    max_size: 187.2
    aspect_ratio: 0.62
    aspect_ratio: 0.42
    flip: false
    clip: true
    variance: 0.1
    variance: 0.1
    variance: 0.2
    variance: 0.2
    offset: 0.5
  }
}
# All per-scale priors concatenated along axis 2 (the prior axis of the
# (1, 2, num_priors*4) PriorBox output).
layer {
  name: "mbox_priorbox"
  type: "Concat"
  bottom: "conv2d_3_mbox_priorbox"
  bottom: "conv2d_4_mbox_priorbox"
  bottom: "conv2d_5_mbox_priorbox"
  bottom: "conv2d_6_mbox_priorbox"
  bottom: "conv2d_7_mbox_priorbox"
  top: "mbox_priorbox"
  concat_param {
    axis: 2
  }
}
# ---------------------------------------------------------------------------
# Final head: reshape the concatenated scores to (N, -1, 2), flatten them
# into "mbox_conf", then decode + NMS everything in DetectionOutput.
# This last layer is what makes the network emit decoded detections
# directly, with no post-processing functions needed.
# ---------------------------------------------------------------------------
layer {
  name: "cls_branch_reshape"
  type: "Reshape"
  bottom: "cls_branch_concat"
  top: "cls_branch_reshape"
  reshape_param {
    shape {
      dim: 0
      dim: -1
      dim: 2
    }
  }
}
layer {
  name: "mbox_conf"
  type: "Flatten"
  bottom: "cls_branch_reshape"
  top: "mbox_conf"
  flatten_param {
    axis: 1
  }
}
# DetectionOutput (TEST phase only): decodes mbox_loc against mbox_priorbox
# using CENTER_SIZE encoding, filters scores below 0.5, applies NMS at IoU
# 0.45 keeping at most 100 boxes. num_classes: 2 with background label 0.
layer {
  name: "detection_out"
  type: "DetectionOutput"
  bottom: "mbox_loc"
  bottom: "mbox_conf"
  bottom: "mbox_priorbox"
  top: "detection_out"
  include {
    phase: TEST
  }
  detection_output_param {
    num_classes: 2
    share_location: true
    background_label_id: 0
    nms_param {
      nms_threshold: 0.45
      top_k: 100
    }
    code_type: CENTER_SIZE
    keep_top_k: 100
    confidence_threshold: 0.5
  }
}

Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment
Labels
None yet
Projects
None yet
Development

No branches or pull requests

2 participants