# TensorFlow Object Detection API pipeline configuration (protobuf text format).
# Model: Faster R-CNN with Inception ResNet v2 (atrous), fine-tuned from a COCO checkpoint.
model {
  faster_rcnn {
    num_classes: 30
    image_resizer {
      keep_aspect_ratio_resizer {
        min_dimension: 256
        max_dimension: 640
      }
    }
    feature_extractor {
      type: "faster_rcnn_inception_resnet_v2"
      first_stage_features_stride: 8
    }
    first_stage_anchor_generator {
      grid_anchor_generator {
        height_stride: 8
        width_stride: 8
        scales: 0.25
        scales: 0.5
        scales: 1.0
        scales: 2.0
        aspect_ratios: 0.5
        aspect_ratios: 1.0
        aspect_ratios: 2.0
      }
    }
    first_stage_atrous_rate: 2
    first_stage_box_predictor_conv_hyperparams {
      op: CONV
      regularizer {
        l2_regularizer {
          weight: 0.0
        }
      }
      initializer {
        truncated_normal_initializer {
          stddev: 0.01
        }
      }
    }
    first_stage_nms_score_threshold: 0.0
    first_stage_nms_iou_threshold: 0.7
    first_stage_max_proposals: 300
    first_stage_localization_loss_weight: 2.0
    first_stage_objectness_loss_weight: 1.0
    initial_crop_size: 17
    maxpool_kernel_size: 1
    maxpool_stride: 1
    second_stage_box_predictor {
      mask_rcnn_box_predictor {
        fc_hyperparams {
          op: FC
          regularizer {
            l2_regularizer {
              weight: 0.0
            }
          }
          initializer {
            variance_scaling_initializer {
              factor: 1.0
              uniform: true
              mode: FAN_AVG
            }
          }
        }
        use_dropout: false
        dropout_keep_probability: 1.0
      }
    }
    second_stage_post_processing {
      batch_non_max_suppression {
        score_threshold: 0.3
        iou_threshold: 0.6
        max_detections_per_class: 100
        max_total_detections: 100
      }
      score_converter: SOFTMAX
    }
    second_stage_localization_loss_weight: 2.0
    second_stage_classification_loss_weight: 1.0
  }
}
train_config {
  batch_size: 1
  data_augmentation_options {
    random_horizontal_flip {
    }
  }
  optimizer {
    momentum_optimizer {
      learning_rate {
        manual_step_learning_rate {
          initial_learning_rate: 0.0003
          schedule {
            step: 900000
            learning_rate: 3e-05
          }
          schedule {
            step: 1200000
            learning_rate: 3e-06
          }
        }
      }
      momentum_optimizer_value: 0.9
    }
    use_moving_average: false
  }
  gradient_clipping_by_norm: 10.0
  fine_tune_checkpoint: "gs://te_object_detection/pre_trained_model/faster_rcnn_inception_resnet_v2_atrous_coco_2018_01_28/model.ckpt"
  from_detection_checkpoint: false
  num_steps: 40000
  load_all_detection_checkpoint_vars: true
  fine_tune_checkpoint_type: "detection"
}
train_input_reader {
  label_map_path: "gs://te_object_detection/ytag/tf_label_map.pbtxt"
  tf_record_input_reader {
    input_path: "gs://te_object_detection/ytag/495-images-28-oct-YB-train.record"
  }
}
eval_config {
  num_examples: 8000
  max_evals: 10
  use_moving_averages: false
}
eval_input_reader {
  label_map_path: "gs://te_object_detection/ytag/tf_label_map.pbtxt"
  shuffle: true
  num_readers: 1
  tf_record_input_reader {
    input_path: "gs://te_object_detection/ytag/80-images-28-oct-YB-test.record"
  }
}