test: drop dead and commented code for cleanup
author: Inki Dae <inki.dae@samsung.com>
Mon, 5 Oct 2020 05:45:51 +0000 (14:45 +0900)
committer: Inki Dae <inki.dae@samsung.com>
Mon, 5 Oct 2020 05:45:51 +0000 (14:45 +0900)
Change-Id: I23fbc30065b3f5491752a43feb9dcc9ac8407795
Signed-off-by: Inki Dae <inki.dae@samsung.com>
test/testsuites/stream_infer/stream_infer.c

index 55d7fcaef8120b482c2eea1bc4c44775140b0d00..b36518bc1bf9a9ae9a0029fc22b8e8afb3114cc3 100644 (file)
@@ -288,9 +288,6 @@ void int_handler(int sig)
                inference_loop = 0;
 
                g_signal_handler_disconnect(vrsink, handler_p);
-#if 0
-               g_signal_handler_disconnect(vcrssink, handler_gp);
-#endif
 
                gst_element_send_event(pipeline, gst_event_new_eos());
 
@@ -349,9 +346,6 @@ void _hand_pose_cb (
                humanSkeleton.pose[n].x = (int)(640.f*(float)locations->landmarks[n].point.x/(float)width);
                humanSkeleton.pose[n].y = (int)(480.f*(float)locations->landmarks[n].point.y/(float)height);
                humanSkeleton.scores[n] = 1.0f; /* confidences[n];*/
-
-               //printf("(%d,%d): %f\n", humanSkeleton.pose[n].x, humanSkeleton.pose[n].y, confidences[n]);
-               //printf("(%d,%d)\n", humanSkeleton.pose[n].x, humanSkeleton.pose[n].y);
        }
 
        humanSkeleton.label = label;
@@ -373,27 +367,6 @@ static void _hand_detected_cb (
         void *user_data) //user_data  can be mv_source?
 {
 
-#if 0
-       if (0 /*confidences[1] < thValNeck*/) {
-               printf("lost pose\n");
-               humanSkeleton.IsDetected = false;
-               humanSkeleton.isPrevPose = false;
-               return;
-       }
-       
-       printf("%d landmarks, %d crop\n", number_of_landmarks, poseCropSize);
-       for (int n = 0; n < number_of_landmarks; ++n) {
-
-               humanSkeleton.pose[n].x = (int)((float)(locations[n].x + poseRoi.point.x) / (float)poseCropSize * 640.f);
-               humanSkeleton.pose[n].y = (int)((float)(locations[n].y + poseRoi.point.y) / (float)poseCropSize * 480.f);
-               humanSkeleton.scores[n] = 1.0f; /* confidences[n];*/
-
-               //printf("(%d,%d): %f\n", humanSkeleton.pose[n].x, humanSkeleton.pose[n].y, confidences[n]);
-               printf("(%d,%d)\n", humanSkeleton.pose[n].x, humanSkeleton.pose[n].y);
-       }
-       humanSkeleton.IsDetected = true;
-#else
-
        if (number_of_hands <= 0) {
                humanSkeleton.IsDetected = false;
                humanSkeleton.label = -1;
@@ -407,7 +380,6 @@ static void _hand_detected_cb (
                                        56, 56, 21, 3);
 
        mv_inference_pose_landmark_detect(mv_src_p2, hp_mv_infer2, NULL,  _hand_pose_cb, NULL);
-#endif
        return;
 }
 
@@ -451,15 +423,6 @@ void _human_pose_cb (
        struct timespec diffspec = diff(current_data_p.s_time, e_time);
        unsigned long timeDiff = gettotalmillisec(diffspec);
        printf("elapsed time: %lu(ms)\n", timeDiff);
-
-//     user_stack_t *s = (user_stack_t *)user_data;
-
-//     struct timespec e_time;
-//     clock_gettime(CLOCK_MONOTONIC, &e_time);
-
-//     struct timespec diffspec = diff(s->s_time, e_time);
-//     unsigned long timeDiff = gettotalmillisec(diffspec);
-//     printf("elapsed time: %lu(ms)\n", timeDiff);
 }
 
 static gboolean
@@ -659,9 +622,6 @@ draw_overlay_pose (GstElement * overlay, cairo_t * cr, guint64 timestamp,
                cairo_show_text(cr, tmpText[1]);
                cairo_move_to(cr, 420.0, 90.0);
                cairo_show_text(cr, tmpText[2]);
-       
-
-
        } else { // wrong
                cairo_set_source_rgba(cr, 0.9, 0.1, 0.0, 0.7);
                cairo_move_to(cr, 420.0, 60.0);
@@ -966,7 +926,6 @@ int perform_tflite_hand_detection_AIC(mv_engine_config_h mv_engine_cfg)
     char *inputNodeName = "input";
     char *outputNodeNames[2] = {"mobilenetv2/boundingbox", "mobilenetv2/heatmap"};
 
-    //outputTensorData = (void*)calloc(56*56*21, sizeof(float));
     mv_engine_config_set_string_attribute(mv_engine_cfg,
                         MV_INFERENCE_MODEL_WEIGHT_FILE_PATH,
                         PE_TFLITE_AIC_1_WEIGHT_PATH);
@@ -1078,7 +1037,6 @@ int perform_tflite_hand_detection_AICLite3_1(mv_engine_config_h mv_engine_cfg)
     char *inputNodeName = "input";
     char *outputNodeNames[2] = {"ban_1_6/boundingbox", "ban_1_6/heatmap"};
 
-    //outputTensorData = (void*)calloc(56*56*21, sizeof(float));
     mv_engine_config_set_string_attribute(mv_engine_cfg,
                         MV_INFERENCE_MODEL_WEIGHT_FILE_PATH,
                         PE_TFLITE_AICLite3_1_WEIGHT_PATH);
@@ -1190,7 +1148,6 @@ int perform_tflite_hand_detection_AICLite(mv_engine_config_h mv_engine_cfg)
     char *inputNodeName = "input";
     char *outputNodeNames[2] = {"mobilenetv2/boundingbox", "mobilenetv2/heatmap"};
 
-    //outputTensorData = (void*)calloc(56*56*21, sizeof(float));
     mv_engine_config_set_string_attribute(mv_engine_cfg,
                         MV_INFERENCE_MODEL_WEIGHT_FILE_PATH,
                         PE_TFLITE_AICLite_1_WEIGHT_PATH);
@@ -1302,7 +1259,6 @@ int perform_tflite_hand_detection_AICLiteQ(mv_engine_config_h mv_engine_cfg)
     char *inputNodeName = "input";
     char *outputNodeNames[2] = {"mobilenetv2/boundingbox", "mobilenetv2/heatmap"};
 
-    //outputTensorData = (void*)calloc(56*56*21, sizeof(char));
     mv_engine_config_set_string_attribute(mv_engine_cfg,
                         MV_INFERENCE_MODEL_WEIGHT_FILE_PATH,
                         PE_TFLITE_AICLiteQ_1_WEIGHT_PATH);
@@ -1645,7 +1601,6 @@ static int app_create(void *data)
 
 
        if (ad->modelType == MODEL_TYPE_POSE_CPM) {
-               //err = perform_armnn_human_pose_cpm_configure(hp_mv_engine_cfg);
                err = perform_tflite_human_pose_cpm_configure(hp_mv_engine_cfg);
 
                mv_pose_create(&hpPoser);
@@ -1717,7 +1672,6 @@ static int app_create(void *data)
                filter = gst_element_factory_make("capsfilter", "filter");
        } else {
                source = gst_element_factory_make("filesrc", "src");
-               
                dbin = gst_element_factory_make("decodebin", "dbin");
                dscale = gst_element_factory_make("videoscale", "dscale");
                dconv = gst_element_factory_make("videoconvert", "dconv");
@@ -1741,10 +1695,6 @@ static int app_create(void *data)
        queue1 = gst_element_factory_make("queue", "queue1");
        queue2 = gst_element_factory_make("queue", "queue2");
 
-       if (0 /*ad->modelType == MODEL_TYPE_POSE_HAND_AIC*/) {
-               queue3 = gst_element_factory_make("queue", "queue3");
-       }
-
        // queue1 - videoscale - capsfilter -viedoeconvert - capsfilter - videorate - capsfilter -fakesink
        vscale = gst_element_factory_make("videoscale", "scale");
        vsfilter = gst_element_factory_make("capsfilter", "vsfilter");
@@ -1760,18 +1710,6 @@ static int app_create(void *data)
        sink = gst_element_factory_make("fpsdisplaysink", "vsink");
        sink2 = gst_element_factory_make("tizenwlsink", "vsink2");
 
-       // after detection, crop using video crop
-       // queue3 - videocrop - videoscale -capsfilter - videoconvert - capsfilter -fakesink
-
-       if (0/*ad->modelType == MODEL_TYPE_POSE_HAND_AIC*/) {
-               vcrop = gst_element_factory_make("videocrop", "crop");
-               vcrscale = gst_element_factory_make("videoscale", "crscale");
-               vcrsfilter = gst_element_factory_make("capsfilter", "vcrsfilter");
-               vcrsconv = gst_element_factory_make("videoconvert", "vcrsconvert");
-               vcrscfilter = gst_element_factory_make("capsfilter", "vcrscfilter");
-               vcrssink = gst_element_factory_make("fakesink", "vcrssink");
-       }
-
        if (!pipeline || !source ||
                !tee || !queue1 || !vscale || !vsfilter || !vconv || !vcfilter ||
                !vrate || !vrfilter || !vrsink ||
@@ -1780,12 +1718,6 @@ static int app_create(void *data)
                return -1;
        }
 
-       if (0 /*ad->modelType == MODEL_TYPE_POSE_HAND_AIC && (!pipeline || !queue3 
-               || !vcrop || !vcrscale || !vcrsfilter || !vcrsconv || !vcrscfilter || !vcrssink*/) {
-               printf(TEXT_RED "One element(queue3) might be not created. Exiting.\n" TEXT_RESET);
-               return -1;
-       }
-
        if (!ad->filename) {
                if (!filter) {
                        printf(TEXT_RED "One element might be not created. Existing.\n" TEXT_RESET);
@@ -1819,7 +1751,7 @@ static int app_create(void *data)
        } else {
                g_signal_connect (coverlay, "draw", G_CALLBACK (draw_overlay_hand), overlay_state);
        }
-       
+
        g_signal_connect (coverlay, "caps-changed", G_CALLBACK (prepare_overlay), overlay_state);
 
        if (!ad->filename) {
@@ -1847,16 +1779,6 @@ static int app_create(void *data)
        g_object_set(G_OBJECT(vcfilter), "caps", gst_caps_from_string("video/x-raw, format=RGB"), NULL);
        g_object_set(G_OBJECT(vrfilter), "caps", gst_caps_from_string("video/x-raw, framerate=15/1"), NULL);
 
-       //g_object_set(G_OBJECT(vrate), "drop-only", TRUE, NULL);
-
-       //g_object_set(G_OBJECT(queue2), "leaky", 2, NULL);
-#if 0
-       g_object_set(G_OBJECT(queue3), "max-size-buffers", 0, NULL);
-       g_object_set(G_OBJECT(queue3), "max-size-time", 0, NULL);
-       g_object_set(G_OBJECT(queue3), "max-size-bytes", 0, NULL);
-#endif
-       //g_object_set(G_OBJECT(queue3), "leaky", 2, NULL);
-
        // here to be continue
        printf("vrsink signal-handoffs\n");
        g_object_set(G_OBJECT(vrsink), "signal-handoffs", TRUE, NULL);
@@ -1888,13 +1810,6 @@ static int app_create(void *data)
                                        queue2, oconv, coverlay, sink,
                                        NULL);
 
-       if (0 /*ad->modelType == MODEL_TYPE_POSE_HAND_AIC*/) {
-               gst_bin_add_many(GST_BIN(pipeline),
-                                       queue3, vcrop, vcrscale, vcrsfilter, vcrsconv, vcrscfilter, vcrssink,
-                                       NULL);
-               gst_element_link_many (tee, queue3, vcrop, vcrscale, vcrsfilter, vcrsconv, vcrscfilter, vcrssink, NULL);
-       }
-
        /* link elements */
        if (!ad->filename) {
                gst_bin_add(GST_BIN(pipeline), filter);
@@ -1922,50 +1837,14 @@ static int app_create(void *data)
                // pose
                gst_element_link_many (tee, queue1, vrate, vrfilter, vconv, vcfilter, vscale, vsfilter, vrsink, NULL);
        }
-       
-       /* set the pipeline state to "playing" state */
-       //gst_element_set_state(pipeline, GST_STATE_PLAYING);
 
        /* loop */
        humanSkeleton.IsDetected = false;
        humanSkeleton.isPrevPose = false;
        humanSkeleton.label = -1;
        printf(TEXT_GREEN "Running.....\n" TEXT_RESET);
-       // GST_END
-#if 0
-       /* use gl backend */
-       elm_config_accel_preference_set("opengl");
-
-       /* create window */
-       //win = elm_win_add(NULL, PACKAGE, ELM_WIN_SPLASH );
-       win = elm_win_add(NULL, PACKAGE, ELM_WIN_BASIC);
-       if (win) {
-               elm_win_title_set(win, PACKAGE);
-               elm_win_borderless_set(win, EINA_TRUE);
-               elm_win_autodel_set(win, EINA_TRUE);
-               elm_win_alpha_set(win, EINA_FALSE);
-         evas_object_show(win);
-       }
-       elm_win_layer_set(win, 9);
-       elm_win_prop_focus_skip_set(win, EINA_TRUE);
-
-       ad->win = win;
-       g_win_id = win;
-       selected_win_id = g_win_id;
-
-       Evas_Object *bg = elm_bg_add(win);
-       elm_win_resize_object_add(win, bg);
-       evas_object_size_hint_min_set(bg, WIDTH, HEIGHT);
-       evas_object_size_hint_max_set(bg, WIDTH, HEIGHT);
-       evas_object_show(bg);
-
-       elm_win_activate(win);
-
-
-       evas_object_event_callback_add(win, EVAS_CALLBACK_RESIZE, win_resize_cb, NULL);
-#else
+
        gst_element_set_state (pipeline, GST_STATE_PLAYING);
-#endif
        return 0;
 }