From dd86a4a1c1986d358915cb5b81f118d08c144abf Mon Sep 17 00:00:00 2001
From: badai-nguyen <dai.nguyen@tier4.jp>
Date: Mon, 11 Mar 2024 16:33:21 +0900
Subject: [PATCH 01/20] refactor: lidar_centerpoint

Signed-off-by: badai-nguyen <dai.nguyen@tier4.jp>
---
 .../lidar_centerpoint/config/centerpoint.param.yaml   |  8 --------
 .../config/centerpoint_tiny.param.yaml                |  8 --------
 .../config/lidar_centerpoint_ml_package.param.yaml    | 11 +++++++++++
 .../launch/lidar_centerpoint.launch.xml               | 10 +++-------
 perception/lidar_centerpoint/src/node.cpp             | 10 +++++-----
 5 files changed, 19 insertions(+), 28 deletions(-)
 create mode 100644 perception/lidar_centerpoint/config/lidar_centerpoint_ml_package.param.yaml

diff --git a/perception/lidar_centerpoint/config/centerpoint.param.yaml b/perception/lidar_centerpoint/config/centerpoint.param.yaml
index a9c3174846d0d..db0683e2a77d0 100644
--- a/perception/lidar_centerpoint/config/centerpoint.param.yaml
+++ b/perception/lidar_centerpoint/config/centerpoint.param.yaml
@@ -13,14 +13,6 @@
     iou_nms_search_distance_2d: 10.0
     iou_nms_threshold: 0.1
     yaw_norm_thresholds: [0.3, 0.3, 0.3, 0.3, 0.0]
-    score_threshold: 0.35
-    has_twist: false
     trt_precision: fp16
     densification_num_past_frames: 1
     densification_world_frame_id: map
-
-    # weight files
-    encoder_onnx_path: "$(var model_path)/pts_voxel_encoder_$(var model_name).onnx"
-    encoder_engine_path: "$(var model_path)/pts_voxel_encoder_$(var model_name).engine"
-    head_onnx_path: "$(var model_path)/pts_backbone_neck_head_$(var model_name).onnx"
-    head_engine_path: "$(var model_path)/pts_backbone_neck_head_$(var model_name).engine"
diff --git a/perception/lidar_centerpoint/config/centerpoint_tiny.param.yaml b/perception/lidar_centerpoint/config/centerpoint_tiny.param.yaml
index 474d0e2b695ac..9288a5fc9d05d 100644
--- a/perception/lidar_centerpoint/config/centerpoint_tiny.param.yaml
+++ b/perception/lidar_centerpoint/config/centerpoint_tiny.param.yaml
@@ -13,14 +13,6 @@
     iou_nms_search_distance_2d: 10.0
     iou_nms_threshold: 0.1
     yaw_norm_thresholds: [0.3, 0.3, 0.3, 0.3, 0.0]
-    score_threshold: 0.35
-    has_twist: false
     trt_precision: fp16
     densification_num_past_frames: 1
     densification_world_frame_id: map
-
-    # weight files
-    encoder_onnx_path: "$(var model_path)/pts_voxel_encoder_$(var model_name).onnx"
-    encoder_engine_path: "$(var model_path)/pts_voxel_encoder_$(var model_name).engine"
-    head_onnx_path: "$(var model_path)/pts_backbone_neck_head_$(var model_name).onnx"
-    head_engine_path: "$(var model_path)/pts_backbone_neck_head_$(var model_name).engine"
diff --git a/perception/lidar_centerpoint/config/lidar_centerpoint_ml_package.param.yaml b/perception/lidar_centerpoint/config/lidar_centerpoint_ml_package.param.yaml
new file mode 100644
index 0000000000000..06b15cf4c1bf0
--- /dev/null
+++ b/perception/lidar_centerpoint/config/lidar_centerpoint_ml_package.param.yaml
@@ -0,0 +1,11 @@
+/**:
+  ros__parameters:
+    score_threshold: 0.35
+    has_twist: false
+    build_only: false
+
+    # weight files
+    encoder_onnx_path: "$(var model_path)/pts_voxel_encoder_$(var model_name).onnx"
+    encoder_engine_path: "$(var model_path)/pts_voxel_encoder_$(var model_name).engine"
+    head_onnx_path: "$(var model_path)/pts_backbone_neck_head_$(var model_name).onnx"
+    head_engine_path: "$(var model_path)/pts_backbone_neck_head_$(var model_name).engine"
diff --git a/perception/lidar_centerpoint/launch/lidar_centerpoint.launch.xml b/perception/lidar_centerpoint/launch/lidar_centerpoint.launch.xml
index 5489af2c8fb60..e27f630f57e82 100644
--- a/perception/lidar_centerpoint/launch/lidar_centerpoint.launch.xml
+++ b/perception/lidar_centerpoint/launch/lidar_centerpoint.launch.xml
@@ -6,8 +6,8 @@
   <arg name="model_name" default="centerpoint_tiny" description="options: `centerpoint` or `centerpoint_tiny`"/>
   <arg name="model_path" default="$(var data_path)/lidar_centerpoint"/>
   <arg name="model_param_path" default="$(find-pkg-share lidar_centerpoint)/config/$(var model_name).param.yaml"/>
+  <arg name="ml_package_param_path" default="$(find-pkg-share lidar_centerpoint)/config/lidar_centerpoint_ml_package.param.yaml"/>
   <arg name="class_remapper_param_path" default="$(find-pkg-share lidar_centerpoint)/config/detection_class_remapper.param.yaml"/>
-  <arg name="build_only" default="false" description="shutdown node after TensorRT engine file is built"/>
 
   <arg name="use_pointcloud_container" default="false" description="use pointcloud_container"/>
   <arg name="pointcloud_container_name" default="pointcloud_container" description="pointcloud_container name"/>
@@ -18,10 +18,8 @@
         <remap from="~/input/pointcloud" to="$(var input/pointcloud)"/>
         <remap from="~/output/objects" to="$(var output/objects)"/>
         <param from="$(var model_param_path)" allow_substs="true"/>
+        <param from="$(var ml_package_param_path)" allow_substs="true"/>
         <param from="$(var class_remapper_param_path)"/>
-
-        <!-- This parameter shall NOT be included in param file. See also: https://github.com/autowarefoundation/autoware.universe/pull/6167 -->
-        <param name="build_only" value="$(var build_only)"/>
       </composable_node>
     </load_composable_node>
   </group>
@@ -30,10 +28,8 @@
       <remap from="~/input/pointcloud" to="$(var input/pointcloud)"/>
       <remap from="~/output/objects" to="$(var output/objects)"/>
       <param from="$(var model_param_path)" allow_substs="true"/>
+      <param from="$(var ml_package_param_path)" allow_substs="true"/>
       <param from="$(var class_remapper_param_path)"/>
-
-      <!-- This parameter shall NOT be included in param file. See also: https://github.com/autowarefoundation/autoware.universe/pull/6167 -->
-      <param name="build_only" value="$(var build_only)"/>
     </node>
   </group>
 </launch>
diff --git a/perception/lidar_centerpoint/src/node.cpp b/perception/lidar_centerpoint/src/node.cpp
index d370a60a26aa5..c4b02d4c8d143 100644
--- a/perception/lidar_centerpoint/src/node.cpp
+++ b/perception/lidar_centerpoint/src/node.cpp
@@ -39,23 +39,23 @@ LidarCenterPointNode::LidarCenterPointNode(const rclcpp::NodeOptions & node_opti
 : Node("lidar_center_point", node_options), tf_buffer_(this->get_clock())
 {
   const float score_threshold =
-    static_cast<float>(this->declare_parameter<double>("score_threshold", 0.35));
+    static_cast<float>(this->declare_parameter<double>("score_threshold"));
   const float circle_nms_dist_threshold =
     static_cast<float>(this->declare_parameter<double>("circle_nms_dist_threshold"));
   const auto yaw_norm_thresholds =
     this->declare_parameter<std::vector<double>>("yaw_norm_thresholds");
   const std::string densification_world_frame_id =
-    this->declare_parameter("densification_world_frame_id", "map");
+    this->declare_parameter<std::string>("densification_world_frame_id");
   const int densification_num_past_frames =
-    this->declare_parameter("densification_num_past_frames", 1);
-  const std::string trt_precision = this->declare_parameter("trt_precision", "fp16");
+    this->declare_parameter<int>("densification_num_past_frames");
+  const std::string trt_precision = this->declare_parameter<std::string>("trt_precision");
   const std::string encoder_onnx_path = this->declare_parameter<std::string>("encoder_onnx_path");
   const std::string encoder_engine_path =
     this->declare_parameter<std::string>("encoder_engine_path");
   const std::string head_onnx_path = this->declare_parameter<std::string>("head_onnx_path");
   const std::string head_engine_path = this->declare_parameter<std::string>("head_engine_path");
   class_names_ = this->declare_parameter<std::vector<std::string>>("class_names");
-  has_twist_ = this->declare_parameter("has_twist", false);
+  has_twist_ = this->declare_parameter<bool>("has_twist");
   const std::size_t point_feature_size =
     static_cast<std::size_t>(this->declare_parameter<std::int64_t>("point_feature_size"));
   const std::size_t max_voxel_size =

From d7b661a77d173351302b2a106c30aa376dba67d0 Mon Sep 17 00:00:00 2001
From: badai-nguyen <dai.nguyen@tier4.jp>
Date: Mon, 11 Mar 2024 16:34:04 +0900
Subject: [PATCH 02/20] refactor: pointpainting

Signed-off-by: badai-nguyen <dai.nguyen@tier4.jp>
---
 .../config/pointpainting.param.yaml             | 15 ---------------
 .../config/pointpainting_ml_package.param.yaml  | 17 +++++++++++++++++
 .../launch/pointpainting_fusion.launch.xml      | 15 ++++++---------
 3 files changed, 23 insertions(+), 24 deletions(-)
 create mode 100644 perception/image_projection_based_fusion/config/pointpainting_ml_package.param.yaml

diff --git a/perception/image_projection_based_fusion/config/pointpainting.param.yaml b/perception/image_projection_based_fusion/config/pointpainting.param.yaml
index 3abaffb243d96..25de68e50c6f1 100755
--- a/perception/image_projection_based_fusion/config/pointpainting.param.yaml
+++ b/perception/image_projection_based_fusion/config/pointpainting.param.yaml
@@ -1,11 +1,6 @@
 /**:
   ros__parameters:
     trt_precision: fp16
-    encoder_onnx_path: "$(var model_path)/pts_voxel_encoder_$(var model_name).onnx"
-    encoder_engine_path: "$(var model_path)/pts_voxel_encoder_$(var model_name).engine"
-    head_onnx_path: "$(var model_path)/pts_backbone_neck_head_$(var model_name).onnx"
-    head_engine_path: "$(var model_path)/pts_backbone_neck_head_$(var model_name).engine"
-
     model_params:
       class_names: ["CAR", "TRUCK", "BUS", "BICYCLE", "PEDESTRIAN"]
       paint_class_names: ["CAR", "BICYCLE", "PEDESTRIAN"]
@@ -20,13 +15,3 @@
     densification_params:
       world_frame_id: "map"
       num_past_frames: 0
-    post_process_params:
-      # post-process params
-      circle_nms_dist_threshold: 0.3
-      iou_nms_target_class_names: ["CAR"]
-      iou_nms_search_distance_2d: 10.0
-      iou_nms_threshold: 0.1
-      score_threshold: 0.35
-    omp_params:
-      # omp params
-      num_threads: 1
diff --git a/perception/image_projection_based_fusion/config/pointpainting_ml_package.param.yaml b/perception/image_projection_based_fusion/config/pointpainting_ml_package.param.yaml
new file mode 100644
index 0000000000000..83d2a0fed26b2
--- /dev/null
+++ b/perception/image_projection_based_fusion/config/pointpainting_ml_package.param.yaml
@@ -0,0 +1,17 @@
+/**:
+  ros__parameters:
+    encoder_onnx_path: "$(var model_path)/pts_voxel_encoder_$(var model_name).onnx"
+    encoder_engine_path: "$(var model_path)/pts_voxel_encoder_$(var model_name).engine"
+    head_onnx_path: "$(var model_path)/pts_backbone_neck_head_$(var model_name).onnx"
+    head_engine_path: "$(var model_path)/pts_backbone_neck_head_$(var model_name).engine"
+    build_only: false
+    post_process_params:
+      # post-process params
+      circle_nms_dist_threshold: 0.3
+      iou_nms_target_class_names: ["CAR"]
+      iou_nms_search_distance_2d: 10.0
+      iou_nms_threshold: 0.1
+      score_threshold: 0.35
+    omp_params:
+      # omp params
+      num_threads: 1
diff --git a/perception/image_projection_based_fusion/launch/pointpainting_fusion.launch.xml b/perception/image_projection_based_fusion/launch/pointpainting_fusion.launch.xml
index 9236d7d789ed1..2bce8b3dc97aa 100644
--- a/perception/image_projection_based_fusion/launch/pointpainting_fusion.launch.xml
+++ b/perception/image_projection_based_fusion/launch/pointpainting_fusion.launch.xml
@@ -22,9 +22,12 @@
   <arg name="model_name" default="pointpainting" description="options: `pointpainting`"/>
   <arg name="model_path" default="$(var data_path)/image_projection_based_fusion"/>
   <arg name="model_param_path" default="$(find-pkg-share image_projection_based_fusion)/config/$(var model_name).param.yaml"/>
+  <arg
+    name="ml_package_param_path"
+    default="/home/autoware/autoware/src/launcher/autoware_launch/autoware_launch/config/perception/object_recognition/detection/lidar_model/pointpainting_ml_package.param.yaml"
+  />
   <arg name="class_remapper_param_path" default="$(find-pkg-share lidar_centerpoint)/config/detection_class_remapper.param.yaml"/>
   <arg name="sync_param_path" default="$(find-pkg-share image_projection_based_fusion)/config/roi_sync.param.yaml"/>
-  <arg name="build_only" default="false" description="shutdown node after TensorRT engine file is built"/>
 
   <arg name="use_pointcloud_container" default="false" description="use pointcloud_container"/>
   <arg name="pointcloud_container_name" default="pointcloud_container" description="pointcloud_container name"/>
@@ -48,13 +51,10 @@
         <remap from="~/input/pointcloud" to="$(var input/pointcloud)"/>
         <remap from="~/output/objects" to="$(var output/objects)"/>
         <param from="$(var model_param_path)" allow_substs="true"/>
+        <param from="$(var ml_package_param_path)" allow_substs="true"/>
         <param from="$(var sync_param_path)"/>
         <param from="$(var class_remapper_param_path)"/>
         <param name="rois_number" value="$(var input/rois_number)"/>
-
-        <!-- This parameter shall NOT be included in param file. See also: https://github.com/autowarefoundation/autoware.universe/pull/6169 -->
-        <param name="build_only" value="$(var build_only)"/>
-
         <!-- rois, camera and info -->
         <param name="input/rois0" value="$(var input/rois0)"/>
         <param name="input/camera_info0" value="$(var input/camera_info0)"/>
@@ -89,13 +89,10 @@
       <remap from="~/input/pointcloud" to="$(var input/pointcloud)"/>
       <remap from="~/output/objects" to="$(var output/objects)"/>
       <param from="$(var model_param_path)" allow_substs="true"/>
+      <param from="$(var ml_package_param_path)" allow_substs="true"/>
       <param from="$(var sync_param_path)"/>
       <param from="$(var class_remapper_param_path)"/>
       <param name="rois_number" value="$(var input/rois_number)"/>
-
-      <!-- This parameter shall NOT be included in param file. See also: https://github.com/autowarefoundation/autoware.universe/pull/6169 -->
-      <param name="build_only" value="$(var build_only)"/>
-
       <!-- rois, camera and info -->
       <param name="input/rois0" value="$(var input/rois0)"/>
       <param name="input/camera_info0" value="$(var input/camera_info0)"/>

From cfe563ebe16af794c6839d278e1128a18785b771 Mon Sep 17 00:00:00 2001
From: badai-nguyen <dai.nguyen@tier4.jp>
Date: Mon, 11 Mar 2024 17:47:13 +0900
Subject: [PATCH 03/20] chore: fix launch

Signed-off-by: badai-nguyen <dai.nguyen@tier4.jp>
---
 .../detection/detector/camera_lidar_detector.launch.xml     | 1 +
 .../detection/detector/lidar_dnn_detector.launch.xml        | 6 +++---
 launch/tier4_perception_launch/launch/perception.launch.xml | 4 ++--
 3 files changed, 6 insertions(+), 5 deletions(-)

diff --git a/launch/tier4_perception_launch/launch/object_recognition/detection/detector/camera_lidar_detector.launch.xml b/launch/tier4_perception_launch/launch/object_recognition/detection/detector/camera_lidar_detector.launch.xml
index ddcbbbaeae107..a90d04c567f06 100644
--- a/launch/tier4_perception_launch/launch/object_recognition/detection/detector/camera_lidar_detector.launch.xml
+++ b/launch/tier4_perception_launch/launch/object_recognition/detection/detector/camera_lidar_detector.launch.xml
@@ -83,6 +83,7 @@
       <arg name="model_name" value="$(var lidar_detection_model)"/>
       <arg name="model_path" value="$(var pointpainting_model_path)"/>
       <arg name="model_param_path" value="$(var lidar_model_param_path)/$(var lidar_detection_model).param.yaml"/>
+      <arg name="ml_package_param_path" value="$(var ml_package_param_path)/pointpainting_ml_package.param.yaml"/>
       <arg name="class_remapper_param_path" value="$(var lidar_model_param_path)/detection_class_remapper.param.yaml"/>
 
       <arg name="use_pointcloud_container" value="true"/>
diff --git a/launch/tier4_perception_launch/launch/object_recognition/detection/detector/lidar_dnn_detector.launch.xml b/launch/tier4_perception_launch/launch/object_recognition/detection/detector/lidar_dnn_detector.launch.xml
index f9b65fd5c30eb..e8d08d964a2fb 100644
--- a/launch/tier4_perception_launch/launch/object_recognition/detection/detector/lidar_dnn_detector.launch.xml
+++ b/launch/tier4_perception_launch/launch/object_recognition/detection/detector/lidar_dnn_detector.launch.xml
@@ -7,7 +7,6 @@
   <!-- Lidar detector centerpoint parameters -->
   <arg name="centerpoint_model_name" default="centerpoint_tiny"/>
   <arg name="centerpoint_model_path" default="$(var data_path)/lidar_centerpoint"/>
-  <arg name="lidar_model_param_path" default="$(find-pkg-share lidar_centerpoint)/config"/>
 
   <!-- CenterPoint -->
   <group if="$(eval &quot;'$(var lidar_detection_model)'=='centerpoint'&quot;)">
@@ -18,8 +17,9 @@
         <arg name="output/objects" value="objects"/>
         <arg name="model_name" value="$(var centerpoint_model_name)"/>
         <arg name="model_path" value="$(var centerpoint_model_path)"/>
-        <arg name="model_param_path" value="$(var lidar_model_param_path)/$(var centerpoint_model_name).param.yaml"/>
-        <arg name="class_remapper_param_path" value="$(var lidar_model_param_path)/detection_class_remapper.param.yaml"/>
+        <arg name="model_param_path" value="$(var centerpoint_model_path)/$(var centerpoint_model_name).param.yaml"/>
+        <arg name="ml_package_param_path" value="$(var ml_package_param_path)/lidar_centerpoint_ml_package.param.yaml"/>
+        <arg name="class_remapper_param_path" value="$(var centerpoint_model_path)/detection_class_remapper.param.yaml"/>
 
         <arg name="use_pointcloud_container" value="true"/>
         <arg name="pointcloud_container_name" value="$(var pointcloud_container_name)"/>
diff --git a/launch/tier4_perception_launch/launch/perception.launch.xml b/launch/tier4_perception_launch/launch/perception.launch.xml
index c6213ee313c2b..2556c14cae244 100644
--- a/launch/tier4_perception_launch/launch/perception.launch.xml
+++ b/launch/tier4_perception_launch/launch/perception.launch.xml
@@ -13,7 +13,7 @@
   <arg name="object_recognition_detection_roi_cluster_fusion_param_path"/>
   <arg name="object_recognition_detection_roi_pointcloud_fusion_param_path"/>
   <arg name="object_recognition_detection_roi_detected_object_fusion_param_path"/>
-  <arg name="object_recognition_detection_lidar_model_param_path"/>
+  <arg name="object_recognition_detection_ml_package_param_path"/>
   <arg name="object_recognition_detection_radar_lanelet_filtering_range_param_path"/>
   <arg name="object_recognition_detection_radar_crossing_objects_noise_filter_param_path"/>
   <arg name="object_recognition_detection_radar_object_clustering_param_path"/>
@@ -183,7 +183,7 @@
           <arg name="roi_cluster_fusion_param_path" value="$(var object_recognition_detection_roi_cluster_fusion_param_path)"/>
           <arg name="roi_pointcloud_fusion_param_path" value="$(var object_recognition_detection_roi_pointcloud_fusion_param_path)"/>
           <arg name="roi_detected_object_fusion_param_path" value="$(var object_recognition_detection_roi_detected_object_fusion_param_path)"/>
-          <arg name="lidar_model_param_path" value="$(var object_recognition_detection_lidar_model_param_path)"/>
+          <arg name="ml_package_param_path" value="$(var object_recognition_detection_ml_package_param_path)"/>
           <arg name="euclidean_param_path" value="$(var object_recognition_detection_euclidean_cluster_param_path)"/>
           <arg name="outlier_param_path" value="$(var object_recognition_detection_outlier_param_path)"/>
           <arg name="voxel_grid_based_euclidean_param_path" value="$(var object_recognition_detection_voxel_grid_based_euclidean_cluster_param_path)"/>

From 772ba9abc74115c43ad4121f210a8576c36509d2 Mon Sep 17 00:00:00 2001
From: badai-nguyen <dai.nguyen@tier4.jp>
Date: Mon, 11 Mar 2024 19:21:14 +0900
Subject: [PATCH 04/20] chore: fix launch

Signed-off-by: badai-nguyen <dai.nguyen@tier4.jp>
---
 .../detection/detector/camera_lidar_detector.launch.xml  | 4 ++--
 .../detection/detector/lidar_dnn_detector.launch.xml     | 3 +--
 .../launch/pointpainting_fusion.launch.xml               | 9 +++------
 .../launch/lidar_centerpoint.launch.xml                  | 4 ++--
 4 files changed, 8 insertions(+), 12 deletions(-)

diff --git a/launch/tier4_perception_launch/launch/object_recognition/detection/detector/camera_lidar_detector.launch.xml b/launch/tier4_perception_launch/launch/object_recognition/detection/detector/camera_lidar_detector.launch.xml
index a90d04c567f06..55153820ed6bc 100644
--- a/launch/tier4_perception_launch/launch/object_recognition/detection/detector/camera_lidar_detector.launch.xml
+++ b/launch/tier4_perception_launch/launch/object_recognition/detection/detector/camera_lidar_detector.launch.xml
@@ -82,9 +82,9 @@
       <arg name="output/objects" value="objects"/>
       <arg name="model_name" value="$(var lidar_detection_model)"/>
       <arg name="model_path" value="$(var pointpainting_model_path)"/>
-      <arg name="model_param_path" value="$(var lidar_model_param_path)/$(var lidar_detection_model).param.yaml"/>
+      <arg name="model_param_path" value="$(var pointpainting_model_path)/$(var lidar_detection_model).param.yaml"/>
       <arg name="ml_package_param_path" value="$(var ml_package_param_path)/pointpainting_ml_package.param.yaml"/>
-      <arg name="class_remapper_param_path" value="$(var lidar_model_param_path)/detection_class_remapper.param.yaml"/>
+      <arg name="class_remapper_param_path" value="$(var pointpainting_model_path)/detection_class_remapper.param.yaml"/>
 
       <arg name="use_pointcloud_container" value="true"/>
       <arg name="pointcloud_container_name" value="$(var pointcloud_container_name)"/>
diff --git a/launch/tier4_perception_launch/launch/object_recognition/detection/detector/lidar_dnn_detector.launch.xml b/launch/tier4_perception_launch/launch/object_recognition/detection/detector/lidar_dnn_detector.launch.xml
index e8d08d964a2fb..20f0ed32489f3 100644
--- a/launch/tier4_perception_launch/launch/object_recognition/detection/detector/lidar_dnn_detector.launch.xml
+++ b/launch/tier4_perception_launch/launch/object_recognition/detection/detector/lidar_dnn_detector.launch.xml
@@ -6,7 +6,6 @@
 
   <!-- Lidar detector centerpoint parameters -->
   <arg name="centerpoint_model_name" default="centerpoint_tiny"/>
-  <arg name="centerpoint_model_path" default="$(var data_path)/lidar_centerpoint"/>
 
   <!-- CenterPoint -->
   <group if="$(eval &quot;'$(var lidar_detection_model)'=='centerpoint'&quot;)">
@@ -17,7 +16,7 @@
         <arg name="output/objects" value="objects"/>
         <arg name="model_name" value="$(var centerpoint_model_name)"/>
         <arg name="model_path" value="$(var centerpoint_model_path)"/>
-        <arg name="model_param_path" value="$(var centerpoint_model_path)/$(var centerpoint_model_name).param.yaml"/>
+        <arg name="model_param_path" value="$(var centerpoint_model_path)/$(var centerpoint_model_name).param.yaml"/>
         <arg name="ml_package_param_path" value="$(var ml_package_param_path)/lidar_centerpoint_ml_package.param.yaml"/>
         <arg name="class_remapper_param_path" value="$(var centerpoint_model_path)/detection_class_remapper.param.yaml"/>
 
diff --git a/perception/image_projection_based_fusion/launch/pointpainting_fusion.launch.xml b/perception/image_projection_based_fusion/launch/pointpainting_fusion.launch.xml
index 2bce8b3dc97aa..2cd6130cf1be3 100644
--- a/perception/image_projection_based_fusion/launch/pointpainting_fusion.launch.xml
+++ b/perception/image_projection_based_fusion/launch/pointpainting_fusion.launch.xml
@@ -21,12 +21,9 @@
   <arg name="data_path" default="$(env HOME)/autoware_data" description="packages data and artifacts directory path"/>
   <arg name="model_name" default="pointpainting" description="options: `pointpainting`"/>
   <arg name="model_path" default="$(var data_path)/image_projection_based_fusion"/>
-  <arg name="model_param_path" default="$(find-pkg-share image_projection_based_fusion)/config/$(var model_name).param.yaml"/>
-  <arg
-    name="ml_package_param_path"
-    default="/home/autoware/autoware/src/launcher/autoware_launch/autoware_launch/config/perception/object_recognition/detection/lidar_model/pointpainting_ml_package.param.yaml"
-  />
-  <arg name="class_remapper_param_path" default="$(find-pkg-share lidar_centerpoint)/config/detection_class_remapper.param.yaml"/>
+  <arg name="model_param_path" default="$(var model_path)/$(var model_name).param.yaml"/>
+  <arg name="ml_package_param_path" default="$(find-pkg-share image_projection_based_fusion)/config/pointpainting_ml_package.param.yaml"/>
+  <arg name="class_remapper_param_path" default="$(var model_path)/detection_class_remapper.param.yaml"/>
   <arg name="sync_param_path" default="$(find-pkg-share image_projection_based_fusion)/config/roi_sync.param.yaml"/>
 
   <arg name="use_pointcloud_container" default="false" description="use pointcloud_container"/>
diff --git a/perception/lidar_centerpoint/launch/lidar_centerpoint.launch.xml b/perception/lidar_centerpoint/launch/lidar_centerpoint.launch.xml
index e27f630f57e82..09d248974cd21 100644
--- a/perception/lidar_centerpoint/launch/lidar_centerpoint.launch.xml
+++ b/perception/lidar_centerpoint/launch/lidar_centerpoint.launch.xml
@@ -5,9 +5,9 @@
   <arg name="data_path" default="$(env HOME)/autoware_data" description="packages data and artifacts directory path"/>
   <arg name="model_name" default="centerpoint_tiny" description="options: `centerpoint` or `centerpoint_tiny`"/>
   <arg name="model_path" default="$(var data_path)/lidar_centerpoint"/>
-  <arg name="model_param_path" default="$(find-pkg-share lidar_centerpoint)/config/$(var model_name).param.yaml"/>
+  <arg name="model_param_path" default="$(var model_path)/$(var model_name).param.yaml"/>
   <arg name="ml_package_param_path" default="$(find-pkg-share lidar_centerpoint)/config/lidar_centerpoint_ml_package.param.yaml"/>
-  <arg name="class_remapper_param_path" default="$(find-pkg-share lidar_centerpoint)/config/detection_class_remapper.param.yaml"/>
+  <arg name="class_remapper_param_path" default="$(var model_path)/detection_class_remapper.param.yaml"/>
 
   <arg name="use_pointcloud_container" default="false" description="use pointcloud_container"/>
   <arg name="pointcloud_container_name" default="pointcloud_container" description="pointcloud_container name"/>

From 0109746b212f7989e1685365f5e6d54ae89d455e Mon Sep 17 00:00:00 2001
From: badai-nguyen <dai.nguyen@tier4.jp>
Date: Thu, 14 Mar 2024 00:57:52 +0900
Subject: [PATCH 05/20] chore: rearrange params

Signed-off-by: badai-nguyen <dai.nguyen@tier4.jp>
---
 .../config/pointpainting.param.yaml                       | 6 +++---
 .../config/pointpainting_ml_package.param.yaml            | 6 +++---
 .../src/pointpainting_fusion/node.cpp                     | 4 ++--
 .../lidar_centerpoint/config/centerpoint.param.yaml       | 7 -------
 .../lidar_centerpoint/config/centerpoint_tiny.param.yaml  | 7 -------
 .../config/lidar_centerpoint_ml_package.param.yaml        | 8 ++++++++
 6 files changed, 16 insertions(+), 22 deletions(-)

diff --git a/perception/image_projection_based_fusion/config/pointpainting.param.yaml b/perception/image_projection_based_fusion/config/pointpainting.param.yaml
index 25de68e50c6f1..dcf90eca8a045 100755
--- a/perception/image_projection_based_fusion/config/pointpainting.param.yaml
+++ b/perception/image_projection_based_fusion/config/pointpainting.param.yaml
@@ -1,6 +1,5 @@
 /**:
   ros__parameters:
-    trt_precision: fp16
     model_params:
       class_names: ["CAR", "TRUCK", "BUS", "BICYCLE", "PEDESTRIAN"]
       paint_class_names: ["CAR", "BICYCLE", "PEDESTRIAN"]
@@ -10,8 +9,9 @@
       voxel_size: [0.32, 0.32, 8.0]
       downsample_factor: 1
       encoder_in_feature_size: 12
-      yaw_norm_thresholds: [0.3, 0.3, 0.3, 0.3, 0.0]
-      has_twist: false
     densification_params:
       world_frame_id: "map"
       num_past_frames: 0
+    omp_params:
+      # omp params
+      num_threads: 1
diff --git a/perception/image_projection_based_fusion/config/pointpainting_ml_package.param.yaml b/perception/image_projection_based_fusion/config/pointpainting_ml_package.param.yaml
index 83d2a0fed26b2..5183d4a44e1b9 100644
--- a/perception/image_projection_based_fusion/config/pointpainting_ml_package.param.yaml
+++ b/perception/image_projection_based_fusion/config/pointpainting_ml_package.param.yaml
@@ -5,6 +5,7 @@
     head_onnx_path: "$(var model_path)/pts_backbone_neck_head_$(var model_name).onnx"
     head_engine_path: "$(var model_path)/pts_backbone_neck_head_$(var model_name).engine"
     build_only: false
+    trt_precision: fp16
     post_process_params:
       # post-process params
       circle_nms_dist_threshold: 0.3
@@ -12,6 +13,5 @@
       iou_nms_search_distance_2d: 10.0
       iou_nms_threshold: 0.1
       score_threshold: 0.35
-    omp_params:
-      # omp params
-      num_threads: 1
+      yaw_norm_thresholds: [0.3, 0.3, 0.3, 0.3, 0.0]
+      has_twist: false
diff --git a/perception/image_projection_based_fusion/src/pointpainting_fusion/node.cpp b/perception/image_projection_based_fusion/src/pointpainting_fusion/node.cpp
index 14783a46ba8b4..cf93feb207f9d 100644
--- a/perception/image_projection_based_fusion/src/pointpainting_fusion/node.cpp
+++ b/perception/image_projection_based_fusion/src/pointpainting_fusion/node.cpp
@@ -102,7 +102,7 @@ PointPaintingFusionNode::PointPaintingFusionNode(const rclcpp::NodeOptions & opt
   const float circle_nms_dist_threshold = static_cast<float>(
     this->declare_parameter<double>("post_process_params.circle_nms_dist_threshold"));
   const auto yaw_norm_thresholds =
-    this->declare_parameter<std::vector<double>>("model_params.yaw_norm_thresholds");
+    this->declare_parameter<std::vector<double>>("post_process_params.yaw_norm_thresholds");
   // densification param
   const std::string densification_world_frame_id =
     this->declare_parameter<std::string>("densification_params.world_frame_id");
@@ -140,7 +140,7 @@ PointPaintingFusionNode::PointPaintingFusionNode(const rclcpp::NodeOptions & opt
       isClassTable_.erase(cls);
     }
   }
-  has_twist_ = this->declare_parameter<bool>("model_params.has_twist");
+  has_twist_ = this->declare_parameter<bool>("post_process_params.has_twist");
   const std::size_t point_feature_size = static_cast<std::size_t>(
     this->declare_parameter<std::int64_t>("model_params.point_feature_size"));
   const std::size_t max_voxel_size =
diff --git a/perception/lidar_centerpoint/config/centerpoint.param.yaml b/perception/lidar_centerpoint/config/centerpoint.param.yaml
index db0683e2a77d0..398b28d08d025 100644
--- a/perception/lidar_centerpoint/config/centerpoint.param.yaml
+++ b/perception/lidar_centerpoint/config/centerpoint.param.yaml
@@ -7,12 +7,5 @@
     voxel_size: [0.32, 0.32, 10.0]
     downsample_factor: 1
     encoder_in_feature_size: 9
-    # post-process params
-    circle_nms_dist_threshold: 0.5
-    iou_nms_target_class_names: ["CAR"]
-    iou_nms_search_distance_2d: 10.0
-    iou_nms_threshold: 0.1
-    yaw_norm_thresholds: [0.3, 0.3, 0.3, 0.3, 0.0]
-    trt_precision: fp16
     densification_num_past_frames: 1
     densification_world_frame_id: map
diff --git a/perception/lidar_centerpoint/config/centerpoint_tiny.param.yaml b/perception/lidar_centerpoint/config/centerpoint_tiny.param.yaml
index 9288a5fc9d05d..b7055936b098f 100644
--- a/perception/lidar_centerpoint/config/centerpoint_tiny.param.yaml
+++ b/perception/lidar_centerpoint/config/centerpoint_tiny.param.yaml
@@ -7,12 +7,5 @@
     voxel_size: [0.32, 0.32, 10.0]
     downsample_factor: 2
     encoder_in_feature_size: 9
-    # post-process params
-    circle_nms_dist_threshold: 0.5
-    iou_nms_target_class_names: ["CAR"]
-    iou_nms_search_distance_2d: 10.0
-    iou_nms_threshold: 0.1
-    yaw_norm_thresholds: [0.3, 0.3, 0.3, 0.3, 0.0]
-    trt_precision: fp16
     densification_num_past_frames: 1
     densification_world_frame_id: map
diff --git a/perception/lidar_centerpoint/config/lidar_centerpoint_ml_package.param.yaml b/perception/lidar_centerpoint/config/lidar_centerpoint_ml_package.param.yaml
index 06b15cf4c1bf0..60576d3139523 100644
--- a/perception/lidar_centerpoint/config/lidar_centerpoint_ml_package.param.yaml
+++ b/perception/lidar_centerpoint/config/lidar_centerpoint_ml_package.param.yaml
@@ -9,3 +9,11 @@
     encoder_engine_path: "$(var model_path)/pts_voxel_encoder_$(var model_name).engine"
     head_onnx_path: "$(var model_path)/pts_backbone_neck_head_$(var model_name).onnx"
     head_engine_path: "$(var model_path)/pts_backbone_neck_head_$(var model_name).engine"
+
+    # post-process params
+    circle_nms_dist_threshold: 0.5
+    iou_nms_target_class_names: ["CAR"]
+    iou_nms_search_distance_2d: 10.0
+    iou_nms_threshold: 0.1
+    yaw_norm_thresholds: [0.3, 0.3, 0.3, 0.3, 0.0]
+    trt_precision: fp16

From 675a28e17e6358532ff4ea0e003b1b6d05486ba9 Mon Sep 17 00:00:00 2001
From: badai-nguyen <dai.nguyen@tier4.jp>
Date: Thu, 14 Mar 2024 08:56:46 +0900
Subject: [PATCH 06/20] fix: json-schema-check error

Signed-off-by: badai-nguyen <dai.nguyen@tier4.jp>
---
 .../schema/pointpainting.schema.json          | 52 +----------
 .../pointpainting_ml_package.schema.json      | 86 +++++++++++++++++++
 2 files changed, 87 insertions(+), 51 deletions(-)
 create mode 100644 perception/image_projection_based_fusion/schema/pointpainting_ml_package.schema.json

diff --git a/perception/image_projection_based_fusion/schema/pointpainting.schema.json b/perception/image_projection_based_fusion/schema/pointpainting.schema.json
index 036628d72e70a..1d1fcb5b0ad16 100644
--- a/perception/image_projection_based_fusion/schema/pointpainting.schema.json
+++ b/perception/image_projection_based_fusion/schema/pointpainting.schema.json
@@ -52,18 +52,6 @@
               "type": "integer",
               "description": "A size of encoder input feature channels.",
               "default": 12
-            },
-            "yaw_norm_thresholds": {
-              "type": "array",
-              "description": "An array of distance threshold values of norm of yaw [rad].",
-              "default": [0.3, 0.3, 0.3, 0.3, 0.0],
-              "minimum": 0.0,
-              "maximum": 1.0
-            },
-            "has_twist": {
-              "type": "boolean",
-              "description": "Indicates whether the model outputs twist value.",
-              "default": false
             }
           }
         },
@@ -84,44 +72,6 @@
             }
           }
         },
-        "post_process_params": {
-          "type": "object",
-          "properties": {
-            "score_threshold": {
-              "type": "number",
-              "description": "A threshold value of existence probability score, all of objects with score less than this threshold are ignored.",
-              "default": 0.4,
-              "minimum": 0.0,
-              "maximum": 1.0
-            },
-            "circle_nms_dist_threshold": {
-              "type": "number",
-              "description": "",
-              "default": 0.3,
-              "minimum": 0.0,
-              "maximum": 1.0
-            },
-            "iou_nms_target_class_names": {
-              "type": "array",
-              "description": "An array of class names to be target in NMS.",
-              "default": ["CAR"],
-              "uniqueItems": true
-            },
-            "iou_search_distance_2d": {
-              "type": "number",
-              "description": "A maximum distance value to search the nearest objects.",
-              "default": 10.0,
-              "minimum": 0.0
-            },
-            "iou_nms_threshold": {
-              "type": "number",
-              "description": "A threshold value of NMS using IoU score.",
-              "default": 0.1,
-              "minimum": 0.0,
-              "maximum": 1.0
-            }
-          }
-        },
         "omp_params": {
           "type": "object",
           "properties": {
@@ -134,7 +84,7 @@
           }
         }
       },
-      "required": ["model_params", "densification_params", "post_process_params", "omp_params"]
+      "required": ["model_params", "densification_params", "omp_params"]
     }
   },
   "properties": {
diff --git a/perception/image_projection_based_fusion/schema/pointpainting_ml_package.schema.json b/perception/image_projection_based_fusion/schema/pointpainting_ml_package.schema.json
new file mode 100644
index 0000000000000..272ad26e38b30
--- /dev/null
+++ b/perception/image_projection_based_fusion/schema/pointpainting_ml_package.schema.json
@@ -0,0 +1,86 @@
+{
+  "$schema": "http://json-schema.org/draft-07/schema#",
+  "title": "Parameters for Point Painting Fusion Node",
+  "type": "object",
+  "definitions": {
+    "pointpainting_ml_package": {
+      "type": "object",
+      "properties": {
+        "build_only": {
+          "type": "boolean",
+          "description": "shutdown the node after TensorRT engine file is built.",
+          "default": false
+        },
+        "trt_precision": {
+          "type": "string",
+          "description": "TensorRT inference precision.",
+          "default": "fp16",
+          "enum": ["fp32", "fp16"]
+        },
+        "post_process_params": {
+          "type": "object",
+          "properties": {
+            "score_threshold": {
+              "type": "number",
+              "description": "A threshold value of existence probability score, all of objects with score less than this threshold are ignored.",
+              "default": 0.4,
+              "minimum": 0.0,
+              "maximum": 1.0
+            },
+            "yaw_norm_thresholds": {
+              "type": "array",
+              "description": "An array of distance threshold values of norm of yaw [rad].",
+              "default": [0.3, 0.3, 0.3, 0.3, 0.0],
+              "minimum": 0.0,
+              "maximum": 1.0
+            },
+            "circle_nms_dist_threshold": {
+              "type": "number",
+              "description": "",
+              "default": 0.3,
+              "minimum": 0.0,
+              "maximum": 1.0
+            },
+            "iou_nms_target_class_names": {
+              "type": "array",
+              "description": "An array of class names to be target in NMS.",
+              "default": ["CAR"],
+              "uniqueItems": true
+            },
+            "iou_search_distance_2d": {
+              "type": "number",
+              "description": "A maximum distance value to search the nearest objects.",
+              "default": 10.0,
+              "minimum": 0.0
+            },
+            "iou_nms_threshold": {
+              "type": "number",
+              "description": "A threshold value of NMS using IoU score.",
+              "default": 0.1,
+              "minimum": 0.0,
+              "maximum": 1.0
+            },
+            "has_twist": {
+              "type": "boolean",
+              "description": "Indicates whether the model outputs twist value.",
+              "default": false
+            }
+          }
+        }
+      },
+      "required": ["post_process_params"]
+    }
+  },
+  "properties": {
+    "/**": {
+      "type": "object",
+      "properties": {
+        "ros__parameters": {
+          "$ref": "#/definitions/pointpainting_ml_package"
+        }
+      },
+      "required": ["ros__parameters"]
+    }
+  },
+  "required": ["/**"]
+}

From 0ef230e691771ca148dbe47cb86d3238529cfd20 Mon Sep 17 00:00:00 2001
From: badai-nguyen <dai.nguyen@tier4.jp>
Date: Thu, 14 Mar 2024 18:29:06 +0900
Subject: [PATCH 07/20] fix: default param

Signed-off-by: badai-nguyen <dai.nguyen@tier4.jp>
---
 .../config/pointpainting.param.yaml                             | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/perception/image_projection_based_fusion/config/pointpainting.param.yaml b/perception/image_projection_based_fusion/config/pointpainting.param.yaml
index dcf90eca8a045..137925bd95830 100755
--- a/perception/image_projection_based_fusion/config/pointpainting.param.yaml
+++ b/perception/image_projection_based_fusion/config/pointpainting.param.yaml
@@ -11,7 +11,7 @@
       encoder_in_feature_size: 12
     densification_params:
       world_frame_id: "map"
-      num_past_frames: 0
+      num_past_frames: 1
     omp_params:
       # omp params
       num_threads: 1

From b81ec45ca2d7cf954e6c5a682a30c21e769ae76f Mon Sep 17 00:00:00 2001
From: badai-nguyen <dai.nguyen@tier4.jp>
Date: Sun, 17 Mar 2024 21:43:44 +0900
Subject: [PATCH 08/20] refactor: rename param file

Signed-off-by: badai-nguyen <dai.nguyen@tier4.jp>
---
 .../detector/camera_lidar_detector.launch.xml |   4 +-
 .../detector/lidar_dnn_detector.launch.xml    |   6 +-
 .../launch/perception.launch.xml              |   4 +-
 .../config/pointpainting.param.yaml           |  27 ++--
 .../pointpainting_ml_package.param.yaml       |  27 ++--
 .../docs/pointpainting-fusion.md              |   4 +-
 .../launch/pointpainting_fusion.launch.xml    |   4 +-
 .../schema/pointpainting.schema.json          | 114 ++++++++---------
 .../pointpainting_ml_package.schema.json      | 116 ++++++++++--------
 .../config/centerpoint.param.yaml             |  25 ++--
 .../config/centerpoint_ml_package.param.yaml  |   9 ++
 ...=> centerpoint_tiny_ml_package.param.yaml} |   2 -
 .../lidar_centerpoint_ml_package.param.yaml   |  19 ---
 .../launch/lidar_centerpoint.launch.xml       |   4 +-
 14 files changed, 182 insertions(+), 183 deletions(-)
 mode change 100755 => 100644 perception/image_projection_based_fusion/config/pointpainting.param.yaml
 mode change 100644 => 100755 perception/image_projection_based_fusion/config/pointpainting_ml_package.param.yaml
 create mode 100644 perception/lidar_centerpoint/config/centerpoint_ml_package.param.yaml
 rename perception/lidar_centerpoint/config/{centerpoint_tiny.param.yaml => centerpoint_tiny_ml_package.param.yaml} (79%)
 delete mode 100644 perception/lidar_centerpoint/config/lidar_centerpoint_ml_package.param.yaml

diff --git a/launch/tier4_perception_launch/launch/object_recognition/detection/detector/camera_lidar_detector.launch.xml b/launch/tier4_perception_launch/launch/object_recognition/detection/detector/camera_lidar_detector.launch.xml
index 55153820ed6bc..758918e32f1d5 100644
--- a/launch/tier4_perception_launch/launch/object_recognition/detection/detector/camera_lidar_detector.launch.xml
+++ b/launch/tier4_perception_launch/launch/object_recognition/detection/detector/camera_lidar_detector.launch.xml
@@ -82,8 +82,8 @@
       <arg name="output/objects" value="objects"/>
       <arg name="model_name" value="$(var lidar_detection_model)"/>
       <arg name="model_path" value="$(var pointpainting_model_path)"/>
-      <arg name="model_param_path" value="$(var model_path)/$(var lidar_detection_model).param.yaml"/>
-      <arg name="ml_package_param_path" value="$(var ml_package_param_path)/pointpainting_ml_package.param.yaml"/>
+      <arg name="model_param_path" value="$(var lidar_model_param_path)/pointpainting.param.yaml"/>
+      <arg name="ml_package_param_path" value="$(var model_path)/pointpainting_ml_package.param.yaml"/>
       <arg name="class_remapper_param_path" value="$(var model_path)/detection_class_remapper.param.yaml"/>
 
       <arg name="use_pointcloud_container" value="true"/>
diff --git a/launch/tier4_perception_launch/launch/object_recognition/detection/detector/lidar_dnn_detector.launch.xml b/launch/tier4_perception_launch/launch/object_recognition/detection/detector/lidar_dnn_detector.launch.xml
index 20f0ed32489f3..f8fad9ef056d8 100644
--- a/launch/tier4_perception_launch/launch/object_recognition/detection/detector/lidar_dnn_detector.launch.xml
+++ b/launch/tier4_perception_launch/launch/object_recognition/detection/detector/lidar_dnn_detector.launch.xml
@@ -16,9 +16,9 @@
         <arg name="output/objects" value="objects"/>
         <arg name="model_name" value="$(var centerpoint_model_name)"/>
         <arg name="model_path" value="$(var centerpoint_model_path)"/>
-        <arg name="model_param_path" value="$(var model_path)/$(var centerpoint_model_name).param.yaml"/>
-        <arg name="ml_package_param_path" value="$(var ml_package_param_path)/lidar_centerpoint_ml_package.param.yaml"/>
-        <arg name="class_remapper_param_path" value="$(var centerpoint_model_path)/detection_class_remapper.param.yaml"/>
+        <arg name="model_param_path" value="$(var lidar_model_param_path)/centerpoint.param.yaml"/>
+        <arg name="ml_package_param_path" value="$(var model_path)/$(var centerpoint_model_name)_ml_package.param.yaml"/>
+        <arg name="class_remapper_param_path" value="$(var model_path)/detection_class_remapper.param.yaml"/>
 
         <arg name="use_pointcloud_container" value="true"/>
         <arg name="pointcloud_container_name" value="$(var pointcloud_container_name)"/>
diff --git a/launch/tier4_perception_launch/launch/perception.launch.xml b/launch/tier4_perception_launch/launch/perception.launch.xml
index 2556c14cae244..c6213ee313c2b 100644
--- a/launch/tier4_perception_launch/launch/perception.launch.xml
+++ b/launch/tier4_perception_launch/launch/perception.launch.xml
@@ -13,7 +13,7 @@
   <arg name="object_recognition_detection_roi_cluster_fusion_param_path"/>
   <arg name="object_recognition_detection_roi_pointcloud_fusion_param_path"/>
   <arg name="object_recognition_detection_roi_detected_object_fusion_param_path"/>
-  <arg name="object_recognition_detection_ml_package_param_path"/>
+  <arg name="object_recognition_detection_lidar_model_param_path"/>
   <arg name="object_recognition_detection_radar_lanelet_filtering_range_param_path"/>
   <arg name="object_recognition_detection_radar_crossing_objects_noise_filter_param_path"/>
   <arg name="object_recognition_detection_radar_object_clustering_param_path"/>
@@ -183,7 +183,7 @@
           <arg name="roi_cluster_fusion_param_path" value="$(var object_recognition_detection_roi_cluster_fusion_param_path)"/>
           <arg name="roi_pointcloud_fusion_param_path" value="$(var object_recognition_detection_roi_pointcloud_fusion_param_path)"/>
           <arg name="roi_detected_object_fusion_param_path" value="$(var object_recognition_detection_roi_detected_object_fusion_param_path)"/>
-          <arg name="ml_package_param_path" value="$(var object_recognition_detection_ml_package_param_path)"/>
+          <arg name="lidar_model_param_path" value="$(var object_recognition_detection_lidar_model_param_path)"/>
           <arg name="euclidean_param_path" value="$(var object_recognition_detection_euclidean_cluster_param_path)"/>
           <arg name="outlier_param_path" value="$(var object_recognition_detection_outlier_param_path)"/>
           <arg name="voxel_grid_based_euclidean_param_path" value="$(var object_recognition_detection_voxel_grid_based_euclidean_cluster_param_path)"/>
diff --git a/perception/image_projection_based_fusion/config/pointpainting.param.yaml b/perception/image_projection_based_fusion/config/pointpainting.param.yaml
old mode 100755
new mode 100644
index 137925bd95830..224924e5de52b
--- a/perception/image_projection_based_fusion/config/pointpainting.param.yaml
+++ b/perception/image_projection_based_fusion/config/pointpainting.param.yaml
@@ -1,17 +1,20 @@
 /**:
   ros__parameters:
-    model_params:
-      class_names: ["CAR", "TRUCK", "BUS", "BICYCLE", "PEDESTRIAN"]
-      paint_class_names: ["CAR", "BICYCLE", "PEDESTRIAN"]
-      point_feature_size: 7 # x, y, z, time-lag and car, pedestrian, bicycle
-      max_voxel_size: 40000
-      point_cloud_range: [-121.6, -76.8, -3.0, 121.6, 76.8, 5.0]
-      voxel_size: [0.32, 0.32, 8.0]
-      downsample_factor: 1
-      encoder_in_feature_size: 12
+    encoder_onnx_path: "$(var model_path)/pts_voxel_encoder_$(var model_name).onnx"
+    encoder_engine_path: "$(var model_path)/pts_voxel_encoder_$(var model_name).engine"
+    head_onnx_path: "$(var model_path)/pts_backbone_neck_head_$(var model_name).onnx"
+    head_engine_path: "$(var model_path)/pts_backbone_neck_head_$(var model_name).engine"
+    build_only: false
+    trt_precision: fp16
+    post_process_params:
+      # post-process params
+      circle_nms_dist_threshold: 0.3
+      iou_nms_target_class_names: ["CAR"]
+      iou_nms_search_distance_2d: 10.0
+      iou_nms_threshold: 0.1
+      score_threshold: 0.35
+      yaw_norm_thresholds: [0.3, 0.3, 0.3, 0.3, 0.0]
+      has_twist: false
     densification_params:
       world_frame_id: "map"
       num_past_frames: 1
-    omp_params:
-      # omp params
-      num_threads: 1
diff --git a/perception/image_projection_based_fusion/config/pointpainting_ml_package.param.yaml b/perception/image_projection_based_fusion/config/pointpainting_ml_package.param.yaml
old mode 100644
new mode 100755
index 5183d4a44e1b9..61d2124afb17a
--- a/perception/image_projection_based_fusion/config/pointpainting_ml_package.param.yaml
+++ b/perception/image_projection_based_fusion/config/pointpainting_ml_package.param.yaml
@@ -1,17 +1,14 @@
 /**:
   ros__parameters:
-    encoder_onnx_path: "$(var model_path)/pts_voxel_encoder_$(var model_name).onnx"
-    encoder_engine_path: "$(var model_path)/pts_voxel_encoder_$(var model_name).engine"
-    head_onnx_path: "$(var model_path)/pts_backbone_neck_head_$(var model_name).onnx"
-    head_engine_path: "$(var model_path)/pts_backbone_neck_head_$(var model_name).engine"
-    build_only: false
-    trt_precision: fp16
-    post_process_params:
-      # post-process params
-      circle_nms_dist_threshold: 0.3
-      iou_nms_target_class_names: ["CAR"]
-      iou_nms_search_distance_2d: 10.0
-      iou_nms_threshold: 0.1
-      score_threshold: 0.35
-      yaw_norm_thresholds: [0.3, 0.3, 0.3, 0.3, 0.0]
-      has_twist: false
+    model_params:
+      class_names: ["CAR", "TRUCK", "BUS", "BICYCLE", "PEDESTRIAN"]
+      paint_class_names: ["CAR", "BICYCLE", "PEDESTRIAN"]
+      point_feature_size: 7 # x, y, z, time-lag and car, pedestrian, bicycle
+      max_voxel_size: 40000
+      point_cloud_range: [-121.6, -76.8, -3.0, 121.6, 76.8, 5.0]
+      voxel_size: [0.32, 0.32, 8.0]
+      downsample_factor: 1
+      encoder_in_feature_size: 12
+    omp_params:
+      # omp params
+      num_threads: 1
diff --git a/perception/image_projection_based_fusion/docs/pointpainting-fusion.md b/perception/image_projection_based_fusion/docs/pointpainting-fusion.md
index d50a3f97390ca..5f5e45941c549 100644
--- a/perception/image_projection_based_fusion/docs/pointpainting-fusion.md
+++ b/perception/image_projection_based_fusion/docs/pointpainting-fusion.md
@@ -36,8 +36,8 @@ The lidar points are projected onto the output of an image-only 2d object detect
 | Name                            | Type   | Default Value | Description                                                 |
 | ------------------------------- | ------ | ------------- | ----------------------------------------------------------- |
 | `score_threshold`               | float  | `0.4`         | detected objects with score less than threshold are ignored |
-| `densification_world_frame_id`  | string | `map`         | the world frame id to fuse multi-frame pointcloud           |
-| `densification_num_past_frames` | int    | `0`           | the number of past frames to fuse with the current frame    |
+| `densification_params.world_frame_id`  | string | `map`         | the world frame id to fuse multi-frame pointcloud           |
+| `densification_params.num_past_frames` | int    | `1`           | the number of past frames to fuse with the current frame    |
 | `trt_precision`                 | string | `fp16`        | TensorRT inference precision: `fp32` or `fp16`              |
 | `encoder_onnx_path`             | string | `""`          | path to VoxelFeatureEncoder ONNX file                       |
 | `encoder_engine_path`           | string | `""`          | path to VoxelFeatureEncoder TensorRT Engine file            |
diff --git a/perception/image_projection_based_fusion/launch/pointpainting_fusion.launch.xml b/perception/image_projection_based_fusion/launch/pointpainting_fusion.launch.xml
index 2cd6130cf1be3..db6be77d2ff74 100644
--- a/perception/image_projection_based_fusion/launch/pointpainting_fusion.launch.xml
+++ b/perception/image_projection_based_fusion/launch/pointpainting_fusion.launch.xml
@@ -21,8 +21,8 @@
   <arg name="data_path" default="$(env HOME)/autoware_data" description="packages data and artifacts directory path"/>
   <arg name="model_name" default="pointpainting" description="options: `pointpainting`"/>
   <arg name="model_path" default="$(var data_path)/image_projection_based_fusion"/>
-  <arg name="model_param_path" default="$(find-pkg-share model_path)/$(var model_name).param.yaml"/>
-  <arg name="ml_package_param_path" default="$(find-pkg-share image_projection_based_fusion)/config/pointpainting_ml_package.param.yaml"/>
+  <arg name="model_param_path" default="$(find-pkg-share image_projection_based_fusion)/config/pointpainting.param.yaml"/>
+  <arg name="ml_package_param_path" default="$(var model_path)/$(var model_name)_ml_package.param.yaml"/>
   <arg name="class_remapper_param_path" default="$(var model_path)/detection_class_remapper.param.yaml"/>
   <arg name="sync_param_path" default="$(find-pkg-share image_projection_based_fusion)/config/roi_sync.param.yaml"/>
 
diff --git a/perception/image_projection_based_fusion/schema/pointpainting.schema.json b/perception/image_projection_based_fusion/schema/pointpainting.schema.json
index 1d1fcb5b0ad16..747e22a1379fa 100644
--- a/perception/image_projection_based_fusion/schema/pointpainting.schema.json
+++ b/perception/image_projection_based_fusion/schema/pointpainting.schema.json
@@ -6,85 +6,69 @@
     "pointpainting": {
       "type": "object",
       "properties": {
-        "model_params": {
+        "build_only": {
+          "type": "boolean",
+          "description": "shutdown the node after TensorRT engine file is built.",
+          "default": false
+        },
+        "trt_precision": {
+          "type": "string",
+          "description": "TensorRT inference precision.",
+          "default": "fp16",
+          "enum": ["fp32", "fp16"]
+        },
+        "post_process_params": {
           "type": "object",
-          "description": "Parameters for model configuration.",
           "properties": {
-            "class_names": {
-              "type": "array",
-              "description": "An array of class names will be predicted.",
-              "default": ["CAR", "TRUCK", "BUS", "BICYCLE", "PEDESTRIAN"],
-              "uniqueItems": true
+            "score_threshold": {
+              "type": "number",
+              "description": "A threshold value of existence probability score, all of objects with score less than this threshold are ignored.",
+              "default": 0.4,
+              "minimum": 0.0,
+              "maximum": 1.0
             },
-            "paint_class_names": {
+            "yaw_norm_thresholds": {
               "type": "array",
-              "description": "An array of class names will be painted by PointPainting",
-              "default": ["CAR", "BICYCLE", "PEDESTRIAN"],
-              "uniqueItems": true
+              "description": "An array of distance threshold values of norm of yaw [rad].",
+              "default": [0.3, 0.3, 0.3, 0.3, 0.0],
+              "items": { "type": "number",
+                "minimum": 0.0, "maximum": 1.0 }
             },
-            "point_feature_size": {
-              "type": "integer",
-              "description": "A number of channels of point feature layer.",
-              "default": 7
+            "circle_nms_dist_threshold": {
+              "type": "number",
+              "description": "",
+              "default": 0.3,
+              "minimum": 0.0,
+              "maximum": 1.0
             },
-            "max_voxel_size": {
-              "type": "integer",
-              "description": "A maximum size of voxel grid.",
-              "default": 40000
-            },
-            "point_cloud_range": {
+            "iou_nms_target_class_names": {
               "type": "array",
-              "description": "An array of distance ranges of each class, this must have same length with `class_names`.",
-              "default": [-121.6, -76.8, -3.0, 121.6, 76.8, 5.0]
+              "description": "An array of class names to be target in NMS.",
+              "default": ["CAR"],
+              "uniqueItems": true
             },
-            "voxel_size": {
-              "type": "array",
-              "description": "An array of voxel grid sizes for PointPainting, this must have same length with `paint_class_names`.",
-              "default": [0.32, 0.32, 8.0]
+            "iou_nms_search_distance_2d": {
+              "type": "number",
+              "description": "A maximum distance value to search the nearest objects.",
+              "default": 10.0,
+              "minimum": 0.0
             },
-            "down_sample_factor": {
-              "type": "integer",
-              "description": "A scale factor of downsampling points",
-              "default": 1,
-              "minimum": 1
+            "iou_nms_threshold": {
+              "type": "number",
+              "description": "A threshold value of NMS using IoU score.",
+              "default": 0.1,
+              "minimum": 0.0,
+              "maximum": 1.0
             },
-            "encoder_in_feature_size": {
-              "type": "integer",
-              "description": "A size of encoder input feature channels.",
-              "default": 12
-            }
-          }
-        },
-        "densification_params": {
-          "type": "object",
-          "description": "Parameters for pointcloud densification.",
-          "properties": {
-            "world_frame_id": {
-              "type": "string",
-              "description": "A name of frame id where world coordinates system is defined with respect to.",
-              "default": "map"
-            },
-            "num_past_frames": {
-              "type": "integer",
-              "description": "A number of past frames to be considered as same input frame.",
-              "default": 0,
-              "minimum": 0
-            }
-          }
-        },
-        "omp_params": {
-          "type": "object",
-          "properties": {
-            "num_threads": {
-              "type": "integer",
-              "description": "The number of threads that is set to the environment variable OMP_NUM_THREADS.",
-              "default": 1,
-              "minimum": 1
+            "has_twist": {
+              "type": "boolean",
+              "description": "Indicates whether the model outputs twist value.",
+              "default": false
             }
           }
         }
       },
-      "required": ["model_params", "densification_params", "omp_params"]
+      "required": ["post_process_params", "densification_params"]
     }
   },
   "properties": {
diff --git a/perception/image_projection_based_fusion/schema/pointpainting_ml_package.schema.json b/perception/image_projection_based_fusion/schema/pointpainting_ml_package.schema.json
index 272ad26e38b30..34ff35b96a2da 100644
--- a/perception/image_projection_based_fusion/schema/pointpainting_ml_package.schema.json
+++ b/perception/image_projection_based_fusion/schema/pointpainting_ml_package.schema.json
@@ -1,74 +1,90 @@
 {
   "$schema": "http://json-schema.org/draft-07/schema#",
-  "title": "Parameters for Point Painting Fusion Node",
+  "title": "Parameters for Point Painting ML model",
   "type": "object",
   "definitions": {
     "pointpainting_ml_package": {
       "type": "object",
       "properties": {
-        "build_only": {
-          "type": "boolean",
-          "description": "shutdown the node after TensorRT engine file is built.",
-          "default": false
-        },
-        "trt_precision": {
-          "type": "string",
-          "description": "TensorRT inference precision.",
-          "default": "fp16",
-          "enum": ["fp32", "fp16"]
-        },
-        "post_process_params": {
+        "model_params": {
           "type": "object",
+          "description": "Parameters for model configuration.",
           "properties": {
-            "score_threshold": {
-              "type": "number",
-              "description": "A threshold value of existence probability score, all of objects with score less than this threshold are ignored.",
-              "default": 0.4,
-              "minimum": 0.0,
-              "maximum": 1.0
+            "class_names": {
+              "type": "array",
+              "description": "An array of class names will be predicted.",
+              "default": ["CAR", "TRUCK", "BUS", "BICYCLE", "PEDESTRIAN"],
+              "uniqueItems": true
             },
-            "yaw_norm_thresholds": {
+            "paint_class_names": {
               "type": "array",
-              "description": "An array of distance threshold values of norm of yaw [rad].",
-              "default": [0.3, 0.3, 0.3, 0.3, 0.0],
-              "minimum": 0.0,
-              "maximum": 1.0
+              "description": "An array of class names will be painted by PointPainting",
+              "default": ["CAR", "BICYCLE", "PEDESTRIAN"],
+              "uniqueItems": true
             },
-            "circle_nms_dist_threshold": {
-              "type": "number",
-              "description": "",
-              "default": 0.3,
-              "minimum": 0.0,
-              "maximum": 1.0
+            "point_feature_size": {
+              "type": "integer",
+              "description": "A number of channels of point feature layer.",
+              "default": 7
             },
-            "iou_nms_target_class_names": {
+            "max_voxel_size": {
+              "type": "integer",
+              "description": "A maximum size of voxel grid.",
+              "default": 40000
+            },
+            "point_cloud_range": {
               "type": "array",
-              "description": "An array of class names to be target in NMS.",
-              "default": ["CAR"],
-              "uniqueItems": true
+              "description": "An array of distance ranges of each class, this must have same length with `class_names`.",
+              "default": [-121.6, -76.8, -3.0, 121.6, 76.8, 5.0]
             },
-            "iou_search_distance_2d": {
-              "type": "number",
-              "description": "A maximum distance value to search the nearest objects.",
-              "default": 10.0,
-              "minimum": 0.0
+            "voxel_size": {
+              "type": "array",
+              "description": "An array of voxel grid sizes for PointPainting, this must have same length with `paint_class_names`.",
+              "default": [0.32, 0.32, 8.0]
             },
-            "iou_nms_threshold": {
-              "type": "number",
-              "description": "A threshold value of NMS using IoU score.",
-              "default": 0.1,
-              "minimum": 0.0,
-              "maximum": 1.0
+            "down_sample_factor": {
+              "type": "integer",
+              "description": "A scale factor of downsampling points",
+              "default": 1,
+              "minimum": 1
             },
-            "has_twist": {
-              "type": "boolean",
-              "description": "Indicates whether the model outputs twist value.",
-              "default": false
+            "encoder_in_feature_size": {
+              "type": "integer",
+              "description": "A size of encoder input feature channels.",
+              "default": 12
+            }
+          }
+        },
+        "densification_params": {
+          "type": "object",
+          "description": "Parameters for pointcloud densification.",
+          "properties": {
+            "world_frame_id": {
+              "type": "string",
+              "description": "A name of frame id where world coordinates system is defined with respect to.",
+              "default": "map"
+            },
+            "num_past_frames": {
+              "type": "integer",
+              "description": "A number of past frames to be considered as same input frame.",
+              "default": 1,
+              "minimum": 0
+            }
+          }
+        },
+        "omp_params": {
+          "type": "object",
+          "properties": {
+            "num_threads": {
+              "type": "integer",
+              "description": "The number of threads that is set to the environment variable OMP_NUM_THREADS.",
+              "default": 1,
+              "minimum": 1
             }
           }
         }
       },
-      "required": ["post_process_params"]
+      "required": ["model_params", "omp_params"]
     }
   },
   "properties": {
diff --git a/perception/lidar_centerpoint/config/centerpoint.param.yaml b/perception/lidar_centerpoint/config/centerpoint.param.yaml
index 398b28d08d025..fb2023e0172ac 100644
--- a/perception/lidar_centerpoint/config/centerpoint.param.yaml
+++ b/perception/lidar_centerpoint/config/centerpoint.param.yaml
@@ -1,11 +1,22 @@
 /**:
   ros__parameters:
-    class_names: ["CAR", "TRUCK", "BUS", "BICYCLE", "PEDESTRIAN"]
-    point_feature_size: 4
-    max_voxel_size: 40000
-    point_cloud_range: [-76.8, -76.8, -4.0, 76.8, 76.8, 6.0]
-    voxel_size: [0.32, 0.32, 10.0]
-    downsample_factor: 1
-    encoder_in_feature_size: 9
+    score_threshold: 0.35
+    has_twist: false
+    build_only: false
+
+    # weight files
+    encoder_onnx_path: "$(var model_path)/pts_voxel_encoder_$(var model_name).onnx"
+    encoder_engine_path: "$(var model_path)/pts_voxel_encoder_$(var model_name).engine"
+    head_onnx_path: "$(var model_path)/pts_backbone_neck_head_$(var model_name).onnx"
+    head_engine_path: "$(var model_path)/pts_backbone_neck_head_$(var model_name).engine"
+
+    # post-process params
+    circle_nms_dist_threshold: 0.5
+    iou_nms_target_class_names: ["CAR"]
+    iou_nms_search_distance_2d: 10.0
+    iou_nms_threshold: 0.1
+    yaw_norm_thresholds: [0.3, 0.3, 0.3, 0.3, 0.0]
+    trt_precision: fp16
+
     densification_num_past_frames: 1
     densification_world_frame_id: map
diff --git a/perception/lidar_centerpoint/config/centerpoint_ml_package.param.yaml b/perception/lidar_centerpoint/config/centerpoint_ml_package.param.yaml
new file mode 100644
index 0000000000000..22ef65101287c
--- /dev/null
+++ b/perception/lidar_centerpoint/config/centerpoint_ml_package.param.yaml
@@ -0,0 +1,9 @@
+/**:
+  ros__parameters:
+    class_names: ["CAR", "TRUCK", "BUS", "BICYCLE", "PEDESTRIAN"]
+    point_feature_size: 4
+    max_voxel_size: 40000
+    point_cloud_range: [-76.8, -76.8, -4.0, 76.8, 76.8, 6.0]
+    voxel_size: [0.32, 0.32, 10.0]
+    downsample_factor: 1
+    encoder_in_feature_size: 9
diff --git a/perception/lidar_centerpoint/config/centerpoint_tiny.param.yaml b/perception/lidar_centerpoint/config/centerpoint_tiny_ml_package.param.yaml
similarity index 79%
rename from perception/lidar_centerpoint/config/centerpoint_tiny.param.yaml
rename to perception/lidar_centerpoint/config/centerpoint_tiny_ml_package.param.yaml
index b7055936b098f..69b8e7403d370 100644
--- a/perception/lidar_centerpoint/config/centerpoint_tiny.param.yaml
+++ b/perception/lidar_centerpoint/config/centerpoint_tiny_ml_package.param.yaml
@@ -7,5 +7,3 @@
     voxel_size: [0.32, 0.32, 10.0]
     downsample_factor: 2
     encoder_in_feature_size: 9
-    densification_num_past_frames: 1
-    densification_world_frame_id: map
diff --git a/perception/lidar_centerpoint/config/lidar_centerpoint_ml_package.param.yaml b/perception/lidar_centerpoint/config/lidar_centerpoint_ml_package.param.yaml
deleted file mode 100644
index 60576d3139523..0000000000000
--- a/perception/lidar_centerpoint/config/lidar_centerpoint_ml_package.param.yaml
+++ /dev/null
@@ -1,19 +0,0 @@
-/**:
-  ros__parameters:
-    score_threshold: 0.35
-    has_twist: false
-    build_only: false
-
-    # weight files
-    encoder_onnx_path: "$(var model_path)/pts_voxel_encoder_$(var model_name).onnx"
-    encoder_engine_path: "$(var model_path)/pts_voxel_encoder_$(var model_name).engine"
-    head_onnx_path: "$(var model_path)/pts_backbone_neck_head_$(var model_name).onnx"
-    head_engine_path: "$(var model_path)/pts_backbone_neck_head_$(var model_name).engine"
-
-    # post-process params
-    circle_nms_dist_threshold: 0.5
-    iou_nms_target_class_names: ["CAR"]
-    iou_nms_search_distance_2d: 10.0
-    iou_nms_threshold: 0.1
-    yaw_norm_thresholds: [0.3, 0.3, 0.3, 0.3, 0.0]
-    trt_precision: fp16
diff --git a/perception/lidar_centerpoint/launch/lidar_centerpoint.launch.xml b/perception/lidar_centerpoint/launch/lidar_centerpoint.launch.xml
index 09d248974cd21..c3565b386060e 100644
--- a/perception/lidar_centerpoint/launch/lidar_centerpoint.launch.xml
+++ b/perception/lidar_centerpoint/launch/lidar_centerpoint.launch.xml
@@ -5,8 +5,8 @@
   <arg name="data_path" default="$(env HOME)/autoware_data" description="packages data and artifacts directory path"/>
   <arg name="model_name" default="centerpoint_tiny" description="options: `centerpoint` or `centerpoint_tiny`"/>
   <arg name="model_path" default="$(var data_path)/lidar_centerpoint"/>
-  <arg name="model_param_path" default="$(var model_path)/$(var model_name).param.yaml"/>
-  <arg name="ml_package_param_path" default="$(find-pkg-share lidar_centerpoint)/config/lidar_centerpoint_ml_package.param.yaml"/>
+  <arg name="model_param_path" default="$(find-pkg-share lidar_centerpoint)/config/centerpoint.param.yaml"/>
+  <arg name="ml_package_param_path" default="$(var model_path)/$(var model_name)_ml_package.param.yaml"/>
   <arg name="class_remapper_param_path" default="$(var model_path)/detection_class_remapper.param.yaml"/>
 
   <arg name="use_pointcloud_container" default="false" description="use pointcloud_container"/>

From 12890c5af469124381727faa677047e24cd2d655 Mon Sep 17 00:00:00 2001
From: badai-nguyen <dai.nguyen@tier4.jp>
Date: Wed, 10 Apr 2024 22:35:38 +0900
Subject: [PATCH 09/20] chore: typo

Signed-off-by: badai-nguyen <dai.nguyen@tier4.jp>
---
 .../detection/detector/lidar_dnn_detector.launch.xml          | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/launch/tier4_perception_launch/launch/object_recognition/detection/detector/lidar_dnn_detector.launch.xml b/launch/tier4_perception_launch/launch/object_recognition/detection/detector/lidar_dnn_detector.launch.xml
index f8fad9ef056d8..9ede1ad489b70 100644
--- a/launch/tier4_perception_launch/launch/object_recognition/detection/detector/lidar_dnn_detector.launch.xml
+++ b/launch/tier4_perception_launch/launch/object_recognition/detection/detector/lidar_dnn_detector.launch.xml
@@ -17,8 +17,8 @@
         <arg name="model_name" value="$(var centerpoint_model_name)"/>
         <arg name="model_path" value="$(var centerpoint_model_path)"/>
         <arg name="model_param_path" value="$(var lidar_model_param_path)/centerpoint.param.yaml"/>
-        <arg name="ml_package_param_path" value="$(var model_path)/$(var centerpoint_model_name)_ml_package.param.yaml"/>
-        <arg name="class_remapper_param_path" value="$(var model_path)/detection_class_remapper.param.yaml"/>
+        <arg name="ml_package_param_path" value="$(var centerpoint_model_path)/$(var centerpoint_model_name)_ml_package.param.yaml"/>
+        <arg name="class_remapper_param_path" value="$(var centerpoint_model_path)/detection_class_remapper.param.yaml"/>
 
         <arg name="use_pointcloud_container" value="true"/>
         <arg name="pointcloud_container_name" value="$(var pointcloud_container_name)"/>

From f30631b30a14a03c426ce063e311c084db23a97a Mon Sep 17 00:00:00 2001
From: badai-nguyen <dai.nguyen@tier4.jp>
Date: Thu, 11 Apr 2024 09:13:36 +0900
Subject: [PATCH 10/20] fix: align centerpoint param namespace with
 pointpainting

Signed-off-by: badai-nguyen <dai.nguyen@tier4.jp>
---
 .../config/centerpoint.param.yaml             | 27 ++++++------
 .../config/centerpoint_ml_package.param.yaml  | 15 ++++---
 .../centerpoint_tiny_ml_package.param.yaml    | 15 ++++---
 perception/lidar_centerpoint/src/node.cpp     | 44 ++++++++++---------
 4 files changed, 52 insertions(+), 49 deletions(-)

diff --git a/perception/lidar_centerpoint/config/centerpoint.param.yaml b/perception/lidar_centerpoint/config/centerpoint.param.yaml
index fb2023e0172ac..7f61a00ac009a 100644
--- a/perception/lidar_centerpoint/config/centerpoint.param.yaml
+++ b/perception/lidar_centerpoint/config/centerpoint.param.yaml
@@ -1,22 +1,21 @@
 /**:
   ros__parameters:
-    score_threshold: 0.35
-    has_twist: false
-    build_only: false
-
     # weight files
     encoder_onnx_path: "$(var model_path)/pts_voxel_encoder_$(var model_name).onnx"
     encoder_engine_path: "$(var model_path)/pts_voxel_encoder_$(var model_name).engine"
     head_onnx_path: "$(var model_path)/pts_backbone_neck_head_$(var model_name).onnx"
     head_engine_path: "$(var model_path)/pts_backbone_neck_head_$(var model_name).engine"
-
-    # post-process params
-    circle_nms_dist_threshold: 0.5
-    iou_nms_target_class_names: ["CAR"]
-    iou_nms_search_distance_2d: 10.0
-    iou_nms_threshold: 0.1
-    yaw_norm_thresholds: [0.3, 0.3, 0.3, 0.3, 0.0]
+    build_only: false
     trt_precision: fp16
-
-    densification_num_past_frames: 1
-    densification_world_frame_id: map
+    post_process_params:
+      # post-process params
+      circle_nms_dist_threshold: 0.5
+      iou_nms_target_class_names: ["CAR"]
+      iou_nms_search_distance_2d: 10.0
+      iou_nms_threshold: 0.1
+      score_threshold: 0.35
+      yaw_norm_thresholds: [0.3, 0.3, 0.3, 0.3, 0.0]
+      has_twist: false
+    densification_params:
+      world_frame_id: map
+      num_past_frames: 1
diff --git a/perception/lidar_centerpoint/config/centerpoint_ml_package.param.yaml b/perception/lidar_centerpoint/config/centerpoint_ml_package.param.yaml
index 22ef65101287c..4639cd4b6817f 100644
--- a/perception/lidar_centerpoint/config/centerpoint_ml_package.param.yaml
+++ b/perception/lidar_centerpoint/config/centerpoint_ml_package.param.yaml
@@ -1,9 +1,10 @@
 /**:
   ros__parameters:
-    class_names: ["CAR", "TRUCK", "BUS", "BICYCLE", "PEDESTRIAN"]
-    point_feature_size: 4
-    max_voxel_size: 40000
-    point_cloud_range: [-76.8, -76.8, -4.0, 76.8, 76.8, 6.0]
-    voxel_size: [0.32, 0.32, 10.0]
-    downsample_factor: 1
-    encoder_in_feature_size: 9
+    model_params:
+      class_names: ["CAR", "TRUCK", "BUS", "BICYCLE", "PEDESTRIAN"]
+      point_feature_size: 4
+      max_voxel_size: 40000
+      point_cloud_range: [-76.8, -76.8, -4.0, 76.8, 76.8, 6.0]
+      voxel_size: [0.32, 0.32, 10.0]
+      downsample_factor: 1
+      encoder_in_feature_size: 9
diff --git a/perception/lidar_centerpoint/config/centerpoint_tiny_ml_package.param.yaml b/perception/lidar_centerpoint/config/centerpoint_tiny_ml_package.param.yaml
index 69b8e7403d370..5c6b6dddb5772 100644
--- a/perception/lidar_centerpoint/config/centerpoint_tiny_ml_package.param.yaml
+++ b/perception/lidar_centerpoint/config/centerpoint_tiny_ml_package.param.yaml
@@ -1,9 +1,10 @@
 /**:
   ros__parameters:
-    class_names: ["CAR", "TRUCK", "BUS", "BICYCLE", "PEDESTRIAN"]
-    point_feature_size: 4
-    max_voxel_size: 40000
-    point_cloud_range: [-76.8, -76.8, -4.0, 76.8, 76.8, 6.0]
-    voxel_size: [0.32, 0.32, 10.0]
-    downsample_factor: 2
-    encoder_in_feature_size: 9
+    model_params:
+      class_names: ["CAR", "TRUCK", "BUS", "BICYCLE", "PEDESTRIAN"]
+      point_feature_size: 4
+      max_voxel_size: 40000
+      point_cloud_range: [-76.8, -76.8, -4.0, 76.8, 76.8, 6.0]
+      voxel_size: [0.32, 0.32, 10.0]
+      downsample_factor: 2
+      encoder_in_feature_size: 9
diff --git a/perception/lidar_centerpoint/src/node.cpp b/perception/lidar_centerpoint/src/node.cpp
index 09d2702bb4566..52079ed4e01c6 100644
--- a/perception/lidar_centerpoint/src/node.cpp
+++ b/perception/lidar_centerpoint/src/node.cpp
@@ -39,33 +39,34 @@ LidarCenterPointNode::LidarCenterPointNode(const rclcpp::NodeOptions & node_opti
 : Node("lidar_center_point", node_options), tf_buffer_(this->get_clock())
 {
   const float score_threshold =
-    static_cast<float>(this->declare_parameter<double>("score_threshold"));
-  const float circle_nms_dist_threshold =
-    static_cast<float>(this->declare_parameter<double>("circle_nms_dist_threshold"));
+    static_cast<float>(this->declare_parameter<double>("post_process_params.score_threshold"));
+  const float circle_nms_dist_threshold = static_cast<float>(
+    this->declare_parameter<double>("post_process_params.circle_nms_dist_threshold"));
   const auto yaw_norm_thresholds =
-    this->declare_parameter<std::vector<double>>("yaw_norm_thresholds");
+    this->declare_parameter<std::vector<double>>("post_process_params.yaw_norm_thresholds");
   const std::string densification_world_frame_id =
-    this->declare_parameter<std::string>("densification_world_frame_id");
+    this->declare_parameter<std::string>("densification_params.world_frame_id");
   const int densification_num_past_frames =
-    this->declare_parameter<int>("densification_num_past_frames");
+    this->declare_parameter<int>("densification_params.num_past_frames");
   const std::string trt_precision = this->declare_parameter<std::string>("trt_precision");
   const std::string encoder_onnx_path = this->declare_parameter<std::string>("encoder_onnx_path");
   const std::string encoder_engine_path =
     this->declare_parameter<std::string>("encoder_engine_path");
   const std::string head_onnx_path = this->declare_parameter<std::string>("head_onnx_path");
   const std::string head_engine_path = this->declare_parameter<std::string>("head_engine_path");
-  class_names_ = this->declare_parameter<std::vector<std::string>>("class_names");
-  has_twist_ = this->declare_parameter<bool>("has_twist");
-  const std::size_t point_feature_size =
-    static_cast<std::size_t>(this->declare_parameter<std::int64_t>("point_feature_size"));
+  class_names_ = this->declare_parameter<std::vector<std::string>>("model_params.class_names");
+  has_twist_ = this->declare_parameter<bool>("post_process_params.has_twist");
+  const std::size_t point_feature_size = static_cast<std::size_t>(
+    this->declare_parameter<std::int64_t>("model_params.point_feature_size"));
   const std::size_t max_voxel_size =
-    static_cast<std::size_t>(this->declare_parameter<std::int64_t>("max_voxel_size"));
-  const auto point_cloud_range = this->declare_parameter<std::vector<double>>("point_cloud_range");
-  const auto voxel_size = this->declare_parameter<std::vector<double>>("voxel_size");
-  const std::size_t downsample_factor =
-    static_cast<std::size_t>(this->declare_parameter<std::int64_t>("downsample_factor"));
-  const std::size_t encoder_in_feature_size =
-    static_cast<std::size_t>(this->declare_parameter<std::int64_t>("encoder_in_feature_size"));
+    static_cast<std::size_t>(this->declare_parameter<std::int64_t>("model_params.max_voxel_size"));
+  const auto point_cloud_range =
+    this->declare_parameter<std::vector<double>>("model_params.point_cloud_range");
+  const auto voxel_size = this->declare_parameter<std::vector<double>>("model_params.voxel_size");
+  const std::size_t downsample_factor = static_cast<std::size_t>(
+    this->declare_parameter<std::int64_t>("model_params.downsample_factor"));
+  const std::size_t encoder_in_feature_size = static_cast<std::size_t>(
+    this->declare_parameter<std::int64_t>("model_params.encoder_in_feature_size"));
   const auto allow_remapping_by_area_matrix =
     this->declare_parameter<std::vector<int64_t>>("allow_remapping_by_area_matrix");
   const auto min_area_matrix = this->declare_parameter<std::vector<double>>("min_area_matrix");
@@ -77,10 +78,11 @@ LidarCenterPointNode::LidarCenterPointNode(const rclcpp::NodeOptions & node_opti
   {
     NMSParams p;
     p.nms_type_ = NMS_TYPE::IoU_BEV;
-    p.target_class_names_ =
-      this->declare_parameter<std::vector<std::string>>("iou_nms_target_class_names");
-    p.search_distance_2d_ = this->declare_parameter<double>("iou_nms_search_distance_2d");
-    p.iou_threshold_ = this->declare_parameter<double>("iou_nms_threshold");
+    p.target_class_names_ = this->declare_parameter<std::vector<std::string>>(
+      "post_process_params.iou_nms_target_class_names");
+    p.search_distance_2d_ =
+      this->declare_parameter<double>("post_process_params.iou_nms_search_distance_2d");
+    p.iou_threshold_ = this->declare_parameter<double>("post_process_params.iou_nms_threshold");
     iou_bev_nms_.setParameters(p);
   }
 

From 408f611dcf269076a9bb61fe23af6dcf01476287 Mon Sep 17 00:00:00 2001
From: badai-nguyen <dai.nguyen@tier4.jp>
Date: Thu, 11 Apr 2024 09:45:57 +0900
Subject: [PATCH 11/20] fix(centerpoint): add schema json

Signed-off-by: badai-nguyen <dai.nguyen@tier4.jp>
---
 .../schema/centerpoint.schemal.json           | 104 ++++++++++++++++++
 .../schema/centerpoint_ml_package.schema.json |  68 ++++++++++++
 .../centerpoint_tiny_ml_package.schema.json   |  69 ++++++++++++
 3 files changed, 241 insertions(+)
 create mode 100644 perception/lidar_centerpoint/schema/centerpoint.schemal.json
 create mode 100644 perception/lidar_centerpoint/schema/centerpoint_ml_package.schema.json
 create mode 100644 perception/lidar_centerpoint/schema/centerpoint_tiny_ml_package.schema.json

diff --git a/perception/lidar_centerpoint/schema/centerpoint.schemal.json b/perception/lidar_centerpoint/schema/centerpoint.schemal.json
new file mode 100644
index 0000000000000..1b1b3f06f70d2
--- /dev/null
+++ b/perception/lidar_centerpoint/schema/centerpoint.schemal.json
@@ -0,0 +1,104 @@
+{
+    "$schema": "http://json-schema.org/draft-07/schema#",
+    "title": "Parameters for Lidar Centerpoint Node",
+    "type": "object",
+    "definitions": {
+      "centerpoint": {
+        "type": "object",
+        "properties": {
+          "build_only": {
+            "type": "boolean",
+            "description": "shutdown the node after TensorRT engine file is built.",
+            "default": false
+          },
+          "trt_precision": {
+            "type": "string",
+            "description": "TensorRT inference precision.",
+            "default": "fp16",
+            "enum": ["fp32", "fp16"]
+          },
+          "post_process_params": {
+            "type": "object",
+            "properties": {
+              "score_threshold": {
+                "type": "number",
+                "description": "A threshold value of existence probability score, all of objects with score less than this threshold are ignored.",
+                "default": 0.35,
+                "minimum": 0.0,
+                "maximum": 1.0
+              },
+              "yaw_norm_thresholds": {
+                "type": "array",
+                "description": "An array of distance threshold values of norm of yaw [rad].",
+                "default": [0.3, 0.3, 0.3, 0.3, 0.0],
+                "minimum": 0.0,
+                "maximum": 1.0
+              },
+              "circle_nms_dist_threshold": {
+                "type": "number",
+                "description": "",
+                "default": 0.5,
+                "minimum": 0.0,
+                "maximum": 1.0
+              },
+              "iou_nms_target_class_names": {
+                "type": "array",
+                "description": "An array of class names to be target in NMS.",
+                "default": ["CAR"],
+                "uniqueItems": true
+              },
+              "iou_nms_search_distance_2d": {
+                "type": "number",
+                "description": "A maximum distance value to search the nearest objects.",
+                "default": 10.0,
+                "minimum": 0.0
+              },
+              "iou_nms_threshold": {
+                "type": "number",
+                "description": "A threshold value of NMS using IoU score.",
+                "default": 0.1,
+                "minimum": 0.0,
+                "maximum": 1.0
+              },
+              "has_twist": {
+                "type": "boolean",
+                "description": "Indicates whether the model outputs twist value.",
+                "default": false
+              }
+            }
+          },
+          "densification_params": {
+            "type": "object",
+            "description": "Parameters for pointcloud densification.",
+            "properties": {
+              "world_frame_id": {
+                "type": "string",
+                "description": "A name of frame id where world coordinates system is defined with respect to.",
+                "default": "map"
+              },
+              "num_past_frames": {
+                "type": "integer",
+                "description": "A number of past frames to be considered as same input frame.",
+                "default": 1,
+                "minimum": 0
+              }
+            }
+          }
+        },
+        "required": ["post_process_params", "densification_params"]
+      }
+    },
+    "properties": {
+      "/**": {
+        "type": "object",
+        "properties": {
+          "ros__parameters": {
+            "$ref": "#/definitions/centerpoint"
+          }
+        },
+        "required": ["ros__parameters"]
+      }
+    },
+    "required": ["/**"]
+  }
+  
\ No newline at end of file
diff --git a/perception/lidar_centerpoint/schema/centerpoint_ml_package.schema.json b/perception/lidar_centerpoint/schema/centerpoint_ml_package.schema.json
new file mode 100644
index 0000000000000..2c5655c6201ed
--- /dev/null
+++ b/perception/lidar_centerpoint/schema/centerpoint_ml_package.schema.json
@@ -0,0 +1,68 @@
+{
+  "$schema": "http://json-schema.org/draft-07/schema#",
+  "title": "Parameters for Centerpoint ML model",
+  "type": "object",
+  "definitions": {
+    "centerpoint_ml_package": {
+      "type": "object",
+      "properties": {
+        "model_params": {
+          "type": "object",
+          "description": "Parameters for model configuration.",
+          "properties": {
+            "class_names": {
+              "type": "array",
+              "description": "An array of class names to be predicted.",
+              "default": ["CAR", "TRUCK", "BUS", "BICYCLE", "PEDESTRIAN"],
+              "uniqueItems": true
+            },
+            "point_feature_size": {
+              "type": "integer",
+              "description": "A number of channels of point feature layer.",
+              "default": 4
+            },
+            "max_voxel_size": {
+              "type": "integer",
+              "description": "A maximum size of voxel grid.",
+              "default": 40000
+            },
+            "point_cloud_range": {
+              "type": "array",
+              "description": "An array specifying the detection range in meters: [x_min, y_min, z_min, x_max, y_max, z_max].",
+              "default": [-76.8, -76.8, -4.0, 76.8, 76.8, 6.0]
+            },
+            "voxel_size": {
+              "type": "array",
+              "description": "An array of voxel grid sizes in meters: [x, y, z].",
+              "default": [0.32, 0.32, 10.0]
+            },
+            "downsample_factor": {
+              "type": "integer",
+              "description": "A scale factor of downsampling points",
+              "default": 1,
+              "minimum": 1
+            },
+            "encoder_in_feature_size": {
+              "type": "integer",
+              "description": "A size of encoder input feature channels.",
+              "default": 9
+            }
+          }
+        }
+      },
+      "required": ["model_params"]
+    }
+  },
+  "properties": {
+    "/**": {
+      "type": "object",
+      "properties": {
+        "ros__parameters": {
+          "$ref": "#/definitions/centerpoint_ml_package"
+        }
+      },
+      "required": ["ros__parameters"]
+    }
+  },
+  "required": ["/**"]
+}
diff --git a/perception/lidar_centerpoint/schema/centerpoint_tiny_ml_package.schema.json b/perception/lidar_centerpoint/schema/centerpoint_tiny_ml_package.schema.json
new file mode 100644
index 0000000000000..b77c4baaa2ced
--- /dev/null
+++ b/perception/lidar_centerpoint/schema/centerpoint_tiny_ml_package.schema.json
@@ -0,0 +1,69 @@
+{
+    "$schema": "http://json-schema.org/draft-07/schema#",
+    "title": "Parameters for Centerpoint Tiny ML model",
+    "type": "object",
+    "definitions": {
+      "centerpoint_tiny_ml_package": {
+        "type": "object",
+        "properties": {
+          "model_params": {
+            "type": "object",
+            "description": "Parameters for model configuration.",
+            "properties": {
+              "class_names": {
+                "type": "array",
+                "description": "An array of class names will be predicted.",
+                "default": ["CAR", "TRUCK", "BUS", "BICYCLE", "PEDESTRIAN"],
+                "uniqueItems": true
+              },
+              "point_feature_size": {
+                "type": "integer",
+                "description": "A number of channels of point feature layer.",
+                "default": 4
+              },
+              "max_voxel_size": {
+                "type": "integer",
+                "description": "A maximum size of voxel grid.",
+                "default": 40000
+              },
+              "point_cloud_range": {
+                "type": "array",
+                "description": "An array of distance ranges of each class, this must have same length with `class_names`.",
+                "default": [-76.8, -76.8, -4.0, 76.8, 76.8, 6.0]
+              },
+              "voxel_size": {
+                "type": "array",
+                "description": "An array of voxel grid sizes for PointPainting, this must have same length with `paint_class_names`.",
+                "default": [0.32, 0.32, 10.0]
+              },
+              "down_sample_factor": {
+                "type": "integer",
+                "description": "A scale factor of downsampling points",
+                "default": 2,
+                "minimum": 1
+              },
+              "encoder_in_feature_size": {
+                "type": "integer",
+                "description": "A size of encoder input feature channels.",
+                "default": 9
+              }
+            }
+          }
+        },
+        "required": ["model_params"]
+      }
+    },
+    "properties": {
+      "/**": {
+        "type": "object",
+        "properties": {
+          "ros__parameters": {
+            "$ref": "#/definitions/centerpoint_tiny_ml_package"
+          }
+        },
+        "required": ["ros__parameters"]
+      }
+    },
+    "required": ["/**"]
+  }
+  
\ No newline at end of file

From daa82b56bc98e02644a25fc9568fb914c6b184dc Mon Sep 17 00:00:00 2001
From: badai-nguyen <dai.nguyen@tier4.jp>
Date: Thu, 11 Apr 2024 09:46:20 +0900
Subject: [PATCH 12/20] fix(pointpainting): fix schema json typo

Signed-off-by: badai-nguyen <dai.nguyen@tier4.jp>
---
 .../schema/pointpainting.schema.json          | 19 ++++++++++++++++++-
 .../pointpainting_ml_package.schema.json      | 17 -----------------
 2 files changed, 18 insertions(+), 18 deletions(-)

diff --git a/perception/image_projection_based_fusion/schema/pointpainting.schema.json b/perception/image_projection_based_fusion/schema/pointpainting.schema.json
index 747e22a1379fa..e2db17bc2a530 100644
--- a/perception/image_projection_based_fusion/schema/pointpainting.schema.json
+++ b/perception/image_projection_based_fusion/schema/pointpainting.schema.json
@@ -47,7 +47,7 @@
               "default": ["CAR"],
               "uniqueItems": true
             },
-            "iou_search_distance_2d": {
+            "iou_nms_search_distance_2d": {
               "type": "number",
               "description": "A maximum distance value to search the nearest objects.",
               "default": 10.0,
@@ -66,6 +66,23 @@
               "default": false
             }
           }
+        },
+        "densification_params": {
+          "type": "object",
+          "description": "Parameters for pointcloud densification.",
+          "properties": {
+            "world_frame_id": {
+              "type": "string",
+              "description": "A name of frame id where world coordinates system is defined with respect to.",
+              "default": "map"
+            },
+            "num_past_frames": {
+              "type": "integer",
+              "description": "A number of past frames to be considered as same input frame.",
+              "default": 0,
+              "minimum": 0
+            }
+          }
         }
       },
       "required": ["post_process_params", "densification_params"]
diff --git a/perception/image_projection_based_fusion/schema/pointpainting_ml_package.schema.json b/perception/image_projection_based_fusion/schema/pointpainting_ml_package.schema.json
index 34ff35b96a2da..9617bad77ea21 100644
--- a/perception/image_projection_based_fusion/schema/pointpainting_ml_package.schema.json
+++ b/perception/image_projection_based_fusion/schema/pointpainting_ml_package.schema.json
@@ -55,23 +55,6 @@
             }
           }
         },
-        "densification_params": {
-          "type": "object",
-          "description": "Parameters for pointcloud densification.",
-          "properties": {
-            "world_frame_id": {
-              "type": "string",
-              "description": "A name of frame id where world coordinates system is defined with respect to.",
-              "default": "map"
-            },
-            "num_past_frames": {
-              "type": "integer",
-              "description": "A number of past frames to be considered as same input frame.",
-              "default": 0,
-              "minimum": 0
-            }
-          }
-        },
         "omp_params": {
           "type": "object",
           "properties": {

From 1724c8b1bdb66398aed626ea0bd99319698d17f0 Mon Sep 17 00:00:00 2001
From: "pre-commit-ci[bot]"
 <66853113+pre-commit-ci[bot]@users.noreply.github.com>
Date: Thu, 11 Apr 2024 00:48:27 +0000
Subject: [PATCH 13/20] style(pre-commit): autofix

---
 .../schema/centerpoint.schemal.json           | 197 +++++++++---------
 .../centerpoint_tiny_ml_package.schema.json   | 131 ++++++------
 2 files changed, 163 insertions(+), 165 deletions(-)

diff --git a/perception/lidar_centerpoint/schema/centerpoint.schemal.json b/perception/lidar_centerpoint/schema/centerpoint.schemal.json
index 1b1b3f06f70d2..d8abf5c00be0e 100644
--- a/perception/lidar_centerpoint/schema/centerpoint.schemal.json
+++ b/perception/lidar_centerpoint/schema/centerpoint.schemal.json
@@ -1,104 +1,103 @@
 {
-    "$schema": "http://json-schema.org/draft-07/schema#",
-    "title": "Parameters for Lidar Centerpoint Node",
-    "type": "object",
-    "definitions": {
-      "centerpoint": {
-        "type": "object",
-        "properties": {
-          "build_only": {
-            "type": "boolean",
-            "description": "shutdown the node after TensorRT engine file is built.",
-            "default": false
-          },
-          "trt_precision": {
-            "type": "string",
-            "description": "TensorRT inference precision.",
-            "default": "fp16",
-            "enum": ["fp32", "fp16"]
-          },
-          "post_process_params": {
-            "type": "object",
-            "properties": {
-              "score_threshold": {
-                "type": "number",
-                "description": "A threshold value of existence probability score, all of objects with score less than this threshold are ignored.",
-                "default": 0.35,
-                "minimum": 0.0,
-                "maximum": 1.0
-              },
-              "yaw_norm_thresholds": {
-                "type": "array",
-                "description": "An array of distance threshold values of norm of yaw [rad].",
-                "default": [0.3, 0.3, 0.3, 0.3, 0.0],
-                "minimum": 0.0,
-                "maximum": 1.0
-              },
-              "circle_nms_dist_threshold": {
-                "type": "number",
-                "description": "",
-                "default": 0.5,
-                "minimum": 0.0,
-                "maximum": 1.0
-              },
-              "iou_nms_target_class_names": {
-                "type": "array",
-                "description": "An array of class names to be target in NMS.",
-                "default": ["CAR"],
-                "uniqueItems": true
-              },
-              "iou_nms_search_distance_2d": {
-                "type": "number",
-                "description": "A maximum distance value to search the nearest objects.",
-                "default": 10.0,
-                "minimum": 0.0
-              },
-              "iou_nms_threshold": {
-                "type": "number",
-                "description": "A threshold value of NMS using IoU score.",
-                "default": 0.1,
-                "minimum": 0.0,
-                "maximum": 1.0
-              },
-              "has_twist": {
-                "type": "boolean",
-                "description": "Indicates whether the model outputs twist value.",
-                "default": false
-              }
-            }
-          },
-          "densification_params": {
-            "type": "object",
-            "description": "Parameters for pointcloud densification.",
-            "properties": {
-              "world_frame_id": {
-                "type": "string",
-                "description": "A name of frame id where world coordinates system is defined with respect to.",
-                "default": "map"
-              },
-              "num_past_frames": {
-                "type": "integer",
-                "description": "A number of past frames to be considered as same input frame.",
-                "default": 1,
-                "minimum": 0
-              }
+  "$schema": "http://json-schema.org/draft-07/schema#",
+  "title": "Parameters for Lidar Centerpoint Node",
+  "type": "object",
+  "definitions": {
+    "centerpoint": {
+      "type": "object",
+      "properties": {
+        "build_only": {
+          "type": "boolean",
+          "description": "shutdown the node after TensorRT engine file is built.",
+          "default": false
+        },
+        "trt_precision": {
+          "type": "string",
+          "description": "TensorRT inference precision.",
+          "default": "fp16",
+          "enum": ["fp32", "fp16"]
+        },
+        "post_process_params": {
+          "type": "object",
+          "properties": {
+            "score_threshold": {
+              "type": "number",
+              "description": "A threshold value of existence probability score, all of objects with score less than this threshold are ignored.",
+              "default": 0.35,
+              "minimum": 0.0,
+              "maximum": 1.0
+            },
+            "yaw_norm_thresholds": {
+              "type": "array",
+              "description": "An array of distance threshold values of norm of yaw [rad].",
+              "default": [0.3, 0.3, 0.3, 0.3, 0.0],
+              "minimum": 0.0,
+              "maximum": 1.0
+            },
+            "circle_nms_dist_threshold": {
+              "type": "number",
+              "description": "",
+              "default": 0.5,
+              "minimum": 0.0,
+              "maximum": 1.0
+            },
+            "iou_nms_target_class_names": {
+              "type": "array",
+              "description": "An array of class names to be target in NMS.",
+              "default": ["CAR"],
+              "uniqueItems": true
+            },
+            "iou_nms_search_distance_2d": {
+              "type": "number",
+              "description": "A maximum distance value to search the nearest objects.",
+              "default": 10.0,
+              "minimum": 0.0
+            },
+            "iou_nms_threshold": {
+              "type": "number",
+              "description": "A threshold value of NMS using IoU score.",
+              "default": 0.1,
+              "minimum": 0.0,
+              "maximum": 1.0
+            },
+            "has_twist": {
+              "type": "boolean",
+              "description": "Indicates whether the model outputs twist value.",
+              "default": false
             }
           }
         },
-        "required": ["post_process_params", "densification_params"]
-      }
-    },
-    "properties": {
-      "/**": {
-        "type": "object",
-        "properties": {
-          "ros__parameters": {
-            "$ref": "#/definitions/centerpoint"
+        "densification_params": {
+          "type": "object",
+          "description": "Parameters for pointcloud densification.",
+          "properties": {
+            "world_frame_id": {
+              "type": "string",
+              "description": "A name of frame id where world coordinates system is defined with respect to.",
+              "default": "map"
+            },
+            "num_past_frames": {
+              "type": "integer",
+              "description": "A number of past frames to be considered as same input frame.",
+              "default": 1,
+              "minimum": 0
+            }
           }
-        },
-        "required": ["ros__parameters"]
-      }
-    },
-    "required": ["/**"]
-  }
-  
\ No newline at end of file
+        }
+      },
+      "required": ["post_process_params", "densification_params"]
+    }
+  },
+  "properties": {
+    "/**": {
+      "type": "object",
+      "properties": {
+        "ros__parameters": {
+          "$ref": "#/definitions/centerpoint"
+        }
+      },
+      "required": ["ros__parameters"]
+    }
+  },
+  "required": ["/**"]
+}
diff --git a/perception/lidar_centerpoint/schema/centerpoint_tiny_ml_package.schema.json b/perception/lidar_centerpoint/schema/centerpoint_tiny_ml_package.schema.json
index b77c4baaa2ced..55329e21ea8ab 100644
--- a/perception/lidar_centerpoint/schema/centerpoint_tiny_ml_package.schema.json
+++ b/perception/lidar_centerpoint/schema/centerpoint_tiny_ml_package.schema.json
@@ -1,69 +1,68 @@
 {
-    "$schema": "http://json-schema.org/draft-07/schema#",
-    "title": "Parameters for Centerpoint Tiny ML model",
-    "type": "object",
-    "definitions": {
-      "centerpoint_tiny_ml_package": {
-        "type": "object",
-        "properties": {
-          "model_params": {
-            "type": "object",
-            "description": "Parameters for model configuration.",
-            "properties": {
-              "class_names": {
-                "type": "array",
-                "description": "An array of class names will be predicted.",
-                "default": ["CAR", "TRUCK", "BUS", "BICYCLE", "PEDESTRIAN"],
-                "uniqueItems": true
-              },
-              "point_feature_size": {
-                "type": "integer",
-                "description": "A number of channels of point feature layer.",
-                "default": 4
-              },
-              "max_voxel_size": {
-                "type": "integer",
-                "description": "A maximum size of voxel grid.",
-                "default": 40000
-              },
-              "point_cloud_range": {
-                "type": "array",
-                "description": "An array of distance ranges of each class, this must have same length with `class_names`.",
-                "default": [-76.8, -76.8, -4.0, 76.8, 76.8, 6.0]
-              },
-              "voxel_size": {
-                "type": "array",
-                "description": "An array of voxel grid sizes for PointPainting, this must have same length with `paint_class_names`.",
-                "default": [0.32, 0.32, 10.0]
-              },
-              "down_sample_factor": {
-                "type": "integer",
-                "description": "A scale factor of downsampling points",
-                "default": 2,
-                "minimum": 1
-              },
-              "encoder_in_feature_size": {
-                "type": "integer",
-                "description": "A size of encoder input feature channels.",
-                "default": 9
-              }
+  "$schema": "http://json-schema.org/draft-07/schema#",
+  "title": "Parameters for Centerpoint Tiny ML model",
+  "type": "object",
+  "definitions": {
+    "centerpoint_tiny_ml_package": {
+      "type": "object",
+      "properties": {
+        "model_params": {
+          "type": "object",
+          "description": "Parameters for model configuration.",
+          "properties": {
+            "class_names": {
+              "type": "array",
+              "description": "An array of class names will be predicted.",
+              "default": ["CAR", "TRUCK", "BUS", "BICYCLE", "PEDESTRIAN"],
+              "uniqueItems": true
+            },
+            "point_feature_size": {
+              "type": "integer",
+              "description": "A number of channels of point feature layer.",
+              "default": 4
+            },
+            "max_voxel_size": {
+              "type": "integer",
+              "description": "A maximum size of voxel grid.",
+              "default": 40000
+            },
+            "point_cloud_range": {
+              "type": "array",
+              "description": "An array of distance ranges of each class, this must have same length with `class_names`.",
+              "default": [-76.8, -76.8, -4.0, 76.8, 76.8, 6.0]
+            },
+            "voxel_size": {
+              "type": "array",
+              "description": "An array of voxel grid sizes for PointPainting, this must have same length with `paint_class_names`.",
+              "default": [0.32, 0.32, 10.0]
+            },
+            "down_sample_factor": {
+              "type": "integer",
+              "description": "A scale factor of downsampling points",
+              "default": 2,
+              "minimum": 1
+            },
+            "encoder_in_feature_size": {
+              "type": "integer",
+              "description": "A size of encoder input feature channels.",
+              "default": 9
             }
           }
-        },
-        "required": ["model_params"]
-      }
-    },
-    "properties": {
-      "/**": {
-        "type": "object",
-        "properties": {
-          "ros__parameters": {
-            "$ref": "#/definitions/centerpoint_tiny_ml_package"
-          }
-        },
-        "required": ["ros__parameters"]
-      }
-    },
-    "required": ["/**"]
-  }
-  
\ No newline at end of file
+        }
+      },
+      "required": ["model_params"]
+    }
+  },
+  "properties": {
+    "/**": {
+      "type": "object",
+      "properties": {
+        "ros__parameters": {
+          "$ref": "#/definitions/centerpoint_tiny_ml_package"
+        }
+      },
+      "required": ["ros__parameters"]
+    }
+  },
+  "required": ["/**"]
+}

From e65212306c0f37f59c3748f7e71fc114ef355c6e Mon Sep 17 00:00:00 2001
From: badai-nguyen <dai.nguyen@tier4.jp>
Date: Thu, 11 Apr 2024 11:33:53 +0900
Subject: [PATCH 14/20] docs: update pointpainting fusion doc

Signed-off-by: badai-nguyen <dai.nguyen@tier4.jp>
---
 .../docs/pointpainting-fusion.md              | 27 +++++++++++--------
 1 file changed, 16 insertions(+), 11 deletions(-)

diff --git a/perception/image_projection_based_fusion/docs/pointpainting-fusion.md b/perception/image_projection_based_fusion/docs/pointpainting-fusion.md
index 5f5e45941c549..85e740d95ebd0 100644
--- a/perception/image_projection_based_fusion/docs/pointpainting-fusion.md
+++ b/perception/image_projection_based_fusion/docs/pointpainting-fusion.md
@@ -33,17 +33,22 @@ The lidar points are projected onto the output of an image-only 2d object detect
 
 ### Core Parameters
 
-| Name                            | Type   | Default Value | Description                                                 |
-| ------------------------------- | ------ | ------------- | ----------------------------------------------------------- |
-| `score_threshold`               | float  | `0.4`         | detected objects with score less than threshold are ignored |
-| `densification.world_frame_id`  | string | `map`         | the world frame id to fuse multi-frame pointcloud           |
-| `densification.num_past_frames` | int    | `1`           | the number of past frames to fuse with the current frame    |
-| `trt_precision`                 | string | `fp16`        | TensorRT inference precision: `fp32` or `fp16`              |
-| `encoder_onnx_path`             | string | `""`          | path to VoxelFeatureEncoder ONNX file                       |
-| `encoder_engine_path`           | string | `""`          | path to VoxelFeatureEncoder TensorRT Engine file            |
-| `head_onnx_path`                | string | `""`          | path to DetectionHead ONNX file                             |
-| `head_engine_path`              | string | `""`          | path to DetectionHead TensorRT Engine file                  |
-| `build_only`                    | bool   | `false`       | shutdown the node after TensorRT engine file is built       |
+| Name                                             | Type         | Default Value             | Description                                                 |
+| ------------------------------------------------ | ------------ | ------------------------- | ----------------------------------------------------------- |
+| `encoder_onnx_path`                              | string       | `""`                      | path to VoxelFeatureEncoder ONNX file                       |
+| `encoder_engine_path`                            | string       | `""`                      | path to VoxelFeatureEncoder TensorRT Engine file            |
+| `head_onnx_path`                                 | string       | `""`                      | path to DetectionHead ONNX file                             |
+| `head_engine_path`                               | string       | `""`                      | path to DetectionHead TensorRT Engine file                  |
+| `build_only`                                     | bool         | `false`                   | shutdown the node after TensorRT engine file is built       |
+| `trt_precision`                                  | string       | `fp16`                    | TensorRT inference precision: `fp32` or `fp16`              |
+| `post_process_params.score_threshold`            | double       | `0.4`                     | detected objects with score less than threshold are ignored |
+| `post_process_params.yaw_norm_thresholds`        | list[double] | [0.3, 0.3, 0.3, 0.3, 0.0] | An array of distance threshold values of norm of yaw [rad]. |
+| `post_process_params.iou_nms_target_class_names` | list[string] | ["CAR"]                   | An array of class names to be target in NMS.                |
+| `post_process_params.iou_nms_search_distance_2d` | double       | 10.0                      | A maximum distance value to search the nearest objects.     |
+| `post_process_params.iou_nms_threshold`          | double       | 0.1                       | A threshold value of NMS using IoU score.                   |
+| `post_process_params.has_twist`                  | bool         | false                     | Indicates whether the model outputs twist value.            |
+| `densification_params.world_frame_id`            | string       | `map`                     | the world frame id to fuse multi-frame pointcloud           |
+| `densification_params.num_past_frames`           | int          | `1`                       | the number of past frames to fuse with the current frame    |
 
 ## Assumptions / Known limits
 

From 4823bdb1c8c881c9d39d2bf0a87d30a55045d6a9 Mon Sep 17 00:00:00 2001
From: badai-nguyen <dai.nguyen@tier4.jp>
Date: Thu, 11 Apr 2024 11:45:36 +0900
Subject: [PATCH 15/20] docs: update lidar centerpoint doc

Signed-off-by: badai-nguyen <dai.nguyen@tier4.jp>
---
 perception/lidar_centerpoint/README.md | 30 ++++++++++++++------------
 1 file changed, 16 insertions(+), 14 deletions(-)

diff --git a/perception/lidar_centerpoint/README.md b/perception/lidar_centerpoint/README.md
index ed71349d5bd7f..09cee5266760c 100644
--- a/perception/lidar_centerpoint/README.md
+++ b/perception/lidar_centerpoint/README.md
@@ -30,20 +30,22 @@ We trained the models using <https://github.com/open-mmlab/mmdetection3d>.
 
 ### Core Parameters
 
-| Name                            | Type         | Default Value | Description                                                   |
-| ------------------------------- | ------------ | ------------- | ------------------------------------------------------------- |
-| `score_threshold`               | float        | `0.4`         | detected objects with score less than threshold are ignored   |
-| `densification_world_frame_id`  | string       | `map`         | the world frame id to fuse multi-frame pointcloud             |
-| `densification_num_past_frames` | int          | `1`           | the number of past frames to fuse with the current frame      |
-| `trt_precision`                 | string       | `fp16`        | TensorRT inference precision: `fp32` or `fp16`                |
-| `encoder_onnx_path`             | string       | `""`          | path to VoxelFeatureEncoder ONNX file                         |
-| `encoder_engine_path`           | string       | `""`          | path to VoxelFeatureEncoder TensorRT Engine file              |
-| `head_onnx_path`                | string       | `""`          | path to DetectionHead ONNX file                               |
-| `head_engine_path`              | string       | `""`          | path to DetectionHead TensorRT Engine file                    |
-| `nms_iou_target_class_names`    | list[string] | -             | target classes for IoU-based Non Maximum Suppression          |
-| `nms_iou_search_distance_2d`    | double       | -             | If two objects are farther than the value, NMS isn't applied. |
-| `nms_iou_threshold`             | double       | -             | IoU threshold for the IoU-based Non Maximum Suppression       |
-| `build_only`                    | bool         | `false`       | shutdown the node after TensorRT engine file is built         |
+| Name                                             | Type         | Default Value             | Description                                                   |
+| ------------------------------------------------ | ------------ | ------------------------- | ------------------------------------------------------------- |
+| `encoder_onnx_path`                              | string       | `""`                      | path to VoxelFeatureEncoder ONNX file                         |
+| `encoder_engine_path`                            | string       | `""`                      | path to VoxelFeatureEncoder TensorRT Engine file              |
+| `head_onnx_path`                                 | string       | `""`                      | path to DetectionHead ONNX file                               |
+| `head_engine_path`                               | string       | `""`                      | path to DetectionHead TensorRT Engine file                    |
+| `build_only`                                     | bool         | `false`                   | shutdown the node after TensorRT engine file is built         |
+| `trt_precision`                                  | string       | `fp16`                    | TensorRT inference precision: `fp32` or `fp16`                |
+| `post_process_params.score_threshold`            | double       | `0.4`                     | detected objects with score less than threshold are ignored   |
+| `post_process_params.yaw_norm_thresholds`        | list[double] | [0.3, 0.3, 0.3, 0.3, 0.0] | An array of distance threshold values of norm of yaw [rad].   |
+| `post_process_params.iou_nms_target_class_names` | list[string] | -                         | target classes for IoU-based Non Maximum Suppression          |
+| `post_process_params.iou_nms_search_distance_2d` | double       | -                         | If two objects are farther than the value, NMS isn't applied. |
+| `post_process_params.iou_nms_threshold`          | double       | -                         | IoU threshold for the IoU-based Non Maximum Suppression       |
+| `post_process_params.has_twist`                  | bool         | false                     | Indicates whether the model outputs twist value.              |
+| `densification_params.world_frame_id`            | string       | `map`                     | the world frame id to fuse multi-frame pointcloud             |
+| `densification_params.num_past_frames`           | int          | `1`                       | the number of past frames to fuse with the current frame      |
 
 ### The `build_only` option
 

From 995726beea3cad8a3f1fcd210c73531bf7a9be61 Mon Sep 17 00:00:00 2001
From: badai-nguyen <dai.nguyen@tier4.jp>
Date: Thu, 18 Apr 2024 16:55:24 +0900
Subject: [PATCH 16/20] fix: change omp param

Signed-off-by: badai-nguyen <dai.nguyen@tier4.jp>
---
 .../config/pointpainting.param.yaml           |  3 ++
 .../pointpainting_ml_package.param.yaml       |  3 --
 .../schema/pointpainting.schema.json          | 49 ++++++++++++-------
 3 files changed, 33 insertions(+), 22 deletions(-)

diff --git a/perception/image_projection_based_fusion/config/pointpainting.param.yaml b/perception/image_projection_based_fusion/config/pointpainting.param.yaml
index 94c03611adedb..f6c8a50e45147 100644
--- a/perception/image_projection_based_fusion/config/pointpainting.param.yaml
+++ b/perception/image_projection_based_fusion/config/pointpainting.param.yaml
@@ -19,3 +19,6 @@
     densification_params:
       world_frame_id: "map"
       num_past_frames: 1
+    omp_params:
+      # omp params
+      num_threads: 1
diff --git a/perception/image_projection_based_fusion/config/pointpainting_ml_package.param.yaml b/perception/image_projection_based_fusion/config/pointpainting_ml_package.param.yaml
index 61d2124afb17a..0110f373de63d 100755
--- a/perception/image_projection_based_fusion/config/pointpainting_ml_package.param.yaml
+++ b/perception/image_projection_based_fusion/config/pointpainting_ml_package.param.yaml
@@ -9,6 +9,3 @@
       voxel_size: [0.32, 0.32, 8.0]
       downsample_factor: 1
       encoder_in_feature_size: 12
-    omp_params:
-      # omp params
-      num_threads: 1
diff --git a/perception/image_projection_based_fusion/schema/pointpainting.schema.json b/perception/image_projection_based_fusion/schema/pointpainting.schema.json
index 1bf093fcca4ad..d8e741d421df8 100644
--- a/perception/image_projection_based_fusion/schema/pointpainting.schema.json
+++ b/perception/image_projection_based_fusion/schema/pointpainting.schema.json
@@ -17,6 +17,23 @@
           "default": "fp16",
           "enum": ["fp32", "fp16"]
         },
+        "densification_params": {
+          "type": "object",
+          "description": "Parameters for pointcloud densification.",
+          "properties": {
+            "world_frame_id": {
+              "type": "string",
+              "description": "A name of frame id where world coordinates system is defined with respect to.",
+              "default": "map"
+            },
+            "num_past_frames": {
+              "type": "integer",
+              "description": "A number of past frames to be considered as same input frame.",
+              "default": 0,
+              "minimum": 0
+            }
+          }
+        },
         "post_process_params": {
           "type": "object",
           "properties": {
@@ -27,13 +44,6 @@
               "minimum": 0.0,
               "maximum": 1.0
             },
-            "yaw_norm_thresholds": {
-              "type": "array",
-              "description": "An array of distance threshold values of norm of yaw [rad].",
-              "default": [0.3, 0.3, 0.3, 0.3, 0.0],
-              "minimum": 0.0,
-              "maximum": 1.0
-            },
             "circle_nms_dist_threshold": {
               "type": "number",
               "description": "",
@@ -60,6 +70,13 @@
               "minimum": 0.0,
               "maximum": 1.0
             },
+            "yaw_norm_thresholds": {
+              "type": "array",
+              "description": "An array of distance threshold values of norm of yaw [rad].",
+              "default": [0.3, 0.3, 0.3, 0.3, 0.0],
+              "minimum": 0.0,
+              "maximum": 1.0
+            },
             "has_variance": {
               "type": "boolean",
               "description": "Indicates whether the model outputs variance value.",
@@ -72,25 +89,19 @@
             }
           }
         },
-        "densification_params": {
+        "omp_params": {
           "type": "object",
-          "description": "Parameters for pointcloud densification.",
           "properties": {
-            "world_frame_id": {
-              "type": "string",
-              "description": "A name of frame id where world coordinates system is defined with respect to.",
-              "default": "map"
-            },
-            "num_past_frames": {
+            "num_threads": {
               "type": "integer",
-              "description": "A number of past frames to be considered as same input frame.",
-              "default": 0,
-              "minimum": 0
+              "description": "The number of threads that is set to the environment variable OMP_NUM_THREADS.",
+              "default": 1,
+              "minimum": 1
             }
           }
         }
       },
-      "required": ["post_process_params", "densification_params"]
+      "required": ["densification_params", "post_process_params", "omp_params"]
     }
   },
   "properties": {

From dc19732f85af09597f733374dcd3a9487f410093 Mon Sep 17 00:00:00 2001
From: badai-nguyen <dai.nguyen@tier4.jp>
Date: Thu, 18 Apr 2024 21:02:24 +0900
Subject: [PATCH 17/20] fix: change twist and variance to model params

Signed-off-by: badai-nguyen <dai.nguyen@tier4.jp>
---
 .../config/pointpainting.param.yaml           |  2 --
 .../pointpainting_ml_package.param.yaml       |  2 ++
 .../src/pointpainting_fusion/node.cpp         |  4 +--
 .../config/centerpoint.param.yaml             |  2 --
 .../config/centerpoint_ml_package.param.yaml  |  2 ++
 .../config/centerpoint_sigma.param.yaml       | 27 -------------------
 .../centerpoint_sigma_ml_package.param.yaml   | 12 +++++++++
 .../centerpoint_tiny_ml_package.param.yaml    |  2 ++
 perception/lidar_centerpoint/src/node.cpp     |  4 +--
 9 files changed, 22 insertions(+), 35 deletions(-)
 delete mode 100644 perception/lidar_centerpoint/config/centerpoint_sigma.param.yaml
 create mode 100644 perception/lidar_centerpoint/config/centerpoint_sigma_ml_package.param.yaml

diff --git a/perception/image_projection_based_fusion/config/pointpainting.param.yaml b/perception/image_projection_based_fusion/config/pointpainting.param.yaml
index f6c8a50e45147..61dee9005fc7a 100644
--- a/perception/image_projection_based_fusion/config/pointpainting.param.yaml
+++ b/perception/image_projection_based_fusion/config/pointpainting.param.yaml
@@ -14,8 +14,6 @@
       iou_nms_threshold: 0.1
       score_threshold: 0.35
       yaw_norm_thresholds: [0.3, 0.3, 0.3, 0.3, 0.0]
-      has_twist: false
-      has_variance: false
     densification_params:
       world_frame_id: "map"
       num_past_frames: 1
diff --git a/perception/image_projection_based_fusion/config/pointpainting_ml_package.param.yaml b/perception/image_projection_based_fusion/config/pointpainting_ml_package.param.yaml
index 0110f373de63d..ca76a8ecf2dac 100755
--- a/perception/image_projection_based_fusion/config/pointpainting_ml_package.param.yaml
+++ b/perception/image_projection_based_fusion/config/pointpainting_ml_package.param.yaml
@@ -9,3 +9,5 @@
       voxel_size: [0.32, 0.32, 8.0]
       downsample_factor: 1
       encoder_in_feature_size: 12
+      has_twist: false
+      has_variance: false
diff --git a/perception/image_projection_based_fusion/src/pointpainting_fusion/node.cpp b/perception/image_projection_based_fusion/src/pointpainting_fusion/node.cpp
index f06521526fb90..2c33df0b65afc 100644
--- a/perception/image_projection_based_fusion/src/pointpainting_fusion/node.cpp
+++ b/perception/image_projection_based_fusion/src/pointpainting_fusion/node.cpp
@@ -140,8 +140,8 @@ PointPaintingFusionNode::PointPaintingFusionNode(const rclcpp::NodeOptions & opt
       isClassTable_.erase(cls);
     }
   }
-  has_twist_ = this->declare_parameter<bool>("post_process_params.has_twist");
-  has_variance_ = this->declare_parameter<bool>("post_process_params.has_variance");
+  has_twist_ = this->declare_parameter<bool>("model_params.has_twist");
+  has_variance_ = this->declare_parameter<bool>("model_params.has_variance");
   const std::size_t point_feature_size = static_cast<std::size_t>(
     this->declare_parameter<std::int64_t>("model_params.point_feature_size"));
   const std::size_t max_voxel_size =
diff --git a/perception/lidar_centerpoint/config/centerpoint.param.yaml b/perception/lidar_centerpoint/config/centerpoint.param.yaml
index 4c69bae9f124a..b08273986d86d 100644
--- a/perception/lidar_centerpoint/config/centerpoint.param.yaml
+++ b/perception/lidar_centerpoint/config/centerpoint.param.yaml
@@ -15,8 +15,6 @@
       iou_nms_threshold: 0.1
       score_threshold: 0.35
       yaw_norm_thresholds: [0.3, 0.3, 0.3, 0.3, 0.0]
-      has_variance: false
-      has_twist: false
     densification_params:
       world_frame_id: map
       num_past_frames: 1
diff --git a/perception/lidar_centerpoint/config/centerpoint_ml_package.param.yaml b/perception/lidar_centerpoint/config/centerpoint_ml_package.param.yaml
index 4639cd4b6817f..c8d17890e33be 100644
--- a/perception/lidar_centerpoint/config/centerpoint_ml_package.param.yaml
+++ b/perception/lidar_centerpoint/config/centerpoint_ml_package.param.yaml
@@ -8,3 +8,5 @@
       voxel_size: [0.32, 0.32, 10.0]
       downsample_factor: 1
       encoder_in_feature_size: 9
+      has_variance: false
+      has_twist: false
diff --git a/perception/lidar_centerpoint/config/centerpoint_sigma.param.yaml b/perception/lidar_centerpoint/config/centerpoint_sigma.param.yaml
deleted file mode 100644
index c217f6321381a..0000000000000
--- a/perception/lidar_centerpoint/config/centerpoint_sigma.param.yaml
+++ /dev/null
@@ -1,27 +0,0 @@
-/**:
-  ros__parameters:
-    class_names: ["CAR", "TRUCK", "BUS", "BICYCLE", "PEDESTRIAN"]
-    point_feature_size: 4
-    max_voxel_size: 40000
-    point_cloud_range: [-76.8, -76.8, -4.0, 76.8, 76.8, 6.0]
-    voxel_size: [0.32, 0.32, 10.0]
-    downsample_factor: 1
-    encoder_in_feature_size: 9
-    # post-process params
-    circle_nms_dist_threshold: 0.5
-    iou_nms_target_class_names: ["CAR"]
-    iou_nms_search_distance_2d: 10.0
-    iou_nms_threshold: 0.1
-    yaw_norm_thresholds: [0.3, 0.3, 0.3, 0.3, 0.0]
-    score_threshold: 0.35
-    has_variance: true
-    has_twist: true
-    trt_precision: fp16
-    densification_num_past_frames: 1
-    densification_world_frame_id: map
-
-    # weight files
-    encoder_onnx_path: "$(var model_path)/pts_voxel_encoder_$(var model_name).onnx"
-    encoder_engine_path: "$(var model_path)/pts_voxel_encoder_$(var model_name).engine"
-    head_onnx_path: "$(var model_path)/pts_backbone_neck_head_$(var model_name).onnx"
-    head_engine_path: "$(var model_path)/pts_backbone_neck_head_$(var model_name).engine"
diff --git a/perception/lidar_centerpoint/config/centerpoint_sigma_ml_package.param.yaml b/perception/lidar_centerpoint/config/centerpoint_sigma_ml_package.param.yaml
new file mode 100644
index 0000000000000..c8d17890e33be
--- /dev/null
+++ b/perception/lidar_centerpoint/config/centerpoint_sigma_ml_package.param.yaml
@@ -0,0 +1,12 @@
+/**:
+  ros__parameters:
+    model_params:
+      class_names: ["CAR", "TRUCK", "BUS", "BICYCLE", "PEDESTRIAN"]
+      point_feature_size: 4
+      max_voxel_size: 40000
+      point_cloud_range: [-76.8, -76.8, -4.0, 76.8, 76.8, 6.0]
+      voxel_size: [0.32, 0.32, 10.0]
+      downsample_factor: 1
+      encoder_in_feature_size: 9
+      has_variance: false
+      has_twist: false
diff --git a/perception/lidar_centerpoint/config/centerpoint_tiny_ml_package.param.yaml b/perception/lidar_centerpoint/config/centerpoint_tiny_ml_package.param.yaml
index 5c6b6dddb5772..2a3f9c8e75290 100644
--- a/perception/lidar_centerpoint/config/centerpoint_tiny_ml_package.param.yaml
+++ b/perception/lidar_centerpoint/config/centerpoint_tiny_ml_package.param.yaml
@@ -8,3 +8,5 @@
       voxel_size: [0.32, 0.32, 10.0]
       downsample_factor: 2
       encoder_in_feature_size: 9
+      has_variance: false
+      has_twist: false
diff --git a/perception/lidar_centerpoint/src/node.cpp b/perception/lidar_centerpoint/src/node.cpp
index eac807bc9527e..d0abf72d888d3 100644
--- a/perception/lidar_centerpoint/src/node.cpp
+++ b/perception/lidar_centerpoint/src/node.cpp
@@ -55,10 +55,10 @@ LidarCenterPointNode::LidarCenterPointNode(const rclcpp::NodeOptions & node_opti
   const std::string head_onnx_path = this->declare_parameter<std::string>("head_onnx_path");
   const std::string head_engine_path = this->declare_parameter<std::string>("head_engine_path");
   class_names_ = this->declare_parameter<std::vector<std::string>>("model_params.class_names");
-  has_twist_ = this->declare_parameter<bool>("post_process_params.has_twist");
+  has_twist_ = this->declare_parameter<bool>("model_params.has_twist");
   const std::size_t point_feature_size = static_cast<std::size_t>(
     this->declare_parameter<std::int64_t>("model_params.point_feature_size"));
-  has_variance_ = this->declare_parameter<bool>("post_process_params.has_variance");
+  has_variance_ = this->declare_parameter<bool>("model_params.has_variance");
   const std::size_t max_voxel_size =
     static_cast<std::size_t>(this->declare_parameter<std::int64_t>("model_params.max_voxel_size"));
   const auto point_cloud_range =

From ad45a92da551d71abcfa00b76bd4b8e40aca5244 Mon Sep 17 00:00:00 2001
From: badai-nguyen <dai.nguyen@tier4.jp>
Date: Wed, 24 Apr 2024 16:41:06 +0900
Subject: [PATCH 18/20] fix: keep build_only in launch

Signed-off-by: badai-nguyen <dai.nguyen@tier4.jp>
---
 .../config/pointpainting.param.yaml                      | 1 -
 .../launch/pointpainting_fusion.launch.xml               | 9 +++++++++
 .../schema/pointpainting.schema.json                     | 5 -----
 .../lidar_centerpoint/config/centerpoint.param.yaml      | 1 -
 .../launch/lidar_centerpoint.launch.xml                  | 7 +++++++
 .../lidar_centerpoint/schema/centerpoint.schemal.json    | 5 -----
 6 files changed, 16 insertions(+), 12 deletions(-)

diff --git a/perception/image_projection_based_fusion/config/pointpainting.param.yaml b/perception/image_projection_based_fusion/config/pointpainting.param.yaml
index 61dee9005fc7a..30136bc8f47d0 100644
--- a/perception/image_projection_based_fusion/config/pointpainting.param.yaml
+++ b/perception/image_projection_based_fusion/config/pointpainting.param.yaml
@@ -4,7 +4,6 @@
     encoder_engine_path: "$(var model_path)/pts_voxel_encoder_$(var model_name).engine"
     head_onnx_path: "$(var model_path)/pts_backbone_neck_head_$(var model_name).onnx"
     head_engine_path: "$(var model_path)/pts_backbone_neck_head_$(var model_name).engine"
-    build_only: false
     trt_precision: fp16
     post_process_params:
       # post-process params
diff --git a/perception/image_projection_based_fusion/launch/pointpainting_fusion.launch.xml b/perception/image_projection_based_fusion/launch/pointpainting_fusion.launch.xml
index db6be77d2ff74..a22b2c2a13bda 100644
--- a/perception/image_projection_based_fusion/launch/pointpainting_fusion.launch.xml
+++ b/perception/image_projection_based_fusion/launch/pointpainting_fusion.launch.xml
@@ -25,6 +25,7 @@
   <arg name="ml_package_param_path" default="$(var model_path)/$(var model_name)_ml_package.param.yaml"/>
   <arg name="class_remapper_param_path" default="$(var model_path)/detection_class_remapper.param.yaml"/>
   <arg name="sync_param_path" default="$(find-pkg-share image_projection_based_fusion)/config/roi_sync.param.yaml"/>
+  <arg name="build_only" default="false" description="shutdown node after TensorRT engine file is built"/>
 
   <arg name="use_pointcloud_container" default="false" description="use pointcloud_container"/>
   <arg name="pointcloud_container_name" default="pointcloud_container" description="pointcloud_container name"/>
@@ -52,6 +53,10 @@
         <param from="$(var sync_param_path)"/>
         <param from="$(var class_remapper_param_path)"/>
         <param name="rois_number" value="$(var input/rois_number)"/>
+
+        <!-- This parameter shall NOT be included in param file. See also: https://github.com/autowarefoundation/autoware.universe/pull/6169 -->
+        <param name="build_only" value="$(var build_only)"/>
+
         <!-- rois, camera and info -->
         <param name="input/rois0" value="$(var input/rois0)"/>
         <param name="input/camera_info0" value="$(var input/camera_info0)"/>
@@ -90,6 +95,10 @@
       <param from="$(var sync_param_path)"/>
       <param from="$(var class_remapper_param_path)"/>
       <param name="rois_number" value="$(var input/rois_number)"/>
+
+      <!-- This parameter shall NOT be included in param file. See also: https://github.com/autowarefoundation/autoware.universe/pull/6169 -->
+      <param name="build_only" value="$(var build_only)"/>
+
       <!-- rois, camera and info -->
       <param name="input/rois0" value="$(var input/rois0)"/>
       <param name="input/camera_info0" value="$(var input/camera_info0)"/>
diff --git a/perception/image_projection_based_fusion/schema/pointpainting.schema.json b/perception/image_projection_based_fusion/schema/pointpainting.schema.json
index d8e741d421df8..9143799732acd 100644
--- a/perception/image_projection_based_fusion/schema/pointpainting.schema.json
+++ b/perception/image_projection_based_fusion/schema/pointpainting.schema.json
@@ -6,11 +6,6 @@
     "pointpainting": {
       "type": "object",
       "properties": {
-        "build_only": {
-          "type": "boolean",
-          "description": "shutdown the node after TensorRT engine file is built.",
-          "default": false
-        },
         "trt_precision": {
           "type": "string",
           "description": "TensorRT inference precision.",
diff --git a/perception/lidar_centerpoint/config/centerpoint.param.yaml b/perception/lidar_centerpoint/config/centerpoint.param.yaml
index b08273986d86d..17ed9a447cdb8 100644
--- a/perception/lidar_centerpoint/config/centerpoint.param.yaml
+++ b/perception/lidar_centerpoint/config/centerpoint.param.yaml
@@ -5,7 +5,6 @@
     encoder_engine_path: "$(var model_path)/pts_voxel_encoder_$(var model_name).engine"
     head_onnx_path: "$(var model_path)/pts_backbone_neck_head_$(var model_name).onnx"
     head_engine_path: "$(var model_path)/pts_backbone_neck_head_$(var model_name).engine"
-    build_only: false
     trt_precision: fp16
     post_process_params:
       # post-process params
diff --git a/perception/lidar_centerpoint/launch/lidar_centerpoint.launch.xml b/perception/lidar_centerpoint/launch/lidar_centerpoint.launch.xml
index c3565b386060e..7b2441578a14c 100644
--- a/perception/lidar_centerpoint/launch/lidar_centerpoint.launch.xml
+++ b/perception/lidar_centerpoint/launch/lidar_centerpoint.launch.xml
@@ -8,6 +8,7 @@
   <arg name="model_param_path" default="$(find-pkg-share lidar_centerpoint)/config/centerpoint.param.yaml"/>
   <arg name="ml_package_param_path" default="$(var model_path)/$(var model_name)_ml_package.param.yaml"/>
   <arg name="class_remapper_param_path" default="$(var model_path)/detection_class_remapper.param.yaml"/>
+  <arg name="build_only" default="false" description="shutdown node after TensorRT engine file is built"/>
 
   <arg name="use_pointcloud_container" default="false" description="use pointcloud_container"/>
   <arg name="pointcloud_container_name" default="pointcloud_container" description="pointcloud_container name"/>
@@ -20,6 +21,9 @@
         <param from="$(var model_param_path)" allow_substs="true"/>
         <param from="$(var ml_package_param_path)" allow_substs="true"/>
         <param from="$(var class_remapper_param_path)"/>
+
+        <!-- This parameter shall NOT be included in param file. See also: https://github.com/autowarefoundation/autoware.universe/pull/6167 -->
+        <param name="build_only" value="$(var build_only)"/>
       </composable_node>
     </load_composable_node>
   </group>
@@ -30,6 +34,9 @@
       <param from="$(var model_param_path)" allow_substs="true"/>
       <param from="$(var ml_package_param_path)" allow_substs="true"/>
       <param from="$(var class_remapper_param_path)"/>
+
+      <!-- This parameter shall NOT be included in param file. See also: https://github.com/autowarefoundation/autoware.universe/pull/6167 -->
+      <param name="build_only" value="$(var build_only)"/>
     </node>
   </group>
 </launch>
diff --git a/perception/lidar_centerpoint/schema/centerpoint.schemal.json b/perception/lidar_centerpoint/schema/centerpoint.schemal.json
index d8abf5c00be0e..b11c115cc2466 100644
--- a/perception/lidar_centerpoint/schema/centerpoint.schemal.json
+++ b/perception/lidar_centerpoint/schema/centerpoint.schemal.json
@@ -6,11 +6,6 @@
     "centerpoint": {
       "type": "object",
       "properties": {
-        "build_only": {
-          "type": "boolean",
-          "description": "shutdown the node after TensorRT engine file is built.",
-          "default": false
-        },
         "trt_precision": {
           "type": "string",
           "description": "TensorRT inference precision.",

From ef78f81041bdb4f778425485579e9f3034417eed Mon Sep 17 00:00:00 2001
From: badai-nguyen <dai.nguyen@tier4.jp>
Date: Fri, 26 Apr 2024 20:09:49 +0900
Subject: [PATCH 19/20] fix: schema check

Signed-off-by: badai-nguyen <dai.nguyen@tier4.jp>
---
 .../schema/pointpainting_ml_package.schema.json     | 13 +------------
 1 file changed, 1 insertion(+), 12 deletions(-)

diff --git a/perception/image_projection_based_fusion/schema/pointpainting_ml_package.schema.json b/perception/image_projection_based_fusion/schema/pointpainting_ml_package.schema.json
index 9617bad77ea21..05ed765ed2ad0 100644
--- a/perception/image_projection_based_fusion/schema/pointpainting_ml_package.schema.json
+++ b/perception/image_projection_based_fusion/schema/pointpainting_ml_package.schema.json
@@ -54,20 +54,9 @@
               "default": 12
             }
           }
-        },
-        "omp_params": {
-          "type": "object",
-          "properties": {
-            "num_threads": {
-              "type": "integer",
-              "description": "The number of threads that is set to the environment variable OMP_NUM_THREADS.",
-              "default": 1,
-              "minimum": 1
-            }
-          }
         }
       },
-      "required": ["model_params", "omp_params"]
+      "required": ["model_params"]
     }
   },
   "properties": {

From 70f4388aea43f3d677207192ae211ec6b7839b60 Mon Sep 17 00:00:00 2001
From: badai-nguyen <dai.nguyen@tier4.jp>
Date: Fri, 26 Apr 2024 20:54:28 +0900
Subject: [PATCH 20/20] chore: temporarily remove schema required fields

Signed-off-by: badai-nguyen <dai.nguyen@tier4.jp>
---
 .../schema/pointpainting.schema.json                            | 2 +-
 .../schema/pointpainting_ml_package.schema.json                 | 2 +-
 2 files changed, 2 insertions(+), 2 deletions(-)

diff --git a/perception/image_projection_based_fusion/schema/pointpainting.schema.json b/perception/image_projection_based_fusion/schema/pointpainting.schema.json
index 9143799732acd..57d8bc500f3c6 100644
--- a/perception/image_projection_based_fusion/schema/pointpainting.schema.json
+++ b/perception/image_projection_based_fusion/schema/pointpainting.schema.json
@@ -96,7 +96,7 @@
           }
         }
       },
-      "required": ["densification_params", "post_process_params", "omp_params"]
+      "required": []
     }
   },
   "properties": {
diff --git a/perception/image_projection_based_fusion/schema/pointpainting_ml_package.schema.json b/perception/image_projection_based_fusion/schema/pointpainting_ml_package.schema.json
index 05ed765ed2ad0..0f89d050c1828 100644
--- a/perception/image_projection_based_fusion/schema/pointpainting_ml_package.schema.json
+++ b/perception/image_projection_based_fusion/schema/pointpainting_ml_package.schema.json
@@ -56,7 +56,7 @@
           }
         }
       },
-      "required": ["model_params"]
+      "required": []
     }
   },
   "properties": {