点云叠帧_可视化的要求—真值的叠帧

发布时间 2023-07-26 18:06:41作者: 辰令

雷达Fusion

传感器包括可见光相机,激光雷达和毫米波雷达
 Feature-level Fusion(特征层融合)、Data-level Fusion (数据层融合)和 Decision-level Fusion (决策层融合)
 时序对齐-坐标统一(时间和空间)
  radar  lidar laser一般指激光  camera  
  角分辨率-测距距离-
array-Radar 相阵雷达  相控阵 相控阵雷达即 相位控制电子扫描阵列雷达-相位可以控制的天线阵
机械扫描 
  波长--频率 多普勒效应原理 
 被动式红外探头就是探测人体发射红外线而进行工作的  
 照明雷达感应器就是利用这种技术来达到智能控制照明的目的
 后融合
   图像模型--目标检测
   点云聚类--原始点云-处理漏检
 前融合

4D毫米波

-点云稠密--上千个带速度的点
	  俯仰角(pitch)
	  模型出距离-还是后处理出距离 
    4D点云毫米波雷达作为一种测距长、可测俯仰角、能满足多目标识别的新型雷达传感器
      自适应巡航控制(Adaptive Cruise Control,ACC)
	 3D是指它可以测量距离、方位和速度,这三个维度的信息。
	    4D毫米波雷达则又在此基础上,增加了俯仰角的测量能力,也就是能够检测到障碍物的高度信息。 
点云密度和点云质量,
  雷达波数--采样点数 毫米波雷达的点包括 X、Y(也可能有 Z)坐标,RCS(物体反射面积)和 Doppler(物体速度)。
  毫米波雷达的点云非常稀疏(几十 vs 几千)
 超声波是一种波长极短的机械波,在空气中波长一般短于2cm(厘米)它必须依靠介质进行传播
 机械波由机械振动产生,机械波的传播需要特定的介质,在不同介质中的传播速度也不同 声波、水波 超声波 声呐
 电磁波由电磁振荡产生;	电磁场
    按振动方向与传播方向的关系来分:主要有三种――横波、纵波、球面波
 光波,通常是指电磁波谱中的可见光。可见光通常是指频率范围在3.9×1014~7.5×1014Hz之间的电磁波, 
 波束 
   多普勒效应(Doppler Effect)  
    采样点数为512,发波个数为128,典型的3T4R前端射频芯片

防御-空天防御体系

 “顿河-2”远程警戒雷达系统-S-500绰号“普罗米修斯”
 宙斯盾 
 远程预警雷达系统
   破片杀伤  爆破杀伤方式-产生大量碎片
   动能杀伤战斗部(也就是通过撞击直接摧毁来袭导弹)
军用雷达动辄上百个天线,使得其角分辨率很高。但车载雷达体积尺寸受限,不能简单靠加天线去做这件事 


机控和线控 
 机控:即机械控制方式进行功能操作,也就是使用开关或按键操作一些常见的功能。 
     机械回路或者液压回路
 线控:即线路控制方式进行功能操作或处理。类似于电启动、数控显示、门开关等低压信号控制
    线控转向、线控油门、线控制动、线控悬架、线控换挡等

出行

安全、顺畅和自由 
安全第一位,无论何时、无论何地,拥有想走就走的自由-这是未来出行的最终目标

1.点云的坐标变换

 变换矩阵是 先旋转再平移-变换矩阵=先旋转再平移
       P = R*p + t1
  
 先平移再旋转的话
       P= R(p+t2)=R*p +R*t2 
	   
 其中平移矩阵之间的关系是 t1 = R*t2
01.点云变换
    直接点乘旋转矩阵

2.真值框的坐标变换

   xyz
   yaw
   from scipy.spatial.transform import Rotation 
   xyz旋转矩阵,yaw和角度之间变换
   
 import numpy as np	
 from scipy.spatial.transform import Rotation as R
 Rmatrix = R.from_euler('xyz',[0,0,-np.pi/2])
 print(Rmatrix.as_matrix())

示例代码

import numpy as np
import open3d as o3d
from scipy.spatial.transform import Rotation 
import argparse
import os
import json
from pathlib import Path
from tqdm import tqdm
import glob
import pickle as pkl

#######################
class Pose:
    """A rigid-body transform: quaternion rotation (x, y, z, w) plus translation."""

    def __init__(self, xyzw=np.float32([0., 0., 0., 1.]), tvec=np.float32([0., 0., 0.])):
        # Both components must already be numpy arrays; no implicit conversion here.
        assert isinstance(xyzw, np.ndarray)
        assert isinstance(tvec, np.ndarray)
        self.quat = xyzw
        self.tvec = tvec

    def __repr__(self):
        fmt = {'float_kind': lambda v: '%.2f' % v}
        tvec_text = np.array2string(self.tvec, formatter=fmt)
        return 'xyzw: {}, tvec: ({})'.format(self.quat, tvec_text)

    @property
    def matrix(self):
        """The equivalent 4x4 homogeneous transformation matrix (float32)."""
        mat = np.eye(4, dtype=np.float32)
        mat[:3, :3] = Rotation.from_quat(self.quat).as_matrix()
        mat[:3, 3] = self.tvec
        return mat

    @classmethod
    def from_matrix(cls, transformation_matrix):
        """Alternate constructor: build a Pose from a 4x4 homogeneous matrix."""
        rot = Rotation.from_matrix(transformation_matrix[:3, :3])
        return cls(xyzw=rot.as_quat(), tvec=np.float32(transformation_matrix[:3, 3]))

    def inverse(self):
        """Return the inverse transform as a new Pose."""
        inv = np.linalg.inv(self.matrix)
        return self.__class__(Rotation.from_matrix(inv[:3, :3]).as_quat(), inv[:3, 3])

    def transform_points(self, points):
        """Transform an (N, >=3) point array; returns the transformed (N, 3) xyz."""
        ones = np.ones((points.shape[0], 1), dtype=np.float32)
        homo = np.hstack([points[:, :3], ones])
        return (self.matrix @ homo.T).T[:, :3]

    def transform_gt_xyz_points(self, gt_labels):
        """Transform label centers (first 3 columns); returns the homogeneous (N, 4) result."""
        gt_labels = np.array(gt_labels)
        ones = np.ones((gt_labels.shape[0], 1), dtype=np.float32)
        homo = np.hstack([gt_labels[:, :3], ones])
        return (self.matrix @ homo.T).T

    def transform_gt_yaw(self, gt_labels):
        """Add this pose's z-rotation (yaw) to column 6 of gt_labels, in place, and return that column."""
        yaw_offset = Rotation.from_matrix(self.matrix[:3, :3]).as_euler("xyz")[2]
        gt_labels[:, 6] = gt_labels[:, 6] + yaw_offset
        return gt_labels[:, 6]

############################################ 
def load_pcd(pcd):
    """Read a .pcd file and return an (N, 4) float32 array: x, y, z, intensity."""
    cloud = o3d.t.io.read_point_cloud(pcd)
    coords = cloud.point['positions'].numpy()
    inten = cloud.point['intensity'].numpy()
    merged = np.hstack([coords, inten])
    return merged.astype(np.float32)

def save_pcd(pc, path):
    """Write an (N, >=4) array (x, y, z, intensity columns) to `path` as a .pcd file."""
    cloud = o3d.t.geometry.PointCloud()
    cloud.point['positions'] = o3d.core.Tensor(pc[:, :3])
    cloud.point['intensity'] = o3d.core.Tensor(pc[:, 3].reshape(-1, 1))
    o3d.t.io.write_point_cloud(path, cloud)


def load_json_label(gt_json_path):
    """Load 3D ground-truth boxes from an annotation json.

    Returns an (N, 7) numpy array [x, y, z, l, w, h, yaw], or None when the
    file does not exist (callers check for existence out of band).
    """
    gt_json_path = Path(gt_json_path)
    if not gt_json_path.exists():
        return None
    # Context manager closes the handle (the original used a bare open()).
    with open(gt_json_path, 'r') as f:
        gt_boxes = json.load(f)['labels']
    gt_npy = []
    for gtb in gt_boxes:
        # NOTE(review): the original line was garbled (referenced undefined
        # center_x/... names and had a syntax error). Assumes each label is a
        # dict with these keys — confirm against the annotation schema.
        gt_npy.append([gtb['center_x'], gtb['center_y'], gtb['center_z'],
                       gtb['l'], gtb['w'], gtb['h'], gtb['yaw']])
    return np.array(gt_npy)


def save_pkl_label(data_path, pkl_name, boxes):
    """Pickle `boxes` to data_path/pkl_name, creating the directory if needed."""
    os.makedirs(data_path, exist_ok=True)
    pred_path_file = os.path.join(data_path, pkl_name)
    # Context manager ensures the file is flushed and closed
    # (the original leaked the handle).
    with open(pred_path_file, 'wb') as fw:
        pkl.dump(boxes, fw)
  

def load_pkl_label(pkl_name):
    """Load pickled boxes and return them as an (N, 7) numpy array.

    Returns None when the file does not exist. Each pickled entry must be
    indexable with at least 7 elements [x, y, z, l, w, h, yaw].
    """
    gt_path = Path(pkl_name)
    if not gt_path.exists():
        return None
    # open() accepts Path objects directly; the original reached into the
    # private `_str` attribute and leaked the file handle.
    with open(gt_path, 'rb') as f:
        gt_boxes = pkl.load(f)
    gt_npy = [[gtb[0], gtb[1], gtb[2], gtb[3], gtb[4], gtb[5], gtb[6]]
              for gtb in gt_boxes]
    return np.array(gt_npy)

#############################################
def merge_pcd_or_annotation(root, step, sensor_data_dir, merge_type):
    """Accumulate `step` consecutive frames into one, for point clouds or GT boxes.

    root:            directory containing one sub-directory per recording batch.
    step:            number of consecutive frames merged into each output frame.
    sensor_data_dir: per-batch sub-directory holding <timestamp>.json sensor
                     info; info['lidar']['pose'] is read as the ego->world matrix.
    merge_type:      "pcd" merges point clouds from <batch>/perception/*.pcd;
                     anything else merges boxes from <batch>/annotations/*.json.

    Results are written to <batch>/merge_<kind>_<step>/ as .pcd or .pkl files,
    named after the first timestamp of each window.
    """
    print("merge_type:", merge_type)
    # merge_type only decides the input directory/extension; hoist out of the loop.
    if merge_type == "pcd":
        merge_dir_nm, merge_file_flag = "perception", 'pcd'
    else:
        merge_dir_nm, merge_file_flag = "annotations", 'json'
    for batch_nm in os.listdir(root):
        # was os.pathjoin — that attribute does not exist (AttributeError at runtime)
        root_path = os.path.join(root, batch_nm)
        pcds = glob.glob(root_path + r'/' + merge_dir_nm + r'/*.' + merge_file_flag)
        timestamps = [os.path.basename(p).split(r'.' + merge_file_flag)[0] for p in pcds]
        timestamps = sorted(timestamps)
        merge_path = os.path.join(root_path, 'merge_' + merge_dir_nm + '_' + str(step))
        os.makedirs(merge_path, exist_ok=True)
        print("merge_path: ", merge_path)

        # ego->world pose for every timestamp in this batch
        poses_dict = {}
        for ts in tqdm(timestamps):
            sensor_data_path = os.path.join(root_path, sensor_data_dir, ts + '.json')
            with open(sensor_data_path, 'r') as f:
                info = json.load(f)
            poses_dict[ts] = Pose.from_matrix(np.array(info['lidar']['pose']))

        cur_idx = 0
        while cur_idx < len(timestamps):
            target_ts = timestamps[cur_idx]
            selected = range(max(0, cur_idx), min(cur_idx + step, len(timestamps)))
            cur_idx += step
            timestamps_window = [timestamps[s] for s in selected]
            if len(timestamps_window) < 2:
                break  # trailing window too small to merge
            world_to_target_pose = poses_dict[target_ts].inverse()
            tracks = []
            if merge_type == "pcd":
                for ts in timestamps_window:
                    pcd_path = os.path.join(root_path, merge_dir_nm, ts + r'.' + merge_file_flag)
                    points = load_pcd(pcd_path)
                    pose = poses_dict[ts]
                    # frame -> world
                    points[:, :3] = pose.transform_points(points)
                    tracks.append({'points': points, 'pose': pose})
                all_points = np.zeros((0, 4), dtype=np.float32)
                for track in tracks:
                    points = track['points']
                    # world -> target (ego) frame
                    points[:, :3] = world_to_target_pose.transform_points(points)
                    all_points = np.vstack([all_points, points])
                save_pcd(all_points[:, :4], os.path.join(merge_path, str(target_ts) + '.pcd'))
            else:
                for ts in timestamps_window:
                    json_path = os.path.join(root_path, merge_dir_nm, ts + r'.' + merge_file_flag)
                    gt_boxes = load_json_label(json_path)
                    pose = poses_dict[ts]
                    # frame -> world: box centers go through the full transform,
                    # yaw only picks up the pose's z-rotation (euler yaw)
                    gt_boxes[:, :3] = pose.transform_gt_xyz_points(gt_boxes)[:, :3]
                    gt_boxes[:, 6] = pose.transform_gt_yaw(gt_boxes)
                    tracks.append({'gt_box': gt_boxes, 'pose': pose})
                all_gt_labels = np.zeros((0, 7), dtype=np.float32)
                for track in tracks:
                    # world -> target (ego) frame
                    gt_points = np.array(track['gt_box'])
                    gt_points[:, :3] = world_to_target_pose.transform_gt_xyz_points(gt_points)[:, :3]
                    gt_points[:, 6] = world_to_target_pose.transform_gt_yaw(gt_points)
                    all_gt_labels = np.vstack([all_gt_labels, gt_points])
                save_pkl_label(merge_path, target_ts + '.pkl', all_gt_labels)


def main():
    """Parse command-line arguments and run the frame-merging pipeline."""
    parser = argparse.ArgumentParser(description='arg parser')
    parser.add_argument('--root', type=str, default='/data/set')
    parser.add_argument('--sensor', type=str, default='data')
    parser.add_argument('--step', type=int, default=2)
    parser.add_argument('--type', type=str, default="pcd")
    args = parser.parse_args()
    # (root, step, sensor_dir, merge_type) — note sensor/step order differs
    # between the CLI declaration and the call signature.
    merge_pcd_or_annotation(args.root, args.step, args.sensor, args.type)
 
  
# Script entry point: only run the pipeline when executed directly.
if __name__ == '__main__':
    main()

参考

 Toyota Research Institute - Machine Learning 丰田 https://github.com/TRI-ML
   https://github.com/TRI-ML/dd3d/blob/main/tridet/structures/pose.py  
 韩松团队MIT HAN Lab
    https://github.com/mit-han-lab/bevfusion/blob/main/mmdet3d/core/utils/visualize.py
  OpenMMLab是一个适用于学术研究和工业应用的开源算法体系
    https://github.com/open-mmlab/OpenPCDet	
 https://github.com/zhulf0804/PointPillars/blob/main/utils/vis_o3d.py