8.1 Opening the camera and acquiring point cloud data at the same time (Python)

The code is as follows:

#!/usr/bin/env python
# -*- coding: utf-8 -*-
import cv2

import rospy
from sensor_msgs.msg import PointCloud2
import sensor_msgs.point_cloud2 as pc2
from std_msgs.msg import Header
from visualization_msgs.msg import Marker, MarkerArray
from geometry_msgs.msg import Point
 
#import torch
import numpy as np
import sys
import time
print(sys.version)
#from recon_barriers_model import recon_barriers
#from pclpy import pcl
from queue import Queue
 
import matplotlib
import matplotlib.pyplot as plt
from mpl_toolkits.mplot3d import Axes3D
#%matplotlib
 
#Simple region-growing clustering of the point cloud
def cluster(points, radius=0.2):
    """
    points: pointcloud
    radius: max cluster range
    """
    items = []
    while len(points)>1:
        item = np.array([points[0]])
        base = points[0]
        points = np.delete(points, 0, 0)
        distance = (points[:,0]-base[0])**2+(points[:,1]-base[1])**2+(points[:,2]-base[2])**2
        infected_points = np.where(distance <= radius**2)
        item = np.append(item, points[infected_points], axis=0)
        border_points = points[infected_points]
        points = np.delete(points, infected_points, 0)
        while len(border_points) > 0:
            border_base = border_points[0]
            border_points = np.delete(border_points, 0, 0)
            border_distance = (points[:,0]-border_base[0])**2+(points[:,1]-border_base[1])**2+(points[:,2]-border_base[2])**2
            border_infected_points = np.where(border_distance <= radius**2)
            item = np.append(item, points[border_infected_points], axis=0)
            border_points = points[border_infected_points]
            points = np.delete(points, border_infected_points, 0)
        items.append(item)
    return items
 
 
#Filter the acquired point cloud down to a region of interest
def recon_barriers(filename, msg_1s):
    # filename is the incoming PointCloud2 message; msg_1s is the previous
    # message popped from the queue (kept for later use, currently unused).
    pcl_msg = pc2.read_points(filename, skip_nans=False, field_names=(
        "x", "y", "z", "intensity", "ring"))
    
    np_p_2 = np.array(list(pcl_msg), dtype=np.float32)
    print("===>",np_p_2.shape)
    
    # Region-of-interest filter: x > 2 m, y within (-3, 3) m, z > -0.5 m
    ss = np.where([s[0] > 2 and s[1] < 3 and s[1] > -3 and s[2] > -0.5 for s in np_p_2])
    #print(len(ss[0]))
    #print(ss[0])
    hh=np_p_2[ss]
    print(hh.shape)
    return hh
 
def velo_callback(msg):
    pcl_msg = pc2.read_points(msg, skip_nans=False, field_names=(
       "x", "y", "z", "intensity","ring"))
    print(type(pcl_msg))
    global max_marker_size_, frequence
    # Only run the full processing on every other scan; the one-slot queue
    # always hands back the previous message as msg_1s.
    frequence += 1
    if frequence % 2 == 0:
        q.put(msg)
        msg_1s = q.get()
    else:
        q.put(msg)
        msg_1s = q.get()
        ans = recon_barriers(msg, msg_1s)
        
        
        item = cluster(ans, radius=0.2)
        m_item = []
        for items in item:
            print("..............", items.shape)
            # x, y, z: centroid of the cluster; r: mean intensity
            x, y, z, r = items[:, 0].mean(), items[:, 1].mean(), items[:, 2].mean(), items[:, 3].mean()
            m_item.append([x, y, z])
        
        print("=====+++++>>>>",len(item))
        print(len(item[0]))
        print(m_item)
        
        fig = plt.figure()
        ax = Axes3D(fig)
        fig = plt.figure()
        ax = Axes3D(fig)
        #ax.scatter(item[:,0], item[:,1], item[:,2], s=1)
        #fig.show()
 
 
if __name__ == '__main__':
    #  code added for using ROS
    frequence = 0
    q = Queue()
    q.put(None)
    rospy.init_node('lidar_node')

    # Set up the ROS I/O once, outside the camera loop; the subscriber
    # callback runs in its own thread while the loop below shows the camera.
    sub_ = rospy.Subscriber("livox/lidar", PointCloud2,
                            velo_callback, queue_size=100)
    pub_arr_bbox = rospy.Publisher(
        "visualization_marker", MarkerArray, queue_size=100)
    print("ros_node has started!")

    cap = cv2.VideoCapture("/dev/video61")
    while cap.isOpened() and not rospy.is_shutdown():
        ret, frame = cap.read()
        if not ret:
            break

        # The camera is mounted upside down, so rotate the image by 90 degrees twice
        frame = cv2.rotate(frame, cv2.ROTATE_90_CLOCKWISE)
        frame = cv2.rotate(frame, cv2.ROTATE_90_CLOCKWISE)

        cv2.imshow("src_image", frame)
        cv2.waitKey(1)

    cap.release()
    cv2.destroyAllWindows()
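
For reference, the cluster() function above can be exercised on its own (for example in an interpreter session after the script's functions are defined); the sample points below are made up:

import numpy as np

# Two pairs of points: each pair is ~0.1 m apart, the pairs are ~3 m apart
pts = np.array([[0.0, 0.0, 0.0, 10, 1],
                [0.1, 0.0, 0.0, 12, 1],
                [3.0, 0.0, 0.0,  9, 1],
                [3.1, 0.0, 0.0,  8, 1]], dtype=np.float32)

groups = cluster(pts, radius=0.2)
print(len(groups))                  # 2 clusters
print([g.shape for g in groups])    # [(2, 5), (2, 5)]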
 
 
 
 

You can also grab the current time in the code and use it to save the camera frames and point cloud data to disk.
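
As a minimal sketch of such a save step (the helper name save_snapshot and the output directory are made up; it assumes frame is the current OpenCV image and points is the filtered array returned by recon_barriers):

import time
import cv2
import numpy as np

def save_snapshot(frame, points, out_dir="."):
    # Use one timestamp for both files so the image and the cloud stay paired.
    stamp = time.strftime("%Y%m%d_%H%M%S")
    cv2.imwrite("%s/image_%s.png" % (out_dir, stamp), frame)   # camera frame
    np.save("%s/cloud_%s.npy" % (out_dir, stamp), points)      # (N, 5) x, y, z, intensity, ring array

A call such as save_snapshot(frame, ans) inside the processing branch of velo_callback would then write one paired snapshot per processed scan (this requires making frame visible to the callback, e.g. via a global variable).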