txt -> bin :
#!/usr/bin/env python
# coding: utf-8
import shutil
import numpy as np
import os
import glob
#from tqdm import tqdm
import multiprocessing
from argparse import ArgumentParser, SUPPRESS
def build_argparser():
    """Create the command-line parser for the txt -> bin converter.

    Required options: -i input .txt directory, -o output .bin directory,
    -n number of worker processes.
    """
    parser = ArgumentParser(add_help=False)
    opts = parser.add_argument_group('Options')
    opts.add_argument('-h', '--help', action='help', default=SUPPRESS,
                      help='Show this help message and exit.')
    opts.add_argument("-i", "--original_txt_path", required=True, type=str,
                      help="Required. The absolute path of the input lidar .txt format data.")
    opts.add_argument("-o", "--now_bin_path", required=True, type=str,
                      help="Required. The absolute path of the output lidar .bin format data.")
    opts.add_argument("-n", "--num_processes", required=True, type=int,
                      help="Required. Number of processes,recommended 4-36")
    return parser
# Script-style module: CLI arguments are parsed at import time.
args = build_argparser().parse_args()
original_path = args.original_txt_path  # input directory of .txt clouds (author note: append a trailing '/')
now_path = args.now_bin_path  # output directory for the per-frame .bin files
num_processes = args.num_processes  # size of the multiprocessing pool
def try_multiP_read_txt_create_bin(txt_path, now_path):
    """Split one multi-frame .txt point cloud into per-frame .bin files.

    Expected .txt layout (repeated per frame): a line holding only the
    point count, followed by that many "x y z intensity ..." lines.
    Each frame is written as float32 to
    ``<now_path>/<field0>_<field1>_<frame%04d>__<count>.bin``.

    Assumes the file stem contains a space separating two name fields
    (e.g. "scene 01.txt") — TODO confirm against the data set.

    :param txt_path: path to the source .txt file (split on '/')
    :param now_path: existing output directory for the .bin files
    """
    with open(txt_path, 'r') as f:
        datas = f.readlines()
    # Derive the two name fields from the file stem (keeps the original
    # split('.')[-2] behaviour for multi-dot names).
    txt_name = ((txt_path.split('/')[-1]).split('.')[-2]).split(' ')
    j = 0
    for i in range(len(datas)):
        # BUGFIX: the original used replace('n', '') — a no-op that was
        # clearly meant to be '\n'. The un-stripped count line then leaked
        # a raw newline into the output filename. strip() fixes both.
        data = datas[i].strip().split(' ')
        if len(data) == 1 and data[0]:
            # Count line: the next int(data[0]) lines belong to this frame.
            index = i + 1 + int(data[0])
            bin_path = (now_path + os.sep + txt_name[0] + "_" + txt_name[1]
                        + "_" + "%04d" % (j + 1) + "__" + data[0] + ".bin")
            frame_datas = datas[i + 1:index]
            frame_datas = np.array(
                [line.strip().split(' ') for line in frame_datas]
            ).astype(np.float32)
            frame_datas.tofile(bin_path)
            print(bin_path)
            j += 1
def read_txt_create_bin(original_path, now_path):
    """Convert every *.txt in ``original_path`` to .bin files in ``now_path``.

    Recreates ``now_path`` from scratch, then fans the per-file work out to
    a pool of ``num_processes`` workers (module-level global from the CLI).

    :param original_path: directory containing the source .txt files
    :param now_path: output directory (deleted and recreated)
    :raises: re-raises any exception hit inside a worker (see below)
    """
    if os.path.exists(now_path):
        shutil.rmtree(now_path)
    # makedirs instead of mkdir so a nested output path also works.
    os.makedirs(now_path)
    pool = multiprocessing.Pool(processes=num_processes)
    # BUGFIX: the original discarded the AsyncResult objects, so any
    # exception raised in a worker was silently swallowed. Keep them and
    # call .get() after join() to surface failures.
    results = []
    for txt_path in sorted(glob.glob(original_path + '/' + '*.txt')):
        results.append(pool.apply_async(try_multiP_read_txt_create_bin,
                                        (txt_path, now_path)))
    pool.close()
    pool.join()
    for res in results:
        res.get()
# Run the conversion; the completion banner is printed three times on
# purpose so the end of a long multi-process run is easy to spot in logs.
read_txt_create_bin(original_path,now_path)
print("All files have been processed!!!")
print("All files have been processed!!!")
print("All files have been processed!!!")
bin -> pcd :
#!/usr/bin/env python
# coding: utf-8
import shutil
import numpy as np
import os
import glob
from tqdm import tqdm
import multiprocessing
from argparse import ArgumentParser, SUPPRESS
def build_argparser():
    """Create the command-line parser for the bin -> pcd converter.

    Required options: -i input .bin directory, -o output .pcd directory,
    -n number of worker processes.
    """
    parser = ArgumentParser(add_help=False)
    opts = parser.add_argument_group('Options')
    opts.add_argument('-h', '--help', action='help', default=SUPPRESS,
                      help='Show this help message and exit.')
    opts.add_argument("-i", "--original_bin_path", required=True, type=str,
                      help="Required. The absolute path of the input lidar .bin format data.")
    opts.add_argument("-o", "--now_pcd_path", required=True, type=str,
                      help="Required. The absolute path of the output lidar .pcd format data.")
    opts.add_argument("-n", "--num_processes", required=True, type=int,
                      help="Required. Number of processes,recommended 4-16")
    return parser
# Script-style module: CLI arguments are parsed at import time.
args = build_argparser().parse_args()
original_bin_path = args.original_bin_path  # input directory of .bin clouds (author note: append a trailing '/')
now_pcd_path = args.now_pcd_path  # output directory for the .pcd files
num_processes = args.num_processes  # size of the multiprocessing pool
def try_multiP_read_bin_create_pcd(bin_path, now_pcd_path):
    """Convert one float32 (x, y, z, intensity) .bin cloud to ASCII .pcd.

    Writes ``<now_pcd_path>/<stem>.pcd`` with a standard PCD v0.7 header
    followed by one "x y z intensity" line per point.

    :param bin_path: path to the source .bin file (flat float32, 4 per point)
    :param now_pcd_path: existing output directory for the .pcd file
    """
    save_pcd_path = now_pcd_path + '/' + (bin_path.split('/')[-1]).split('.')[-2] + '.pcd'
    points = np.fromfile(bin_path, dtype="float32").reshape(-1, 4)
    point_num = len(points)
    # BUGFIX: the original header strings contained a literal 'n' where
    # '\n' was intended (lost backslashes), producing an unreadable PCD.
    # Mode 'w' (was 'a') so a rerun cannot append onto stale output.
    with open(save_pcd_path, "w") as f:
        f.write('# .PCD v0.7 - Point Cloud Data file format\n')
        f.write('VERSION 0.7\n')
        f.write('FIELDS x y z intensity\n')
        f.write('SIZE 4 4 4 4\n')
        f.write('TYPE F F F F\n')
        f.write('COUNT 1 1 1 1\n')
        f.write('WIDTH ' + str(point_num) + '\n')
        f.write('HEIGHT 1\n')
        f.write('VIEWPOINT 0 0 0 1 0 0 0\n')
        f.write('POINTS ' + str(point_num) + '\n')
        f.write('DATA ascii')
        for p in points:
            f.write('\n' + str(p[0]) + ' ' + str(p[1]) + ' ' + str(p[2]) + ' ' + str(p[3]))
    print(save_pcd_path)
def read_bin_create_pcd(original_bin_path, now_pcd_path):
    """Convert every *.bin in ``original_bin_path`` to .pcd in ``now_pcd_path``.

    Recreates ``now_pcd_path`` from scratch, then fans the per-file work
    out to a pool of ``num_processes`` workers (module-level global).

    :param original_bin_path: directory containing the source .bin files
    :param now_pcd_path: output directory (deleted and recreated)
    :raises: re-raises any exception hit inside a worker (see below)
    """
    if os.path.exists(now_pcd_path):
        shutil.rmtree(now_pcd_path)
    # makedirs instead of mkdir so a nested output path also works.
    os.makedirs(now_pcd_path)
    pool = multiprocessing.Pool(num_processes)
    # BUGFIX: the original discarded the AsyncResult objects, so any
    # exception raised in a worker was silently swallowed. Keep them and
    # call .get() after join() to surface failures.
    results = []
    for bin_path in sorted(glob.glob(original_bin_path + '/' + '*.bin')):
        results.append(pool.apply_async(try_multiP_read_bin_create_pcd,
                                        (bin_path, now_pcd_path)))
    pool.close()
    pool.join()
    for res in results:
        res.get()
# Run the conversion; the completion banner is printed three times on
# purpose so the end of a long multi-process run is easy to spot in logs.
read_bin_create_pcd(original_bin_path,now_pcd_path)
print("All files have been processed!!!")
print("All files have been processed!!!")
print("All files have been processed!!!")



