@@ -930,9 +930,15 @@ pcl::PCDWriter::generateHeaderBinary (const pcl::PCLPointCloud2 &cloud,
930930 " \n VERSION 0.7"
931931 " \n FIELDS" ;
932932
933+ auto fields = cloud.fields ;
934+ std::sort (fields.begin (), fields.end (), [](const auto & field_a, const auto & field_b)
935+ {
936+ return field_a.offset < field_b.offset ;
937+ });
938+
933939 // Compute the total size of the fields
934940 unsigned int fsize = 0 ;
935- for (const auto &field : cloud. fields )
941+ for (const auto &field : fields)
936942 fsize += field.count * getFieldSize (field.datatype );
937943
938944 // The size of the fields cannot be larger than point_step
@@ -945,20 +951,20 @@ pcl::PCDWriter::generateHeaderBinary (const pcl::PCLPointCloud2 &cloud,
   std::stringstream field_names, field_types, field_sizes, field_counts;
   // Check if the size of the fields is smaller than the size of the point step
   std::size_t toffset = 0;
-  for (std::size_t i = 0; i < cloud.fields.size (); ++i)
+  for (std::size_t i = 0; i < fields.size (); ++i)
   {
     // If field offsets do not match, then we need to create fake fields
-    if (toffset != cloud.fields[i].offset)
+    if (toffset != fields[i].offset)
     {
       // If we're at the last "valid" field
       int fake_offset = (i == 0) ?
                         // Use the current_field offset
-                        (cloud.fields[i].offset)
+                        (fields[i].offset)
                         :
                         // Else, do cur_field.offset - prev_field.offset + sizeof (prev_field)
-                        (cloud.fields[i].offset -
-                        (cloud.fields[i-1].offset +
-                         cloud.fields[i-1].count * getFieldSize (cloud.fields[i-1].datatype)));
+                        (fields[i].offset -
+                        (fields[i-1].offset +
+                         fields[i-1].count * getFieldSize (fields[i-1].datatype)));
 
       toffset += fake_offset;
 
@@ -969,11 +975,11 @@ pcl::PCDWriter::generateHeaderBinary (const pcl::PCLPointCloud2 &cloud,
     }
 
     // Add the regular dimension
-    toffset += cloud.fields[i].count * getFieldSize (cloud.fields[i].datatype);
-    field_names << " " << cloud.fields[i].name;
-    field_sizes << " " << pcl::getFieldSize (cloud.fields[i].datatype);
-    field_types << " " << pcl::getFieldType (cloud.fields[i].datatype);
-    int count = std::abs (static_cast<int> (cloud.fields[i].count));
+    toffset += fields[i].count * getFieldSize (fields[i].datatype);
+    field_names << " " << fields[i].name;
+    field_sizes << " " << pcl::getFieldSize (fields[i].datatype);
+    field_types << " " << pcl::getFieldType (fields[i].datatype);
+    int count = std::abs (static_cast<int> (fields[i].count));
     if (count == 0) count = 1;  // check for 0 counts (coming from older converter code)
     field_counts << " " << count;
   }
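
A minimal sketch of why the sort above matters, assuming the public `pcl::PCLPointCloud2` layout and `PCDWriter::generateHeaderBinary` API; the field names, offsets and sizes below are illustrative only. With the fields declared out of offset order, the generator now still emits FIELDS in memory-layout order, so the fake "_" padding entries are computed against the correct neighbouring field:

// Sketch only: fields declared in reverse offset order on purpose (illustrative values).
#include <pcl/PCLPointCloud2.h>
#include <pcl/io/pcd_io.h>
#include <iostream>

int main ()
{
  pcl::PCLPointCloud2 cloud;
  cloud.height = 1;
  cloud.width  = 1;

  pcl::PCLPointField fy;   // declared first, but lives at offset 4
  fy.name = "y"; fy.offset = 4; fy.datatype = pcl::PCLPointField::FLOAT32; fy.count = 1;
  pcl::PCLPointField fx;   // declared second, but lives at offset 0
  fx.name = "x"; fx.offset = 0; fx.datatype = pcl::PCLPointField::FLOAT32; fx.count = 1;
  cloud.fields = {fy, fx};

  cloud.point_step = 8;
  cloud.row_step   = cloud.point_step * cloud.width;
  cloud.data.resize (cloud.row_step, 0);

  pcl::PCDWriter writer;
  // With the offset sort in place, FIELDS comes out as "x y" (layout order),
  // not "y x" (declaration order), and the offsets in the loop above line up.
  std::cout << writer.generateHeaderBinary (cloud, Eigen::Vector4f::Zero (),
                                            Eigen::Quaternionf::Identity ());
}
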
@@ -1173,6 +1179,29 @@ pcl::PCDWriter::writeASCII (const std::string &file_name, const pcl::PCLPointClo
   return (0);
 }
 
+///////////////////////////////////////////////////////////////////////////////////////////////////////////////////
+int
+pcl::PCDWriter::writeBinary (std::ostream &os, const pcl::PCLPointCloud2 &cloud,
+                             const Eigen::Vector4f &origin, const Eigen::Quaternionf &orientation)
+{
+  if (cloud.data.empty ())
+  {
+    PCL_WARN ("[pcl::PCDWriter::writeBinary] Input point cloud has no data!\n");
+  }
+  if (cloud.fields.empty ())
+  {
+    PCL_ERROR ("[pcl::PCDWriter::writeBinary] Input point cloud has no field data!\n");
+    return (-1);
+  }
+
+  os.imbue (std::locale::classic ());
+  os << generateHeaderBinary (cloud, origin, orientation) << "DATA binary\n";
+  std::copy (cloud.data.cbegin (), cloud.data.cend (), std::ostream_iterator<char> (os));
+  os.flush ();
+
+  return (os ? 0 : -1);
+}
+
 ///////////////////////////////////////////////////////////////////////////////////////////////////////////////////
 int
 pcl::PCDWriter::writeBinary (const std::string &file_name, const pcl::PCLPointCloud2 &cloud,
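
A usage sketch for the new stream overload added above (same single-field illustrative cloud as before; assumes the overload is declared in pcd_io.h with the signature shown), serialising a binary PCD into an in-memory buffer instead of a file:

// Sketch only: write a binary PCD into a std::ostringstream rather than to disk.
#include <pcl/PCLPointCloud2.h>
#include <pcl/io/pcd_io.h>
#include <sstream>
#include <iostream>

int main ()
{
  pcl::PCLPointCloud2 cloud;
  cloud.height = 1;
  cloud.width  = 1;
  pcl::PCLPointField fx;
  fx.name = "x"; fx.offset = 0; fx.datatype = pcl::PCLPointField::FLOAT32; fx.count = 1;
  cloud.fields = {fx};
  cloud.point_step = 4;
  cloud.row_step   = cloud.point_step * cloud.width;
  cloud.data.resize (cloud.row_step, 0);   // illustrative: one zeroed point

  std::ostringstream os (std::ios::out | std::ios::binary);
  pcl::PCDWriter writer;
  const int res = writer.writeBinary (os, cloud,
                                      Eigen::Vector4f::Zero (),
                                      Eigen::Quaternionf::Identity ());
  // The full PCD (header + raw point data) is now available via os.str ().
  std::cout << "writeBinary returned " << res
            << ", buffer holds " << os.str ().size () << " bytes\n";
}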