Updated dependencies and fixed linter warnings

2024-08-09 18:44:29 +02:00
parent bd257cf2dd
commit c104a22407
11 changed files with 47 additions and 45 deletions

View File

@@ -39,17 +39,17 @@ ironsea_index_sfc_dbc = "0.1"
 ironsea_index_hashmap = "0.1"
 arrayref = "0.3" # For Positions Objects
-lazy_static = "1.4"
+lazy_static = "1.5"
 memmap = "0.7"
 serde = { version = "1.0", features = ["derive"] }
 serde_json = "1.0"
-bincode = "1.3.0"
+bincode = "1.3"
 # Logging macros API
 #log = { version = "0.4", features = ["max_level_trace", "release_max_level_info"] }
 log = { version = "0.4", features = ["max_level_trace", "release_max_level_trace"] }
 # Used for main.rs as integration test
-pretty_env_logger = { version = "0.3", optional = true } # Logger implementation
-measure_time = { version = "0.6", optional = true } # To mesure parsing time, only required by binary
+pretty_env_logger = { version = "0.5", optional = true } # Logger implementation
+measure_time = { version = "0.8", optional = true } # To mesure parsing time, only required by binary

View File

@@ -176,11 +176,7 @@ impl Core {
         // We cannot return less that the total number of individual Ids stored
         // in the index for a full-volume query.
-        let max_elements = if let Some(elem) = max_elements {
-            Some(elem.max(properties.len()))
-        } else {
-            None
-        };
+        let max_elements = max_elements.map(|elem| elem.max(properties.len()));

         for space in spaces {
             // Filter the points of this space, and encode them before creating the index.
@@ -196,7 +192,7 @@ impl Core {
                 object.set_position(space.encode(&position)?);
             }
-            space_dbs.push(SpaceDB::new(&space, filtered, scales.clone(), max_elements))
+            space_dbs.push(SpaceDB::new(space, filtered, scales.clone(), max_elements))
         }

         Ok(Core {
@@ -504,7 +500,7 @@ impl Core {
                 },
             }
         })
-        .flat_map(|v| v);
+        .flatten();

         // Select based on the volume, and filter out the label position themselves.
         for s in &self.space_db {
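
Note on the changes above: replacing the `if let Some … else { None }` block with `Option::map`, and `.flat_map(|v| v)` with `.flatten()`, matches clippy's `manual_map` and `flat_map_identity` suggestions. A minimal standalone sketch, using a hypothetical `cap` function rather than the crate's own types:

fn cap(max_elements: Option<usize>, floor: usize) -> Option<usize> {
    // Before: if let Some(elem) = max_elements { Some(elem.max(floor)) } else { None }
    // After: Option::map expresses the same transformation without the manual branch.
    max_elements.map(|elem| elem.max(floor))
}

fn main() {
    assert_eq!(cap(Some(3), 10), Some(10));
    assert_eq!(cap(None, 10), None);

    // .flat_map(|v| v) is an identity flat_map; .flatten() states the intent directly.
    let nested = vec![vec![1, 2], vec![3]];
    let flat: Vec<i32> = nested.into_iter().flatten().collect();
    assert_eq!(flat, vec![1, 2, 3]);
}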

View File

@@ -116,7 +116,7 @@ impl DataBase {
                 list.len()
             ))
         } else {
-            Ok(&list[0])
+            Ok(list[0])
         }
     }

View File

@@ -206,10 +206,10 @@ impl From<u64> for Coordinate {
         // Slight syntax hack, as exclusive ranges are not yet available.
         // cf: https://github.com/rust-lang/rust/issues/37854
         match v {
-            _ if v <= u64::from(std::u8::MAX) => Coordinate::CoordinateU8(v as u8),
-            _ if v <= u64::from(std::u16::MAX) => Coordinate::CoordinateU16(v as u16),
-            _ if v <= u64::from(std::u32::MAX) => Coordinate::CoordinateU32(v as u32),
-            _ => Coordinate::CoordinateU64(v as u64),
+            _ if v <= u64::from(u8::MAX) => Coordinate::CoordinateU8(v as u8),
+            _ if v <= u64::from(u16::MAX) => Coordinate::CoordinateU16(v as u16),
+            _ if v <= u64::from(u32::MAX) => Coordinate::CoordinateU32(v as u32),
+            _ => Coordinate::CoordinateU64(v),
             /*_ => {
                 panic!("Out of range {} > {}", v, std::u64::MAX);
             } */
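
The constant changes here (and the `f64::MIN`/`f64::MAX`/`f64::EPSILON` ones in the files below) swap the legacy `std::u8::MAX`-style module constants for the equivalent associated constants; `v as u64` on a value that is already `u64` is likewise a no-op cast. A small sketch with a hypothetical `width_for` helper:

// The associated constants (u8::MAX, f64::EPSILON, ...) replace the older
// std::u8::MAX-style module constants; the values are identical.
fn width_for(v: u64) -> u32 {
    match v {
        _ if v <= u64::from(u8::MAX) => 8,
        _ if v <= u64::from(u16::MAX) => 16,
        _ if v <= u64::from(u32::MAX) => 32,
        _ => 64,
    }
}

fn main() {
    assert_eq!(width_for(255), 8);
    assert_eq!(width_for(256), 16);
    assert_eq!(width_for(u64::from(u32::MAX) + 1), 64);
}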

View File

@@ -84,8 +84,8 @@ impl CoordinateSystem {
         match self {
             CoordinateSystem::Universe { .. } => {
                 for _ in 0..self.dimensions() {
-                    low.push(std::f64::MIN);
-                    high.push(std::f64::MAX);
+                    low.push(f64::MIN);
+                    high.push(f64::MAX);
                 }
             }
             CoordinateSystem::AffineSystem { axes, .. } => {

View File

@@ -19,7 +19,7 @@ use serde::Serialize;
 use super::coordinate::Coordinate;

 /// Store a position as efficiently as possible in terms of space.
-#[derive(Clone, Debug, Deserialize, Eq, Hash, Ord, PartialEq, Serialize)]
+#[derive(Clone, Debug, Deserialize, Eq, Hash, PartialEq, Serialize)]
 pub enum Position {
     /// 1 dimension positions.
     Position1(Coordinate),
@@ -128,6 +128,12 @@ impl Display for Position {
     }
 }

+impl Ord for Position {
+    fn cmp(&self, other: &Self) -> Ordering {
+        self.partial_cmp(other).unwrap()
+    }
+}
+
 impl PartialOrd for Position {
     fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
         // Let's restrict for now to same-length vectors.
@@ -144,7 +150,7 @@ impl PartialOrd for Position {
             return None;
         }

-        let ordering = ordering.drain().filter_map(|v| v).collect::<Vec<_>>();
+        let ordering = ordering.drain().flatten().collect::<Vec<_>>();

         match ordering.len() {
             3 => None,
             2 => {
@@ -357,14 +363,14 @@ impl<'s> From<&'s Position> for Vec<&'s Coordinate> {
     fn from(position: &'s Position) -> Self {
         match position {
             Position::Position1(coordinate) => vec![coordinate],
-            Position::Position2(coordinates) => coordinates.iter().map(|c| c).collect(),
-            Position::Position3(coordinates) => coordinates.iter().map(|c| c).collect(),
-            Position::Position4(coordinates) => coordinates.iter().map(|c| c).collect(),
-            Position::Position5(coordinates) => coordinates.iter().map(|c| c).collect(),
-            Position::Position6(coordinates) => coordinates.iter().map(|c| c).collect(),
-            Position::Position7(coordinates) => coordinates.iter().map(|c| c).collect(),
-            Position::Position8(coordinates) => coordinates.iter().map(|c| c).collect(),
-            Position::PositionN(coordinates) => coordinates.iter().map(|c| c).collect(),
+            Position::Position2(coordinates) => coordinates.iter().collect(),
+            Position::Position3(coordinates) => coordinates.iter().collect(),
+            Position::Position4(coordinates) => coordinates.iter().collect(),
+            Position::Position5(coordinates) => coordinates.iter().collect(),
+            Position::Position6(coordinates) => coordinates.iter().collect(),
+            Position::Position7(coordinates) => coordinates.iter().collect(),
+            Position::Position8(coordinates) => coordinates.iter().collect(),
+            Position::PositionN(coordinates) => coordinates.iter().collect(),
         }
     }
 }
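
Deriving `Ord` while hand-writing `PartialOrd`, as this file did before, is what clippy's `derive_ord_xor_partial_ord` lint flags, since the two implementations can drift apart; the commit removes the derive and delegates `cmp` to `partial_cmp` instead. A standalone sketch with a hypothetical `Pair` type (note that the `.unwrap()` panics if `partial_cmp` ever returns `None`):

use std::cmp::Ordering;

// Hypothetical stand-in type; the real Position wraps coordinate vectors.
#[derive(PartialEq, Eq)]
struct Pair(i32, i32);

// Keeping both implementations manual, with Ord delegating to PartialOrd,
// keeps the two orderings consistent with each other.
impl PartialOrd for Pair {
    fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
        Some((self.0 + self.1).cmp(&(other.0 + other.1)))
    }
}

impl Ord for Pair {
    fn cmp(&self, other: &Self) -> Ordering {
        // Mirrors the diff above; panics if partial_cmp ever returns None.
        self.partial_cmp(other).unwrap()
    }
}

fn main() {
    let mut v = vec![Pair(3, 4), Pair(1, 1)];
    v.sort();
    assert!(v[0] == Pair(1, 1));
}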

View File

@@ -276,7 +276,7 @@ impl Shape {
     /// Compute the volume.
     pub fn volume(&self) -> f64 {
         match self {
-            Shape::Point(_) => std::f64::EPSILON, // Smallest non-zero volume possible
+            Shape::Point(_) => f64::EPSILON, // Smallest non-zero volume possible
             Shape::BoundingBox(low, high) => {
                 let mut volume = 1.0;

View File

@@ -1,6 +1,7 @@
 use std::cmp::Ordering;
 use std::collections::hash_map::DefaultHasher;
 use std::collections::HashMap;
+use std::convert::TryInto;
 use std::hash::Hash;
 use std::hash::Hasher;
@@ -79,7 +80,7 @@ impl SpaceDB {
                     .collect();

                 // Make sure we do not shift more position than available
-                let shift = if count >= 31 { 31 } else { count as u32 };
+                let shift: u32 = if count >= 31 { 31 } else { count.try_into().unwrap() };

                 indices.push((
                     SpaceSetIndex::new(space_objects.iter(), DIMENSIONS, CELL_BITS),
                     vec![power.0, power.0, power.0],
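
Replacing `count as u32` with `count.try_into().unwrap()` trades a silent truncation for an explicit (and here unreachable) failure, in line with clippy's cast lints. A minimal sketch with a hypothetical `clamp_shift` helper:

use std::convert::TryInto; // already in the prelude on the 2021 edition

// `count as u32` silently truncates on overflow; try_into().unwrap() makes the
// (assumed-impossible) failure loud instead.
fn clamp_shift(count: usize) -> u32 {
    if count >= 31 {
        31
    } else {
        count.try_into().unwrap() // count < 31 here, so this cannot fail
    }
}

fn main() {
    assert_eq!(clamp_shift(7), 7);
    assert_eq!(clamp_shift(1_000), 31);
}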

View File

@@ -218,7 +218,7 @@ impl SpaceIndex {
         // then add the condition of the radius as we are working within
         // a sphere.
         let results = self
-            .find_range(&lower, &higher)
+            .find_range(lower, higher)
             .filter(move |(position, _)| (position - &center).norm() <= radius.f64());

         Ok(Box::new(results))

View File

@@ -220,13 +220,13 @@ pub mod v2 {
     pub fn from_spaces_by_properties<'o>(
         objects: Box<
             (dyn Iterator<
-                Item = (
+                Item=(
                     &'o database::Properties,
-                    Vec<(&'o String, Box<dyn Iterator<Item = space::Position> + 'o>)>,
+                    Vec<(&'o String, Box<dyn Iterator<Item=space::Position> + 'o>)>,
                 ),
             > + 'o),
         >,
-    ) -> impl Iterator<Item = SpatialObject> + 'o {
+    ) -> impl Iterator<Item=SpatialObject> + 'o {
         objects.map(|(property, positions_by_spaces)| {
             let volumes = positions_by_spaces
                 .into_iter()
@@ -234,13 +234,10 @@ pub mod v2 {
                     // We are not using vec![] as we now beforehand we
                     // will have only one element in the vector, so we
                     // optimise for space by allocating it as such.
-                    let mut shapes = Vec::with_capacity(1);
-
-                    shapes.push(Shape::Points(
-                        positions
-                            .map(|position| position.into())
-                            .collect::<Vec<_>>(),
-                    ));
+                    let shapes = vec![
+                        Shape::Points(positions.map(|position|
+                            position.into()).collect::<Vec<_>>())
+                    ];

                     Volume {
                         space: space.clone(),
@@ -263,9 +260,9 @@ pub mod v2 {
     ///
     /// * `list`:
     ///     A list of (**Space Id**, [ ( *Spatial position*, `&Properties` ) ]) tuples.
-    pub fn from_properties_by_spaces<'o>(
-        objects: database::IterObjectsBySpaces<'o>,
-    ) -> impl Iterator<Item = SpatialObject> + 'o {
+    pub fn from_properties_by_spaces(
+        objects: database::IterObjectsBySpaces<'_>,
+    ) -> impl Iterator<Item=SpatialObject> + '_ {
         // Filter per Properties, in order to regroup by it, then build
         // a single SpatialObject per Properties.
         let mut hashmap = HashMap::new();
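
Dropping the named `'o` lifetime in favour of `'_` follows clippy's `needless_lifetimes` suggestion: with a single borrowed input, the anonymous lifetime ties the returned iterator to that input just as well (the `Item=` spacing is only a formatting change). A small sketch with a hypothetical `doubled` function:

// With a single borrowed input, '_ in the `impl Trait` return type captures that
// input's lifetime, so the named lifetime parameter can be dropped.
fn doubled(values: &[i32]) -> impl Iterator<Item = i32> + '_ {
    values.iter().map(|v| v * 2)
}

fn main() {
    let data = vec![1, 2, 3];
    let out: Vec<i32> = doubled(&data).collect();
    assert_eq!(out, vec![2, 4, 6]);
}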

View File

@@ -180,7 +180,9 @@ fn convert(string: &str) -> Result<Vec<SpatialObject>, Error> {
                     let (x, y, z) = (x - origin[0], y - origin[1], z - origin[2]);
                     let (x, y, z) = (x * 0.039_062_5, y * 0.039_062_5, z * 0.039_062_5);
-                    oids.entry(oid).or_insert_with(Vec::new).push(vec![x, y, z]);
+                    oids.entry(oid)
+                        .or_insert_with(Vec::new)
+                        .push(vec![x, y, z]);
                 }
             }
             _ => trace!("line {:?}, values: {:?}", line, values),
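
This last change only re-wraps the `entry` call across lines; behaviour is unchanged. A standalone sketch of the grouping pattern, with hypothetical sample data:

use std::collections::HashMap;

// Group points by object id, creating each Vec lazily on first insertion.
fn main() {
    let mut oids: HashMap<u64, Vec<Vec<f64>>> = HashMap::new();
    for (oid, point) in [(1u64, vec![0.0, 0.0, 0.0]), (1, vec![1.0, 2.0, 3.0])] {
        oids.entry(oid).or_insert_with(Vec::new).push(point);
    }
    assert_eq!(oids[&1].len(), 2);
}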