Commits: 1 (master...lts)

Commit: 1e884a0c21
Message: Updated dependencies and fixed linter warnings
Date: 2024-08-09 18:44:35 +02:00

11 changed files with 48 additions and 48 deletions

View File

@@ -35,18 +35,18 @@ ironsea_index = "^0.1"
 ironsea_index_sfc_dbc = "^0.1"
 ironsea_index_hashmap = "^0.1"
-arrayref = "^0.3" # For Positions Objects
-lazy_static = "^1.3"
-memmap = "^0.7"
-serde = { version = "^1.0", features = ["derive"] }
-serde_json = "^1.0"
-bincode = "^1.1"
+arrayref = "0.3" # For Positions Objects
+lazy_static = "1.5"
+memmap = "0.7"
+serde = { version = "1.0", features = ["derive"] }
+serde_json = "1.0"
+bincode = "1.3"
 # Logging macros API
-#log = { version = "^0.4", features = ["max_level_trace", "release_max_level_info"] }
-log = { version = "^0.4", features = ["max_level_trace", "release_max_level_trace"] }
+#log = { version = "0.4", features = ["max_level_trace", "release_max_level_info"] }
+log = { version = "0.4", features = ["max_level_trace", "release_max_level_trace"] }
 # Used for main.rs as integration test
-pretty_env_logger = { version = "^0.3", optional = true } # Logger implementation
-measure_time = { version = "^0.6", optional = true } # To mesure parsing time, only required by binary
+pretty_env_logger = { version = "0.5", optional = true } # Logger implementation
+measure_time = { version = "0.8", optional = true } # To mesure parsing time, only required by binary
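A note on the dependency lines above: in Cargo, a bare requirement such as `serde = "1.0"` is already a caret requirement, so dropping the explicit `^` prefix is cosmetic; only the raised minimums (lazy_static 1.5, bincode 1.3, pretty_env_logger 0.5, measure_time 0.8) affect resolution. A minimal sketch of that equivalence, using the `semver` crate (the matching rules Cargo itself uses); the concrete versions are illustrative only:

```rust
use semver::{Version, VersionReq};

fn main() {
    // "1.3" (bare) and "^1.3" (explicit caret) are the same requirement.
    let bare = VersionReq::parse("1.3").unwrap();
    let caret = VersionReq::parse("^1.3").unwrap();

    // Any semver-compatible 1.x release at or above 1.3.0 satisfies both...
    let v = Version::parse("1.3.3").unwrap();
    assert!(bare.matches(&v));
    assert!(caret.matches(&v));

    // ...while a new major version satisfies neither.
    let v2 = Version::parse("2.0.0").unwrap();
    assert!(!bare.matches(&v2));
    assert!(!caret.matches(&v2));
}
```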

View File

@@ -174,11 +174,7 @@ impl Core {
         // We cannot return less that the total number of individual Ids stored
         // in the index for a full-volume query.
-        let max_elements = if let Some(elem) = max_elements {
-            Some(elem.max(properties.len()))
-        } else {
-            None
-        };
+        let max_elements = max_elements.map(|elem| elem.max(properties.len()));
         for space in spaces {
             // Filter the points of this space, and encode them before creating the index.
@@ -194,7 +190,7 @@ impl Core {
                 object.set_position(space.encode(&position)?);
             }
-            space_dbs.push(SpaceDB::new(&space, filtered, scales.clone(), max_elements))
+            space_dbs.push(SpaceDB::new(space, filtered, scales.clone(), max_elements))
         }
         Ok(Core {
@@ -232,7 +228,7 @@ impl Core {
                 // Rebase the point to the requested output space before decoding.
                 for (position, _) in list {
                     *position = unified
-                        .decode(&Space::change_base(&position, space, unified)?)?
+                        .decode(&Space::change_base(position, space, unified)?)?
                         .into();
                 }
             } else {
@@ -240,7 +236,7 @@ impl Core {
                 // respective reference space.
                 for (position, _) in list {
                     // Simply decode
-                    *position = space.decode(&position)?.into();
+                    *position = space.decode(position)?.into();
                 }
             }
@@ -278,7 +274,7 @@ impl Core {
         // Filter positions based on the view port, if present
         let filtered = match parameters.view_port(from) {
-            None => positions.iter().map(|p| p).collect::<Vec<_>>(),
+            None => positions.iter().collect::<Vec<_>>(),
             Some(view_port) => positions
                 .iter()
                 .filter(|&p| view_port.contains(p))
@@ -475,7 +471,7 @@ impl Core {
                 },
             }
         })
-        .flat_map(|v| v);
+        .flatten();
         let search_volume = if let Some(view) = view_port {
             search_volume
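The hunks above are mechanical lint fixes rather than behaviour changes: an `if let Some(..) { Some(..) } else { None }` block collapses to `Option::map`, the identity closures in `.map(|p| p)` and `.flat_map(|v| v)` disappear in favour of plain `.iter()` and `.flatten()`, and redundant `&` borrows on values that are already references are dropped. A minimal sketch of the first two patterns on toy data (the names are placeholders, not the crate's types):

```rust
fn main() {
    let max_elements: Option<usize> = Some(10);
    let stored = 42usize;

    // Before: expand the Option by hand.
    let clamped_old = if let Some(elem) = max_elements {
        Some(elem.max(stored))
    } else {
        None
    };
    // After: Option::map expresses the same transformation directly.
    let clamped_new = max_elements.map(|elem| elem.max(stored));
    assert_eq!(clamped_old, clamped_new);

    // Identity closures: .flat_map(|v| v) is just .flatten().
    let nested = vec![vec![1, 2], vec![3]];
    let flat_old: Vec<i32> = nested.clone().into_iter().flat_map(|v| v).collect();
    let flat_new: Vec<i32> = nested.into_iter().flatten().collect();
    assert_eq!(flat_old, flat_new);
}
```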

View File

@@ -109,7 +109,7 @@ impl DataBase {
                 list.len()
             ))
         } else {
-            Ok(&list[0])
+            Ok(list[0])
         }
     }

View File

@@ -206,10 +206,10 @@ impl From<u64> for Coordinate {
         // Slight syntax hack, as exclusive ranges are not yet available.
         // cf: https://github.com/rust-lang/rust/issues/37854
         match v {
-            _ if v <= u64::from(std::u8::MAX) => Coordinate::CoordinateU8(v as u8),
-            _ if v <= u64::from(std::u16::MAX) => Coordinate::CoordinateU16(v as u16),
-            _ if v <= u64::from(std::u32::MAX) => Coordinate::CoordinateU32(v as u32),
-            _ => Coordinate::CoordinateU64(v as u64),
+            _ if v <= u64::from(u8::MAX) => Coordinate::CoordinateU8(v as u8),
+            _ if v <= u64::from(u16::MAX) => Coordinate::CoordinateU16(v as u16),
+            _ if v <= u64::from(u32::MAX) => Coordinate::CoordinateU32(v as u32),
+            _ => Coordinate::CoordinateU64(v),
             /*_ => {
                 panic!("Out of range {} > {}", v, std::u64::MAX);
             } */
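This hunk, like the `f64::MIN`/`f64::MAX` one in the next file, swaps the deprecated `std::u8::MAX`-style module constants for the associated constants available since Rust 1.43, and removes a cast of a `u64` to its own type. The same pattern on a self-contained toy enum (not the crate's `Coordinate`):

```rust
// Toy stand-in: pick the narrowest integer width that can hold a u64 value.
#[derive(Debug, PartialEq)]
enum Width {
    U8(u8),
    U16(u16),
    U32(u32),
    U64(u64),
}

impl From<u64> for Width {
    fn from(v: u64) -> Self {
        match v {
            _ if v <= u64::from(u8::MAX) => Width::U8(v as u8),
            _ if v <= u64::from(u16::MAX) => Width::U16(v as u16),
            _ if v <= u64::from(u32::MAX) => Width::U32(v as u32),
            _ => Width::U64(v), // no cast needed: v is already a u64
        }
    }
}

fn main() {
    assert_eq!(Width::from(200), Width::U8(200));
    assert_eq!(Width::from(70_000), Width::U32(70_000));
    assert_eq!(Width::from(u64::MAX), Width::U64(u64::MAX));
}
```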

View File

@@ -84,8 +84,8 @@ impl CoordinateSystem {
         match self {
             CoordinateSystem::Universe { .. } => {
                 for _ in 0..self.dimensions() {
-                    low.push(std::f64::MIN);
-                    high.push(std::f64::MAX);
+                    low.push(f64::MIN);
+                    high.push(f64::MAX);
                 }
             }
             CoordinateSystem::AffineSystem { axes, .. } => {

View File

@@ -19,7 +19,7 @@ use serde::Serialize;
 use super::coordinate::Coordinate;
 /// Store a position as efficiently as possible in terms of space.
-#[derive(Clone, Debug, Deserialize, Eq, Hash, Ord, PartialEq, Serialize)]
+#[derive(Clone, Debug, Deserialize, Eq, Hash, PartialEq, Serialize)]
 pub enum Position {
     /// 1 dimension positions.
     Position1(Coordinate),
@@ -127,6 +127,11 @@ impl Display for Position {
     }
 }
+impl Ord for Position {
+    fn cmp(&self, other: &Self) -> Ordering {
+        self.partial_cmp(other).unwrap()
+    }
+}
 impl PartialOrd for Position {
     fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
         // Let's restrict for now to same-length vectors.
@@ -143,7 +148,7 @@ impl PartialOrd for Position {
             return None;
         }
-        let ordering = ordering.drain().filter_map(|v| v).collect::<Vec<_>>();
+        let ordering = ordering.drain().flatten().collect::<Vec<_>>();
         match ordering.len() {
             3 => None,
             2 => {
@@ -356,14 +361,14 @@ impl<'s> From<&'s Position> for Vec<&'s Coordinate> {
     fn from(position: &'s Position) -> Self {
         match position {
             Position::Position1(coordinate) => vec![coordinate],
-            Position::Position2(coordinates) => coordinates.iter().map(|c| c).collect(),
-            Position::Position3(coordinates) => coordinates.iter().map(|c| c).collect(),
-            Position::Position4(coordinates) => coordinates.iter().map(|c| c).collect(),
-            Position::Position5(coordinates) => coordinates.iter().map(|c| c).collect(),
-            Position::Position6(coordinates) => coordinates.iter().map(|c| c).collect(),
-            Position::Position7(coordinates) => coordinates.iter().map(|c| c).collect(),
-            Position::Position8(coordinates) => coordinates.iter().map(|c| c).collect(),
-            Position::PositionN(coordinates) => coordinates.iter().map(|c| c).collect(),
+            Position::Position2(coordinates) => coordinates.iter().collect(),
+            Position::Position3(coordinates) => coordinates.iter().collect(),
+            Position::Position4(coordinates) => coordinates.iter().collect(),
+            Position::Position5(coordinates) => coordinates.iter().collect(),
+            Position::Position6(coordinates) => coordinates.iter().collect(),
+            Position::Position7(coordinates) => coordinates.iter().collect(),
+            Position::Position8(coordinates) => coordinates.iter().collect(),
+            Position::PositionN(coordinates) => coordinates.iter().collect(),
         }
     }
 }
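Here `Ord` leaves the derive list and a hand-written `impl Ord` appears instead, presumably to satisfy clippy's `derive_ord_xor_partial_ord` lint: a derived `Ord` next to a manual `PartialOrd` can silently disagree with it. The new `cmp` just unwraps `partial_cmp`, so it panics if two positions turn out not to be comparable. A minimal sketch of the same pattern on a toy type, with that caveat spelled out:

```rust
use std::cmp::Ordering;

#[derive(Debug, PartialEq, Eq)]
struct Pair(u32, u32);

impl PartialOrd for Pair {
    fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
        // Comparable only when the ordering of both components agrees.
        match (self.0.cmp(&other.0), self.1.cmp(&other.1)) {
            (a, b) if a == b => Some(a),
            (a, Ordering::Equal) => Some(a),
            (Ordering::Equal, b) => Some(b),
            _ => None, // e.g. (1, 5) vs (2, 3): no order between them
        }
    }
}

impl Ord for Pair {
    fn cmp(&self, other: &Self) -> Ordering {
        // Panics on incomparable values; only sound if callers that need
        // a total order never feed such values in.
        self.partial_cmp(other).unwrap()
    }
}

fn main() {
    let mut v = vec![Pair(3, 4), Pair(1, 2), Pair(2, 3)];
    v.sort(); // uses Ord::cmp; fine here because these pairs are comparable
    assert_eq!(v[0], Pair(1, 2));
}
```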

View File

@@ -39,7 +39,7 @@ impl Shape {
                 //FIXME: Is the length properly dealt with? How do we process this for space conversions?
                 let mut r = Vec::with_capacity(center.dimensions());
                 for _ in 0..center.dimensions() {
-                    r.push(radius.clone());
+                    r.push(*radius);
                 }
                 let r = r.into();
                 let r = from.absolute_position(&r)?;
@@ -276,7 +276,7 @@ impl Shape {
     /// Compute the volume.
     pub fn volume(&self) -> f64 {
         match self {
-            Shape::Point(_) => std::f64::EPSILON, // Smallest non-zero volume possible
+            Shape::Point(_) => f64::EPSILON, // Smallest non-zero volume possible
             Shape::BoundingBox(low, high) => {
                 let mut volume = 1.0;
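The `*radius` change is clippy's `clone_on_copy` fix: when a type is `Copy` (which the radius coordinate is assumed to be here), dereferencing the reference copies the value and `.clone()` only adds noise; the `f64::EPSILON` change is the same associated-constant migration as above. A tiny sketch of the dereference-instead-of-clone idiom:

```rust
fn main() {
    let radius: f64 = 2.5; // stand-in for a Copy radius value
    let radius_ref: &f64 = &radius;

    let mut r: Vec<f64> = Vec::with_capacity(3);
    for _ in 0..3 {
        // Before: r.push(radius_ref.clone());
        // After: a plain dereference copies the value just as well.
        r.push(*radius_ref);
    }
    assert_eq!(r, vec![2.5, 2.5, 2.5]);
}
```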

View File

@@ -1,6 +1,7 @@
 use std::cmp::Ordering;
 use std::collections::hash_map::DefaultHasher;
 use std::collections::HashMap;
+use std::convert::TryInto;
 use std::hash::Hash;
 use std::hash::Hasher;
@@ -59,8 +60,7 @@ impl SpaceDB {
         }
         // Apply fixed scales
-        let mut count = 0;
-        for power in &powers {
+        for (count, power) in powers.iter().enumerate() {
             space_objects = space_objects
                 .into_iter()
                 .map(|mut o| {
@@ -79,8 +79,7 @@ impl SpaceDB {
                 .collect();
             // Make sure we do not shift more position than available
-            let shift = if count >= 31 { 31 } else { count };
-            count += 1;
+            let shift: u32 = if count >= 31 { 31 } else { count.try_into().unwrap() };
             indices.push((
                 SpaceSetIndex::new(space_objects.iter(), DIMENSIONS, CELL_BITS),
                 vec![power.0, power.0, power.0],
@@ -352,7 +351,7 @@ impl SpaceDB {
         let view_port = parameters.view_port(space);
         // Select the objects
-        let results = self.resolutions[index].find_by_shape(&shape, &view_port)?;
+        let results = self.resolutions[index].find_by_shape(shape, &view_port)?;
         Ok(results)
     }
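Two things happen in this file: the hand-maintained `count` variable is replaced by `Iterator::enumerate`, and since `enumerate` yields a `usize` index while the shift is used as a `u32`, the value is clamped at 31 and converted with `TryInto` (hence the added import). A small runnable sketch of the same pattern with toy data:

```rust
use std::convert::TryInto;

fn main() {
    let powers = [1u32, 2, 4, 8];

    // Before:
    //     let mut count = 0;
    //     for power in &powers { ...; count += 1; }
    // After: enumerate yields (index, item) pairs directly.
    for (count, power) in powers.iter().enumerate() {
        // Clamp first, then convert: anything >= 31 maps to 31, and values
        // below 31 always fit in a u32, so try_into().unwrap() cannot panic.
        let shift: u32 = if count >= 31 { 31 } else { count.try_into().unwrap() };
        let scaled = *power << shift;
        println!("power {power} shifted by {shift} -> {scaled}");
    }
}
```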

View File

@@ -213,7 +213,7 @@ impl SpaceIndex {
         // then add the condition of the radius as we are working within
         // a sphere.
         let results = self
-            .find_range(&lower, &higher)
+            .find_range(lower, higher)
             .into_iter()
             .filter(|(position, _)| (position - center).norm() <= radius.f64())
             .collect();

View File

@@ -115,7 +115,7 @@ pub mod v1 {
             for (position, properties) in v {
                 hashmap
                     .entry(properties)
-                    .or_insert_with(|| vec![])
+                    .or_insert_with(Vec::new)
                     .push((space, position));
             }
         }
@@ -226,7 +226,7 @@ pub mod v2 {
                     .entry(properties)
                     .or_insert_with(HashMap::new)
                     .entry(space)
-                    .or_insert_with(|| vec![])
+                    .or_insert_with(Vec::new)
                     .push(position.into());
             }
         }
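Both hunks above, and the one in the last file below, pass the constructor `Vec::new` to `or_insert_with` instead of a closure that only builds an empty vec; with the entry API the constructor still runs only when the key is missing. A minimal sketch of the idiom on a toy map (`Entry::or_default` would be an equivalent choice here):

```rust
use std::collections::HashMap;

fn main() {
    let points = [("a", 1), ("b", 2), ("a", 3)];
    let mut by_key: HashMap<&str, Vec<i32>> = HashMap::new();

    for (key, value) in points {
        by_key
            .entry(key)
            .or_insert_with(Vec::new) // was: .or_insert_with(|| vec![])
            .push(value);
    }

    assert_eq!(by_key["a"], vec![1, 3]);
    assert_eq!(by_key["b"], vec![2]);
}
```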

View File

@@ -181,7 +181,7 @@ fn convert(string: &str) -> Result<Vec<SpatialObject>, Error> {
                 let (x, y, z) = (x * 0.039_062_5, y * 0.039_062_5, z * 0.039_062_5);
                 oids.entry(oid)
-                    .or_insert_with(|| vec![])
+                    .or_insert_with(Vec::new)
                     .push(vec![x, y, z]);
             }
         }