Compare commits
29 Commits
6fc2cc5942
...
master
| Author | SHA1 | Date | |
|---|---|---|---|
| c104a22407 | |||
| bd257cf2dd | |||
| 17d9cbc86f | |||
| ba02162d97 | |||
| 9cab1916c9 | |||
| a10ffdac7d | |||
| a72fbcc88f | |||
| 3958ebaef2 | |||
| 9c3898bf46 | |||
| c45f9c145d | |||
| 08b4187ce3 | |||
| fdade86b2a | |||
| b384fc4c7e | |||
| 24e2e724e0 | |||
| 97fefec1c9 | |||
| d5429e88dd | |||
| 1fba351783 | |||
| b50bf5d49c | |||
| d66a84eaf7 | |||
| e154e549d3 | |||
| 85d322877e | |||
| 82050d6b75 | |||
| cdb3746a34 | |||
| 3b34991e24 | |||
| e0b6dda0ac | |||
| 617c2a1018 | |||
| b112fcfab6 | |||
| 8699c066e5 | |||
| 7a0fbc612f |
40
Cargo.toml
40
Cargo.toml
@@ -25,27 +25,31 @@ path = "src/lib.rs"
|
||||
[[bin]]
|
||||
name = "db-test"
|
||||
path = "src/main.rs"
|
||||
required-features = ["bin"]
|
||||
|
||||
[features]
|
||||
bin = ["measure_time", "pretty_env_logger"]
|
||||
|
||||
[profile.release]
|
||||
lto = true
|
||||
|
||||
[dependencies]
|
||||
ironsea_index = "^0.1"
|
||||
ironsea_index_sfc_dbc = "^0.1"
|
||||
ironsea_index_hashmap = "^0.1"
|
||||
ironsea_table = "^0.1"
|
||||
ironsea_table_vector = "^0.1"
|
||||
ironsea_index = "0.1"
|
||||
ironsea_index_sfc_dbc = "0.1"
|
||||
ironsea_index_hashmap = "0.1"
|
||||
|
||||
memmap = "^0.7"
|
||||
lazy_static = "^1.3"
|
||||
arrayref = "^0.3" # For Positions Objects
|
||||
arrayref = "0.3" # For Positions Objects
|
||||
lazy_static = "1.5"
|
||||
memmap = "0.7"
|
||||
|
||||
serde = "^1.0"
|
||||
serde_derive = "^1.0"
|
||||
serde_json = "^1.0"
|
||||
bincode = "^1.1"
|
||||
|
||||
# Used for main.rs as integration test
|
||||
measure_time = "^0.6" # To mesure parsing time, only required by binary
|
||||
serde = { version = "1.0", features = ["derive"] }
|
||||
serde_json = "1.0"
|
||||
bincode = "1.3"
|
||||
|
||||
# Logging macros API
|
||||
#log = { version = "^0.4", features = ["max_level_trace", "release_max_level_info"] }
|
||||
log = { version = "^0.4", features = ["max_level_trace", "release_max_level_trace"] }
|
||||
pretty_env_logger = "^0.3" # Logger implementation
|
||||
#log = { version = "0.4", features = ["max_level_trace", "release_max_level_info"] }
|
||||
log = { version = "0.4", features = ["max_level_trace", "release_max_level_trace"] }
|
||||
|
||||
# Used for main.rs as integration test
|
||||
pretty_env_logger = { version = "0.5", optional = true } # Logger implementation
|
||||
measure_time = { version = "0.8", optional = true } # To mesure parsing time, only required by binary
|
||||
|
||||
28
README.md
28
README.md
@@ -22,35 +22,9 @@ This enables the index implementations to be agnostic from the underlying data s
|
||||
|
||||
* Rust: https://www.rust-lang.org
|
||||
|
||||
## Quick start
|
||||
|
||||
## Building from sources
|
||||
|
||||
To build this project, you will need to run the following:
|
||||
|
||||
```sh
|
||||
cargo build --release
|
||||
```
|
||||
|
||||
### Installation
|
||||
|
||||
To install the software on the system you can use:
|
||||
|
||||
```sh
|
||||
cargo install --release
|
||||
```
|
||||
|
||||
### Usage
|
||||
|
||||
The binary `db-test` provided is used only as an integration test at this point. It will convert a json input to a binary representation, before building an index over it. Once this is achieved, it will run a couple of hard-coded queries over the index.
|
||||
|
||||
```sh
|
||||
cargo run --release
|
||||
```
|
||||
|
||||
## Documentation
|
||||
|
||||
For more information, please refer to the [documentation](https://epfl-dias.github.io/PROJECT_NAME/).
|
||||
For more information, please refer to the [documentation](https://epfl-dias.github.io/mercator_db/).
|
||||
|
||||
If you want to build the documentation and access it locally, you can use:
|
||||
|
||||
|
||||
@@ -1,21 +1,41 @@
|
||||
use serde::Deserialize;
|
||||
use serde::Serialize;
|
||||
|
||||
use super::space::Position;
|
||||
use super::space::Shape;
|
||||
use super::space::Space;
|
||||
use super::space_db::SpaceDB;
|
||||
use super::space_index::SpaceSetObject;
|
||||
use super::DataBase;
|
||||
use super::IterObjects;
|
||||
use super::IterPositions;
|
||||
use super::ResultSet;
|
||||
use crate::SpaceObject;
|
||||
|
||||
/// Query Parameters.
|
||||
pub struct CoreQueryParameters<'a> {
|
||||
/// Database to use.
|
||||
pub db: &'a DataBase,
|
||||
/// Output reference space into which to convert results.
|
||||
pub output_space: Option<&'a str>,
|
||||
/// Volume value to use to select the index resolution.
|
||||
//FIXME: IS this necessary given view_port?
|
||||
pub threshold_volume: Option<f64>,
|
||||
/// Full definition of the view port, a.k.a the volume being
|
||||
/// displayed.
|
||||
pub view_port: &'a Option<(Vec<f64>, Vec<f64>)>,
|
||||
pub resolution: Option<Vec<u32>>,
|
||||
/// Index resolution to use.
|
||||
pub resolution: &'a Option<Vec<u32>>,
|
||||
}
|
||||
|
||||
impl CoreQueryParameters<'_> {
|
||||
/// Build a minimum bounding box out of the provided viewport, and
|
||||
/// rebase it in the target space.
|
||||
///
|
||||
/// # Parameters
|
||||
///
|
||||
/// * `space`:
|
||||
/// Space to use for the encoded coordinates of the minimum
|
||||
/// bounding box.
|
||||
pub fn view_port(&self, space: &Space) -> Option<Shape> {
|
||||
if let Some((low, high)) = self.view_port {
|
||||
let view_port = Shape::BoundingBox(low.into(), high.into());
|
||||
@@ -29,20 +49,29 @@ impl CoreQueryParameters<'_> {
|
||||
}
|
||||
}
|
||||
|
||||
/// Definition of the volumetric objects identifiers.
|
||||
///
|
||||
/// We have two parts to it, first the *kind* and the actual, *id* used
|
||||
/// to distinguish different objects.
|
||||
// FIXME: Ids are expected unique, irrespective of the enum variant!
|
||||
#[derive(Clone, Debug, Deserialize, Eq, Hash, PartialEq, Serialize)]
|
||||
pub enum Properties {
|
||||
/// Spatial Features.
|
||||
Feature(String),
|
||||
/// Unoptimized arbitrary kind of *identifiers*.
|
||||
Unknown(String, String),
|
||||
}
|
||||
|
||||
impl Properties {
|
||||
pub fn id(&self) -> &String {
|
||||
/// Extract the *identifier* of this spatial object.
|
||||
pub fn id(&self) -> &str {
|
||||
match self {
|
||||
Properties::Feature(id) => id,
|
||||
Properties::Unknown(id, _) => id,
|
||||
}
|
||||
}
|
||||
|
||||
/// Extract the *kind* of spatial object.
|
||||
pub fn type_name(&self) -> &str {
|
||||
match self {
|
||||
Properties::Feature(_) => "Feature",
|
||||
@@ -50,6 +79,13 @@ impl Properties {
|
||||
}
|
||||
}
|
||||
|
||||
/// Instantiate a new *feature*.
|
||||
///
|
||||
/// # Parameters
|
||||
///
|
||||
/// * `id`:
|
||||
/// The identifier of the object, which can be converted into a
|
||||
/// `String`.
|
||||
pub fn feature<S>(id: S) -> Properties
|
||||
where
|
||||
S: Into<String>,
|
||||
@@ -57,6 +93,17 @@ impl Properties {
|
||||
Properties::Feature(id.into())
|
||||
}
|
||||
|
||||
/// Instantiate a new arbitrary kind of object, with the given id.
|
||||
///
|
||||
/// # Parameters
|
||||
///
|
||||
/// * `id`:
|
||||
/// The identifier of the object, which can be converted into a
|
||||
/// `String`.
|
||||
///
|
||||
/// * `type_name`:
|
||||
/// A value which can be converted into a `String`, and
|
||||
/// represent the **kind** of the object.
|
||||
pub fn unknown<S>(id: S, type_name: S) -> Properties
|
||||
where
|
||||
S: Into<String>,
|
||||
@@ -65,16 +112,7 @@ impl Properties {
|
||||
}
|
||||
}
|
||||
|
||||
// FIXME: Which is faster, the code below or the automatically generated
|
||||
// implementation?
|
||||
/*
|
||||
impl PartialEq for Properties {
|
||||
fn eq(&self, other: &Self) -> bool {
|
||||
self.id() == other.id() && self.type_name() == other.type_name()
|
||||
}
|
||||
}
|
||||
*/
|
||||
|
||||
/// Index over a single dataset
|
||||
#[derive(Clone, Debug, Deserialize, Serialize)]
|
||||
pub struct Core {
|
||||
title: String,
|
||||
@@ -84,6 +122,43 @@ pub struct Core {
|
||||
}
|
||||
|
||||
impl Core {
|
||||
/// Instantiate a new index for a dataset.
|
||||
///
|
||||
/// # Parameters
|
||||
///
|
||||
/// * `title`:
|
||||
/// The title to use for the new dataset.
|
||||
///
|
||||
/// * `version`:
|
||||
/// The revision of the new dataset.
|
||||
///
|
||||
/// * `spaces`:
|
||||
/// The list of reference spaces used within the dataset.
|
||||
///
|
||||
/// * `properties`:
|
||||
/// The *identifiers*, has an ordered list, which is referenced
|
||||
/// by the `space_objects` by offset within this list.
|
||||
///
|
||||
/// * `space_objects`:
|
||||
/// A list of links between volumetric positions and
|
||||
/// identifiers.
|
||||
///
|
||||
/// * `scales`:
|
||||
/// A list of resolutions for which to build indices. Each value
|
||||
/// represent the number of bits of precision to **remove** from
|
||||
/// the coordinates to build the index.
|
||||
///
|
||||
/// * `max_elements`:
|
||||
/// The minimum number of positions to use as a stopping
|
||||
/// condition while building automatically multiple resolutions
|
||||
/// of the index.
|
||||
///
|
||||
/// Each consecutive index will contains at most half the number
|
||||
/// of data points than the next finer-grained index.
|
||||
///
|
||||
/// The minimum number of elements contained within an index is
|
||||
/// this value or the number of *identifiers*, whichever is
|
||||
/// greater.
|
||||
pub fn new<S>(
|
||||
title: S,
|
||||
version: S,
|
||||
@@ -92,170 +167,182 @@ impl Core {
|
||||
space_objects: Vec<SpaceSetObject>,
|
||||
scales: Option<Vec<Vec<u32>>>,
|
||||
max_elements: Option<usize>,
|
||||
) -> Self
|
||||
//Result<Self, String>
|
||||
) -> Result<Self, String>
|
||||
where
|
||||
S: Into<String>,
|
||||
{
|
||||
// Sort out the space, and create a SpaceDB per reference space
|
||||
let mut space_dbs = vec![];
|
||||
|
||||
// We cannot return less that the total number of individual Ids stored
|
||||
// in the index for a full-volume query.
|
||||
let max_elements = max_elements.map(|elem| elem.max(properties.len()));
|
||||
|
||||
for space in spaces {
|
||||
// Filter the points of this space, and encode them before creating the index.
|
||||
let filtered = space_objects
|
||||
let mut filtered = space_objects
|
||||
.iter()
|
||||
.filter_map(|object| {
|
||||
if &object.space_id().0 == space.name() {
|
||||
let position: Vec<f64> = object.position().into();
|
||||
Some(SpaceSetObject::new(
|
||||
space.name(),
|
||||
space.encode(&position).unwrap(),
|
||||
*object.value(),
|
||||
))
|
||||
} else {
|
||||
None
|
||||
}
|
||||
})
|
||||
.collect();
|
||||
.filter(|object| object.space_id() == space.name())
|
||||
// Clone only the selected objects, not all of them!
|
||||
.cloned()
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
space_dbs.push(SpaceDB::new(&space, filtered, scales.clone(), max_elements))
|
||||
for object in filtered.iter_mut() {
|
||||
let position: Vec<f64> = object.position().into();
|
||||
object.set_position(space.encode(&position)?);
|
||||
}
|
||||
|
||||
space_dbs.push(SpaceDB::new(space, filtered, scales.clone(), max_elements))
|
||||
}
|
||||
|
||||
Core {
|
||||
Ok(Core {
|
||||
title: title.into(),
|
||||
version: version.into(),
|
||||
properties,
|
||||
space_db: space_dbs,
|
||||
}
|
||||
}
|
||||
|
||||
// Check if the given space_id is referenced in the current core.
|
||||
pub fn is_empty<S>(&self, space_id: S) -> bool
|
||||
where
|
||||
S: Into<String>,
|
||||
{
|
||||
let id = space_id.into();
|
||||
for s in &self.space_db {
|
||||
if s.name() == &id {
|
||||
return s.is_empty();
|
||||
}
|
||||
}
|
||||
|
||||
// Not found, so the space is empty.
|
||||
true
|
||||
})
|
||||
}
|
||||
|
||||
/// Title of the dataset.
|
||||
pub fn name(&self) -> &String {
|
||||
&self.title
|
||||
}
|
||||
|
||||
/// Revision of the dataset.
|
||||
pub fn version(&self) -> &String {
|
||||
&self.version
|
||||
}
|
||||
|
||||
/// List of *identifiers* contained in this dataset.
|
||||
pub fn keys(&self) -> &Vec<Properties> {
|
||||
&self.properties
|
||||
}
|
||||
|
||||
fn to_space_object(&self, space_id: &str, list: Vec<SpaceSetObject>) -> Vec<SpaceObject> {
|
||||
list.into_iter()
|
||||
.map(|o| {
|
||||
let offset: usize = o.value().into();
|
||||
let value = self.properties[offset].clone();
|
||||
SpaceObject {
|
||||
space_id: space_id.to_string(),
|
||||
position: o.position().clone(),
|
||||
value,
|
||||
}
|
||||
})
|
||||
.collect()
|
||||
}
|
||||
|
||||
fn decode_positions(
|
||||
list: &mut [SpaceObject],
|
||||
space: &Space,
|
||||
db: &DataBase,
|
||||
fn decode_positions<'b>(
|
||||
list: IterObjects<'b>,
|
||||
space: &'b Space,
|
||||
db: &'b DataBase,
|
||||
output_space: &Option<&str>,
|
||||
) -> Result<(), String> {
|
||||
if let Some(unified_id) = *output_space {
|
||||
) -> Result<IterObjects<'b>, String> {
|
||||
let b: IterObjects = if let Some(unified_id) = *output_space {
|
||||
let unified = db.space(unified_id)?;
|
||||
|
||||
// Rebase the point to the requested output space before decoding.
|
||||
for o in list {
|
||||
o.position = unified
|
||||
.decode(&Space::change_base(&o.position, space, unified)?)?
|
||||
.into();
|
||||
o.space_id = unified_id.to_string();
|
||||
}
|
||||
Box::new(list.filter_map(move |(position, properties)| {
|
||||
match Space::change_base(&position, space, unified) {
|
||||
Err(_) => None,
|
||||
Ok(rebased) => match unified.decode(&rebased) {
|
||||
Err(_) => None,
|
||||
Ok(decoded) => Some((decoded.into(), properties)),
|
||||
},
|
||||
}
|
||||
}))
|
||||
} else {
|
||||
// Decode the positions into f64 values, which are defined in their
|
||||
// respective reference space.
|
||||
for o in list {
|
||||
// Simply decode
|
||||
o.position = space.decode(&o.position)?.into();
|
||||
}
|
||||
}
|
||||
Box::new(list.filter_map(
|
||||
move |(position, properties)| match space.decode(&position) {
|
||||
Err(_) => None,
|
||||
Ok(decoded) => Some((decoded.into(), properties)),
|
||||
},
|
||||
))
|
||||
};
|
||||
|
||||
Ok(())
|
||||
Ok(b)
|
||||
}
|
||||
|
||||
// Search by positions defining a volume.
|
||||
// Positions ARE DEFINED IN F64 VALUES IN THE SPACE. NOT ENCODED!
|
||||
pub fn get_by_positions(
|
||||
&self,
|
||||
parameters: &CoreQueryParameters,
|
||||
positions: &[Position],
|
||||
from: &str,
|
||||
) -> ResultSet {
|
||||
/// Retrieve everything located at specific positions.
|
||||
///
|
||||
/// # Parameters
|
||||
///
|
||||
/// * `parameters`:
|
||||
/// Search parameters, see [CoreQueryParameters](struct.CoreQueryParameters.html).
|
||||
///
|
||||
/// * `positions`:
|
||||
/// Volume to use to filter data points.
|
||||
///
|
||||
/// * `space_id`:
|
||||
/// *positions* are defined as decoded coordinates in this
|
||||
/// reference space.
|
||||
///
|
||||
/// [shape]: space/enum.Shape.html
|
||||
pub fn get_by_positions<'d>(
|
||||
&'d self,
|
||||
parameters: &'d CoreQueryParameters,
|
||||
positions: Vec<Position>,
|
||||
space_id: &'d str,
|
||||
) -> ResultSet<'d> {
|
||||
let CoreQueryParameters {
|
||||
db, output_space, ..
|
||||
} = parameters;
|
||||
|
||||
let mut results = vec![];
|
||||
let count = positions.len();
|
||||
let from = db.space(from)?;
|
||||
|
||||
// Filter positions based on the view port, if present
|
||||
let filtered = match parameters.view_port(from) {
|
||||
None => positions.iter().map(|p| p).collect::<Vec<_>>(),
|
||||
Some(view_port) => positions
|
||||
.iter()
|
||||
.filter(|&p| view_port.contains(p))
|
||||
.collect::<Vec<_>>(),
|
||||
};
|
||||
let from = db.space(space_id)?;
|
||||
|
||||
for s in &self.space_db {
|
||||
let to = db.space(s.name())?;
|
||||
let mut p = Vec::with_capacity(count);
|
||||
|
||||
for position in filtered.as_slice() {
|
||||
let position: Vec<f64> = Space::change_base(position, from, to)?.into();
|
||||
p.push(to.encode(&position)?);
|
||||
}
|
||||
// Filter positions based on the view port, if present
|
||||
// FIXME: remove clone() on positions?
|
||||
let filtered: IterPositions = match parameters.view_port(from) {
|
||||
None => Box::new(positions.clone().into_iter()),
|
||||
Some(view_port) => Box::new(
|
||||
positions
|
||||
.clone()
|
||||
.into_iter()
|
||||
.filter(move |p| view_port.contains(p)),
|
||||
),
|
||||
};
|
||||
|
||||
let r = s.get_by_positions(&p, parameters)?;
|
||||
let mut r = self.to_space_object(s.name(), r);
|
||||
// Rebase the positions into the current space
|
||||
let p = filtered.filter_map(move |position| {
|
||||
match Space::change_base(&position, from, to) {
|
||||
Err(_) => None,
|
||||
Ok(position) => {
|
||||
let position: Vec<f64> = position.into();
|
||||
match to.encode(&position) {
|
||||
Err(_) => None,
|
||||
Ok(position) => Some(position),
|
||||
}
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
Self::decode_positions(&mut r, to, db, output_space)?;
|
||||
// Select the data based on the rebased viewport filter.
|
||||
let r = s
|
||||
.get_by_positions(p, parameters)?
|
||||
.map(move |(position, fields)| (position, &self.properties[fields.value()]));
|
||||
|
||||
results.append(&mut r);
|
||||
results.push((
|
||||
s.name(),
|
||||
Self::decode_positions(Box::new(r), to, db, output_space)?,
|
||||
));
|
||||
}
|
||||
|
||||
Ok(results)
|
||||
}
|
||||
|
||||
// Search by shape defining a volume:
|
||||
// * Hyperrectangle (MBB),
|
||||
// * HyperSphere (radius around a point),
|
||||
// * Point (Specific position)
|
||||
|
||||
// SHAPE IS DEFINED IN F64 VALUES IN THE SPACE. NOT ENCODED!
|
||||
pub fn get_by_shape(
|
||||
&self,
|
||||
parameters: &CoreQueryParameters,
|
||||
shape: &Shape,
|
||||
space_id: &str,
|
||||
) -> ResultSet {
|
||||
/// Search using a [shape] which defines a volume.
|
||||
///
|
||||
/// # Parameters
|
||||
///
|
||||
/// * `parameters`:
|
||||
/// Search parameters, see [CoreQueryParameters](struct.CoreQueryParameters.html).
|
||||
///
|
||||
/// * `shape`:
|
||||
/// Volume to use to filter data points.
|
||||
///
|
||||
/// * `space_id`:
|
||||
/// *shape* is defined as decoded coordinates in this
|
||||
/// reference space.
|
||||
///
|
||||
/// [shape]: space/enum.Shape.html
|
||||
pub fn get_by_shape<'d>(
|
||||
&'d self,
|
||||
parameters: &'d CoreQueryParameters,
|
||||
shape: Shape,
|
||||
space_id: &'d str,
|
||||
) -> ResultSet<'d> {
|
||||
let CoreQueryParameters {
|
||||
db, output_space, ..
|
||||
} = parameters;
|
||||
@@ -271,19 +358,34 @@ impl Core {
|
||||
// let current_shape = shape.encode(current_space)?;
|
||||
// println!("current shape Encoded: {:?}", current_shape);
|
||||
|
||||
let r = s.get_by_shape(¤t_shape, parameters)?;
|
||||
let mut r = self.to_space_object(s.name(), r);
|
||||
let r = s
|
||||
.get_by_shape(current_shape, parameters)?
|
||||
.map(move |(position, fields)| (position, &self.properties[fields.value()]));
|
||||
|
||||
Self::decode_positions(&mut r, current_space, db, output_space)?;
|
||||
|
||||
results.append(&mut r);
|
||||
results.push((
|
||||
s.name(),
|
||||
Self::decode_positions(Box::new(r), current_space, db, output_space)?,
|
||||
));
|
||||
}
|
||||
|
||||
Ok(results)
|
||||
}
|
||||
|
||||
// Search by Id, a.k.a values
|
||||
pub fn get_by_id<S>(&self, parameters: &CoreQueryParameters, id: S) -> ResultSet
|
||||
/// Search by Id, a.k.a retrieve all the positions linked to this id.
|
||||
///
|
||||
/// # Parameters
|
||||
///
|
||||
/// * `parameters`:
|
||||
/// Search parameters, see [CoreQueryParameters](struct.CoreQueryParameters.html).
|
||||
///
|
||||
/// * `id`:
|
||||
/// Identifier for which to retrieve is positions.
|
||||
///
|
||||
pub fn get_by_id<'s, S>(
|
||||
&'s self,
|
||||
parameters: &'s CoreQueryParameters,
|
||||
id: S,
|
||||
) -> Result<Vec<(&String, IterPositions<'s>)>, String>
|
||||
where
|
||||
S: Into<String>,
|
||||
{
|
||||
@@ -297,28 +399,63 @@ impl Core {
|
||||
// Do we have this ID registered at all?
|
||||
if let Ok(offset) = self
|
||||
.properties
|
||||
.binary_search_by_key(&&id, |properties| properties.id())
|
||||
.binary_search_by_key(&id.as_str(), |properties| properties.id())
|
||||
{
|
||||
// Yes, so now let's find all the position linked to it, per
|
||||
// reference space
|
||||
for s in &self.space_db {
|
||||
let current_space = db.space(s.name())?;
|
||||
|
||||
let r = s.get_by_id(offset, parameters)?;
|
||||
let mut r = self.to_space_object(s.name(), r);
|
||||
let positions_by_id = s.get_by_id(offset, parameters)?;
|
||||
|
||||
Self::decode_positions(&mut r, current_space, db, output_space)?;
|
||||
//Self::decode_positions(r.as_mut_slice(), current_space, db, output_space)?;
|
||||
let positions: IterPositions = if let Some(unified_id) = *output_space {
|
||||
let unified = db.space(unified_id)?;
|
||||
|
||||
results.append(&mut r);
|
||||
// Rebase the point to the requested output space before decoding.
|
||||
Box::new(positions_by_id.filter_map(move |position| {
|
||||
match Space::change_base(&position, current_space, unified) {
|
||||
Err(_) => None,
|
||||
Ok(rebased) => match unified.decode(&rebased) {
|
||||
Err(_) => None,
|
||||
Ok(decoded) => Some(decoded.into()),
|
||||
},
|
||||
}
|
||||
}))
|
||||
} else {
|
||||
// Decode the positions into f64 values, which are defined in their
|
||||
// respective reference space.
|
||||
Box::new(positions_by_id.filter_map(move |position| {
|
||||
match current_space.decode(&position) {
|
||||
Err(_) => None,
|
||||
Ok(decoded) => Some(decoded.into()),
|
||||
}
|
||||
}))
|
||||
};
|
||||
|
||||
results.push((s.name(), positions));
|
||||
}
|
||||
}
|
||||
|
||||
Ok(results)
|
||||
}
|
||||
|
||||
// Search by Label, a.k.a within a volume defined by the positions of an Id.
|
||||
// FIXME: NEED TO KEEP TRACK OF SPACE IDS AND DO CONVERSIONS
|
||||
pub fn get_by_label<S>(&self, parameters: &CoreQueryParameters, id: S) -> ResultSet
|
||||
/// Search by label, a.k.a use an identifier to define the search
|
||||
/// volume.
|
||||
///
|
||||
/// # Parameters
|
||||
///
|
||||
/// * `parameters`:
|
||||
/// Search parameters, see [CoreQueryParameters](struct.CoreQueryParameters.html).
|
||||
///
|
||||
/// * `id`:
|
||||
/// Identifier to use to define the search volume.
|
||||
///
|
||||
pub fn get_by_label<'d, S>(
|
||||
&'d self,
|
||||
parameters: &'d CoreQueryParameters,
|
||||
id: S,
|
||||
) -> ResultSet<'d>
|
||||
where
|
||||
S: Into<String>,
|
||||
{
|
||||
@@ -334,7 +471,7 @@ impl Core {
|
||||
|
||||
if let Ok(offset) = self
|
||||
.properties
|
||||
.binary_search_by_key(&&id, |properties| properties.id())
|
||||
.binary_search_by_key(&id.as_str(), |properties| properties.id())
|
||||
{
|
||||
// Generate the search volume. Iterate over all reference spaces, to
|
||||
// retrieve a list of SpaceSetObjects linked to `id`, then iterate
|
||||
@@ -342,7 +479,7 @@ impl Core {
|
||||
let search_volume = self
|
||||
.space_db
|
||||
.iter()
|
||||
.filter_map(|s| {
|
||||
.filter_map(move |s| {
|
||||
match db.space(s.name()) {
|
||||
Err(_) => None,
|
||||
Ok(from) => match s.get_by_id(offset, parameters) {
|
||||
@@ -350,9 +487,9 @@ impl Core {
|
||||
Ok(v) => {
|
||||
// Convert the search Volume into Universe.
|
||||
let mut p = vec![];
|
||||
for o in v {
|
||||
for position in v {
|
||||
if let Ok(position) =
|
||||
Space::change_base(o.position(), from, Space::universe())
|
||||
Space::change_base(&position, from, Space::universe())
|
||||
{
|
||||
p.push(position)
|
||||
}
|
||||
@@ -363,42 +500,43 @@ impl Core {
|
||||
},
|
||||
}
|
||||
})
|
||||
.flat_map(|v| v);
|
||||
|
||||
let search_volume = if let Some(view) = view_port {
|
||||
search_volume
|
||||
.filter(|p| view.contains(p))
|
||||
.collect::<Vec<_>>()
|
||||
} else {
|
||||
search_volume.collect::<Vec<_>>()
|
||||
};
|
||||
.flatten();
|
||||
|
||||
// Select based on the volume, and filter out the label position themselves.
|
||||
for s in &self.space_db {
|
||||
let to = db.space(s.name())?;
|
||||
let mut p = vec![];
|
||||
|
||||
let search_volume: IterPositions = if let Some(view) = view_port.clone() {
|
||||
Box::new(search_volume.clone().filter(move |p| view.contains(p)))
|
||||
} else {
|
||||
Box::new(search_volume.clone())
|
||||
};
|
||||
|
||||
// Convert the search Volume into the target space.
|
||||
for position in &search_volume {
|
||||
let position = Space::change_base(position, Space::universe(), to)?;
|
||||
p.push(position);
|
||||
}
|
||||
let p = search_volume.filter_map(move |position| {
|
||||
match Space::change_base(&position, Space::universe(), to) {
|
||||
Err(_) => None,
|
||||
Ok(position) => Some(position),
|
||||
}
|
||||
});
|
||||
|
||||
let r = s.get_by_positions(&p, parameters)?;
|
||||
let mut r = self.to_space_object(s.name(), r);
|
||||
let r = s
|
||||
.get_by_positions(p, parameters)?
|
||||
.filter_map(move |(position, fields)| {
|
||||
if fields.value() == offset {
|
||||
None
|
||||
} else {
|
||||
Some((position, &self.properties[fields.value()]))
|
||||
}
|
||||
});
|
||||
|
||||
Self::decode_positions(&mut r, to, db, output_space)?;
|
||||
|
||||
results.append(&mut r);
|
||||
results.push((
|
||||
s.name(),
|
||||
Self::decode_positions(Box::new(r), to, db, output_space)?,
|
||||
));
|
||||
}
|
||||
}
|
||||
|
||||
Ok(results)
|
||||
}
|
||||
}
|
||||
|
||||
impl ironsea_index::Record<String> for Core {
|
||||
fn key(&self) -> String {
|
||||
self.title.clone()
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,75 +1,68 @@
|
||||
mod db_core;
|
||||
pub mod space;
|
||||
mod space_db;
|
||||
mod space_index;
|
||||
pub(crate) mod space_index;
|
||||
|
||||
use std::collections::HashMap;
|
||||
use std::fs::File;
|
||||
|
||||
use ironsea_index::Indexed;
|
||||
use ironsea_table_vector::VectorTable;
|
||||
use memmap::Mmap;
|
||||
|
||||
use super::storage;
|
||||
pub use db_core::Core;
|
||||
pub use db_core::CoreQueryParameters;
|
||||
pub use db_core::Properties;
|
||||
use space::Position;
|
||||
use space::Space;
|
||||
pub use space_index::SpaceSetObject;
|
||||
|
||||
pub type ResultSet = Result<Vec<SpaceObject>, String>;
|
||||
pub type ReferenceSpaceIndex = ironsea_index_hashmap::Index<VectorTable<Space>, Space, String>;
|
||||
type CoreIndex = ironsea_index_hashmap::Index<VectorTable<Core>, Core, String>;
|
||||
/// TODO doc
|
||||
pub type IterPositions<'i> = Box<dyn Iterator<Item = Position> + 'i>;
|
||||
/// TODO doc
|
||||
pub type IterObjects<'i> = Box<dyn Iterator<Item = (Position, &'i Properties)> + 'i>;
|
||||
/// TODO doc
|
||||
pub type IterObjectsBySpaces<'i> = Vec<(&'i String, IterObjects<'i>)>;
|
||||
|
||||
#[derive(Clone, Debug, Deserialize, Hash, PartialEq, Serialize)]
|
||||
pub struct SpaceId(String);
|
||||
/// Selected tuples matching a query.
|
||||
///
|
||||
/// This is either:
|
||||
/// * `Err` with a reason stored as a `String`
|
||||
/// * `Ok`, with a vector of tuples defined as:
|
||||
/// `(Space Name, [(Position, Properties)])`
|
||||
pub type ResultSet<'r> = Result<IterObjectsBySpaces<'r>, String>;
|
||||
|
||||
impl SpaceId {
|
||||
pub fn new<S>(space_name: S) -> Self
|
||||
where
|
||||
S: Into<String>,
|
||||
{
|
||||
SpaceId(space_name.into())
|
||||
}
|
||||
type ReferenceSpaceIndex = ironsea_index_hashmap::Index<Space, String>;
|
||||
type CoreIndex = ironsea_index_hashmap::Index<Core, String>;
|
||||
|
||||
pub fn get(&self, index: &ReferenceSpaceIndex) -> Self {
|
||||
let s = index.find(&self.0);
|
||||
assert_eq!(s.len(), 1);
|
||||
|
||||
SpaceId(s[0].name().clone())
|
||||
}
|
||||
}
|
||||
|
||||
impl<S> From<S> for SpaceId
|
||||
where
|
||||
S: Into<String>,
|
||||
{
|
||||
fn from(id: S) -> Self {
|
||||
SpaceId(id.into())
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, Eq, Hash, PartialEq, Serialize)]
|
||||
pub struct SpaceObject {
|
||||
pub space_id: String,
|
||||
pub position: Position,
|
||||
pub value: Properties,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, Deserialize, Serialize)]
|
||||
/// Collection of datasets and their reference spaces.
|
||||
pub struct DataBase {
|
||||
reference_spaces: ReferenceSpaceIndex,
|
||||
cores: CoreIndex,
|
||||
}
|
||||
|
||||
impl DataBase {
|
||||
/// Instantiate a `DataBase` struct.
|
||||
///
|
||||
/// # Parameters
|
||||
///
|
||||
/// * `spaces`:
|
||||
/// List of reference spaces.
|
||||
///
|
||||
/// * `cores`:
|
||||
/// List of datasets (cores) which will be queried through this
|
||||
/// `DataBase` struct.
|
||||
// TODO: Replace vectors with iterators?
|
||||
pub fn new(spaces: Vec<Space>, cores: Vec<Core>) -> Self {
|
||||
DataBase {
|
||||
reference_spaces: ReferenceSpaceIndex::new(VectorTable::new(spaces)),
|
||||
cores: CoreIndex::new(VectorTable::new(cores)),
|
||||
reference_spaces: ReferenceSpaceIndex::new(spaces.into_iter()),
|
||||
cores: CoreIndex::new(cores.into_iter()),
|
||||
}
|
||||
}
|
||||
|
||||
/// Load a list of indices.
|
||||
///
|
||||
/// # Parameters
|
||||
///
|
||||
/// * `indices`:
|
||||
/// The list of index file names to load.
|
||||
pub fn load(indices: &[&str]) -> Result<Self, String> {
|
||||
let mut spaces = HashMap::new();
|
||||
let mut cores = vec![];
|
||||
@@ -100,112 +93,72 @@ impl DataBase {
|
||||
Ok(DataBase::new(spaces, cores))
|
||||
}
|
||||
|
||||
fn mmap_file(filename: &str) -> Result<Mmap, String> {
|
||||
let file_in = match File::open(filename) {
|
||||
Err(e) => return Err(format!("{:?}", e)),
|
||||
Ok(file) => file,
|
||||
};
|
||||
|
||||
match unsafe { Mmap::map(&file_in) } {
|
||||
Err(e) => Err(format!("{:?}", e)),
|
||||
Ok(mmap) => Ok(mmap),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn load_core(name: &str) -> Result<(Vec<Space>, Core), String> {
|
||||
let mmap = DataBase::mmap_file(&name)?;
|
||||
|
||||
match bincode::deserialize(&mmap[..]) {
|
||||
fn load_core(name: &str) -> Result<(Vec<Space>, Core), String> {
|
||||
match storage::bincode::load(name) {
|
||||
Err(e) => Err(format!("Index deserialization error: {:?}", e)),
|
||||
Ok(index) => Ok(index),
|
||||
}
|
||||
}
|
||||
|
||||
// Check if the given space_id is referenced in the DB.
|
||||
fn is_empty<S>(&self, id: S) -> bool
|
||||
where
|
||||
S: Into<String>,
|
||||
{
|
||||
let id = id.into();
|
||||
|
||||
for s in self.cores.keys() {
|
||||
let core: &Core = self.cores.find(s)[0];
|
||||
if !core.is_empty(id.clone()) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
true
|
||||
}
|
||||
|
||||
fn check_exactly_one<'t, T, S>(list: &[&'t T], name: S, value: S) -> Result<&'t T, String>
|
||||
where
|
||||
S: Into<String>,
|
||||
{
|
||||
fn check_exactly_one<'t, T>(list: &[&'t T], name: &str, value: &str) -> Result<&'t T, String> {
|
||||
if list.len() > 1 {
|
||||
Err(format!(
|
||||
"Multiple {} registered under `{}`: {}",
|
||||
name.into(),
|
||||
value.into(),
|
||||
name,
|
||||
value,
|
||||
list.len()
|
||||
))
|
||||
} else if list.is_empty() {
|
||||
Err(format!(
|
||||
"No {} registered under `{}`: {}",
|
||||
name.into(),
|
||||
value.into(),
|
||||
name,
|
||||
value,
|
||||
list.len()
|
||||
))
|
||||
} else {
|
||||
Ok(&list[0])
|
||||
Ok(list[0])
|
||||
}
|
||||
}
|
||||
|
||||
pub fn space_id<S>(&self, name: S) -> Result<SpaceId, String>
|
||||
where
|
||||
S: Into<String>,
|
||||
{
|
||||
let name = name.into();
|
||||
let r = self.reference_spaces.find(&name);
|
||||
let s: &Space = Self::check_exactly_one(&r, "spaces", &name)?;
|
||||
|
||||
Ok(SpaceId(s.name().clone()))
|
||||
}
|
||||
|
||||
// Lookup a space within the reference spaces registered
|
||||
/// Returns an ordered list of the reference space names registered.
|
||||
pub fn space_keys(&self) -> &Vec<String> {
|
||||
self.reference_spaces.keys()
|
||||
}
|
||||
|
||||
// Lookup a space within the reference spaces registered
|
||||
pub fn space<S>(&self, name: S) -> Result<&Space, String>
|
||||
where
|
||||
S: Into<String>,
|
||||
{
|
||||
let name = name.into();
|
||||
if &name == space::Space::universe().name() {
|
||||
/// Lookup a space within the reference spaces registered.
|
||||
///
|
||||
/// # Parameters
|
||||
///
|
||||
/// * `name`:
|
||||
/// The name of the reference space to search for.
|
||||
pub fn space(&self, name: &str) -> Result<&Space, String> {
|
||||
if name == space::Space::universe().name() {
|
||||
Ok(space::Space::universe())
|
||||
} else {
|
||||
let r = self.reference_spaces.find(&name);
|
||||
let r = self
|
||||
.reference_spaces
|
||||
.find(&name.to_string())
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
Self::check_exactly_one(&r, "spaces", &name)
|
||||
Self::check_exactly_one(&r, "spaces", name)
|
||||
}
|
||||
}
|
||||
|
||||
// Lookup a space within the reference spaces registered
|
||||
/// Returns an ordered list of dataset (Core) names registered.
|
||||
pub fn core_keys(&self) -> &Vec<String> {
|
||||
self.cores.keys()
|
||||
}
|
||||
|
||||
// Lookup a dataset within the datasets registered
|
||||
pub fn core<S>(&self, name: S) -> Result<&Core, String>
|
||||
where
|
||||
S: Into<String>,
|
||||
{
|
||||
let name = name.into();
|
||||
let r = self.cores.find(&name);
|
||||
/// Lookup a dataset within the datasets registered.
|
||||
///
|
||||
/// # Parameters
|
||||
///
|
||||
/// * `name`:
|
||||
/// The name of the dataset (core) to search for.
|
||||
pub fn core(&self, name: &str) -> Result<&Core, String> {
|
||||
let r = self.cores.find(&name.to_string()).collect::<Vec<_>>();
|
||||
|
||||
Self::check_exactly_one(&r, "cores", &name)
|
||||
Self::check_exactly_one(&r, "cores", name)
|
||||
}
|
||||
}
|
||||
|
||||
@@ -214,3 +167,9 @@ impl ironsea_index::Record<String> for Space {
|
||||
self.name().clone()
|
||||
}
|
||||
}
|
||||
|
||||
impl ironsea_index::Record<String> for Core {
|
||||
fn key(&self) -> String {
|
||||
self.name().clone()
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,17 +1,25 @@
|
||||
use serde::Deserialize;
|
||||
use serde::Serialize;
|
||||
|
||||
use super::coordinate::Coordinate;
|
||||
use super::position::Position;
|
||||
|
||||
/// Mathematical set numbers.
|
||||
#[derive(Clone, Debug, Deserialize, PartialEq, Serialize)]
|
||||
pub enum NumberSet {
|
||||
/// [Natural numbers](https://en.wikipedia.org/wiki/Natural_number), here including **0**.
|
||||
N,
|
||||
/// [Integers](https://en.wikipedia.org/wiki/Integer).
|
||||
Z,
|
||||
/// [Rational](https://en.wikipedia.org/wiki/Rational_number) numbers.
|
||||
Q,
|
||||
/// [Real](https://en.wikipedia.org/wiki/Real_number) numbers.
|
||||
R,
|
||||
}
|
||||
|
||||
impl From<String> for NumberSet {
|
||||
fn from(set: String) -> Self {
|
||||
match set.as_str() {
|
||||
impl From<&str> for NumberSet {
|
||||
fn from(set: &str) -> Self {
|
||||
match set {
|
||||
"N" => NumberSet::N,
|
||||
"Z" => NumberSet::Z,
|
||||
"Q" => NumberSet::Q,
|
||||
@@ -21,8 +29,8 @@ impl From<String> for NumberSet {
|
||||
}
|
||||
}
|
||||
|
||||
impl From<NumberSet> for String {
|
||||
fn from(set: NumberSet) -> String {
|
||||
impl From<&NumberSet> for String {
|
||||
fn from(set: &NumberSet) -> String {
|
||||
let s = match set {
|
||||
NumberSet::N => "N",
|
||||
NumberSet::Z => "R",
|
||||
@@ -34,12 +42,19 @@ impl From<NumberSet> for String {
|
||||
}
|
||||
}
|
||||
|
||||
/// Definition of a fixed-precision, finite length axis.
|
||||
#[derive(Clone, Debug, Deserialize, PartialEq, Serialize)]
|
||||
pub struct Graduation {
|
||||
/// Set of numbers allowed on the axis.
|
||||
pub set: NumberSet,
|
||||
/// Minimum value *inclusive*.
|
||||
pub minimum: f64,
|
||||
/// Maximum value *inclusive*.
|
||||
pub maximum: f64,
|
||||
/// Number of *ticks* or discrete values between `minimum` and
|
||||
/// `maximum`.
|
||||
pub steps: u64,
|
||||
/// Length between two distinct *ticks* on the axis.
|
||||
pub epsilon: f64,
|
||||
}
|
||||
|
||||
@@ -57,7 +72,7 @@ impl Graduation {
|
||||
|
||||
#[derive(Clone, Debug, Deserialize, PartialEq, Serialize)]
|
||||
#[allow(non_camel_case_types)]
|
||||
pub enum UnitSI {
|
||||
enum UnitSI {
|
||||
// Partial list, which is tailored to the use case needs. Prevents possible
|
||||
// confusions between Mm and mm, for example.
|
||||
m,
|
||||
@@ -110,16 +125,45 @@ impl From<&str> for UnitSI {
|
||||
}
|
||||
}
|
||||
|
||||
/// Definition of an axis of a base.
|
||||
///
|
||||
/// This links together valid values on this axis, as well as the
|
||||
/// direction in the Universe of the axis and the base length unit of
|
||||
/// the `1.0` value.
|
||||
// TODO: In the future this might become an Enum with AffineAxis, ArbitraryAxis, etc...
|
||||
#[derive(Clone, Debug, Deserialize, PartialEq, Serialize)]
|
||||
pub struct Axis {
|
||||
measurement_unit: UnitSI,
|
||||
graduation: Graduation,
|
||||
// Coordinates in Universe, expressed in f64, and in the Universe number of dimensions.
|
||||
pub unit_vector: Position,
|
||||
// Coordinates in Universe, expressed in f64, and in the Universe
|
||||
// number of dimensions.
|
||||
unit_vector: Position,
|
||||
}
|
||||
|
||||
impl Axis {
|
||||
/// Instanciate a new Axis definition.
|
||||
///
|
||||
/// # Parameters
|
||||
///
|
||||
/// * `unit`:
|
||||
/// SI Unit to use on this axis for the `1.0` value.
|
||||
/// See [measurement_unit](#method.measurement_unit).
|
||||
///
|
||||
/// * `unit_vector`:
|
||||
/// A vector providing the direction in the Universe space of
|
||||
/// this axis.
|
||||
///
|
||||
/// * `set`:
|
||||
/// The valid numbers on this axis.
|
||||
///
|
||||
/// * `minimum`:
|
||||
/// The minimum value described by this axis *included*.
|
||||
///
|
||||
/// * `maximum`:
|
||||
/// The maximum value described by this axis *included*.
|
||||
///
|
||||
/// * `steps`:
|
||||
/// The number of steps, or discrete *ticks* on this axis.
|
||||
pub fn new(
|
||||
unit: &str,
|
||||
unit_vector: Vec<f64>,
|
||||
@@ -139,20 +183,48 @@ impl Axis {
|
||||
})
|
||||
}
|
||||
|
||||
pub fn measurement_unit(&self) -> String {
|
||||
self.measurement_unit.to_str().into()
|
||||
/// The unit, as in [SI unit] used on this axis, more specifically,
|
||||
/// a [metric prefix] of the **meter**.
|
||||
///
|
||||
/// Currently the following values are supported:
|
||||
/// * `m`
|
||||
/// * `dm`
|
||||
/// * `cm`
|
||||
/// * `mm`
|
||||
/// * `um`
|
||||
/// * `nm`
|
||||
/// * `pm`
|
||||
///
|
||||
/// [SI unit]: https://en.wikipedia.org/wiki/International_System_of_Units
|
||||
/// [metric prefix]: https://en.wikipedia.org/wiki/Metric_prefix
|
||||
pub fn measurement_unit(&self) -> &str {
|
||||
self.measurement_unit.to_str()
|
||||
}
|
||||
|
||||
/// The unit vector of the axis.
|
||||
///
|
||||
/// This vector is expressed in the Universe coordinate system.
|
||||
pub fn unit_vector(&self) -> &Position {
|
||||
&self.unit_vector
|
||||
}
|
||||
|
||||
/// The valid number range and properties on this axis.
|
||||
pub fn graduation(&self) -> &Graduation {
|
||||
&self.graduation
|
||||
}
|
||||
|
||||
// Project a point expressed in Universe coordinates from the origin of this
|
||||
// axis on this axis.
|
||||
/// Project a position on this axis.
|
||||
///
|
||||
/// The resulting coordinate is expressed as an encoded coordinate
|
||||
/// on this axis.
|
||||
///
|
||||
/// # Parameters
|
||||
///
|
||||
/// * `position`:
|
||||
/// The position to project on this axis. It must be defined in
|
||||
/// Universe coordinates, but with any translations already
|
||||
/// applied so that the origin of the vector is the origin of
|
||||
/// this axis.
|
||||
pub fn project_in(&self, position: &Position) -> Result<Coordinate, String> {
|
||||
let max = self.graduation.maximum;
|
||||
let min = self.graduation.minimum;
|
||||
@@ -160,38 +232,64 @@ impl Axis {
|
||||
let d = position.dot_product(&self.unit_vector);
|
||||
|
||||
// Apply Unit scaling
|
||||
let d = d / self.measurement_unit.factor();
|
||||
let mut d = d / self.measurement_unit.factor();
|
||||
|
||||
// Ensure it is within allowed range: Upper bound.
|
||||
if d > max {
|
||||
return Err(format!(
|
||||
"project_in: position out of bounds: {} >= {}",
|
||||
d, max
|
||||
));
|
||||
// FIXME: Should we generate an error instead?
|
||||
//return Err(format!(
|
||||
// "project_in: position out of bounds: {} >= {}",
|
||||
// d, max
|
||||
//));
|
||||
|
||||
// FIXME: For now, just clip.
|
||||
d = max;
|
||||
}
|
||||
|
||||
// Ensure it is within allowed range: Lower bound.
|
||||
if d < min {
|
||||
return Err(format!(
|
||||
"project_in: position out of bounds: {} < {}",
|
||||
d, min
|
||||
));
|
||||
// FIXME: Should we generate an error instead?
|
||||
//return Err(format!(
|
||||
// "project_in: position out of bounds: {} < {}",
|
||||
// d, min
|
||||
//));
|
||||
|
||||
// FIXME: For now, just clip.
|
||||
d = min;
|
||||
}
|
||||
|
||||
self.encode(d)
|
||||
}
|
||||
|
||||
// Convert a value on this axis to Universe coordinates, based from the origin of this axis.
|
||||
/// Convert an encoded coordinate expressed on this axis into a
|
||||
/// position.
|
||||
///
|
||||
/// The resulting position is expressed in the Universe reference
|
||||
/// space, but from the origin of this axis. Any required
|
||||
/// translation must be applied to this resulting position to obtain
|
||||
/// an absolute value in the Universe space.
|
||||
///
|
||||
/// # Parameters
|
||||
///
|
||||
/// * `coordinate`:
|
||||
/// The coordinate to project out of this axis. It must be
|
||||
/// defined as an encoded coordinate on this axis.
|
||||
pub fn project_out(&self, coordinate: &Coordinate) -> Result<Position, String> {
|
||||
let d = self.decode(coordinate)?;
|
||||
|
||||
// Apply Unit scaling
|
||||
let d = d * self.measurement_unit.factor();
|
||||
|
||||
Ok(self.unit_vector.clone() * d)
|
||||
Ok(&self.unit_vector * d)
|
||||
}
|
||||
|
||||
// Value is expressed on the current Axis, not in absolute coordinates!
|
||||
/// Encode a coordinate expressed on this axis.
|
||||
///
|
||||
/// # Parameters
|
||||
///
|
||||
/// * `val`:
|
||||
/// The coordinate to encode. It must be defined as a
|
||||
/// coordinate on this axis.
|
||||
pub fn encode(&self, val: f64) -> Result<Coordinate, String> {
|
||||
let max = self.graduation.maximum;
|
||||
let min = self.graduation.minimum;
|
||||
@@ -218,7 +316,13 @@ impl Axis {
|
||||
Ok(v.into())
|
||||
}
|
||||
|
||||
// Value is expressed on the current Axis, not in absolute coordinates!
|
||||
/// Decode a coordinate expressed on this axis.
|
||||
///
|
||||
/// # Parameters
|
||||
///
|
||||
/// * `val`:
|
||||
/// The coordinate to decode. It must be defined as an encoded
|
||||
/// coordinate on this axis.
|
||||
pub fn decode(&self, val: &Coordinate) -> Result<f64, String> {
|
||||
let max = self.graduation.maximum;
|
||||
let min = self.graduation.minimum;
|
||||
|
||||
@@ -8,18 +8,39 @@ use std::ops::Add;
|
||||
use std::ops::Mul;
|
||||
use std::ops::Sub;
|
||||
|
||||
use serde::Deserialize;
|
||||
use serde::Serialize;
|
||||
|
||||
/// Store efficiently a coordinate.
|
||||
///
|
||||
/// While you can manually create a `Coordinate` value directly, using
|
||||
/// the `From` trait will automatically choose the most efficient enum
|
||||
/// member to store the value. This it the recommended way of using this
|
||||
/// struct.
|
||||
#[derive(Clone, Copy, Debug, Deserialize, Serialize)]
|
||||
pub enum Coordinate {
|
||||
/// Encoded coordinates whose value is in the range `[0; 2^8[`.
|
||||
CoordinateU8(u8),
|
||||
/// Encoded coordinates whose value is in the range `[0; 2^16[`,
|
||||
/// but should be used only for the range `[2^8; 2^16[`.
|
||||
CoordinateU16(u16),
|
||||
/// Encoded coordinates whose value is in the range `[0; 2^32[`,
|
||||
/// but should be used only for the range `[2^16; 2^32[`.
|
||||
CoordinateU32(u32),
|
||||
/// Encoded coordinates whose value is in the range `[0; 2^64[`,
|
||||
/// but should be used only for the range `[2^32; 2^64[`.
|
||||
CoordinateU64(u64),
|
||||
// We currently assume that 2^64 is enough to store encoded position values per axis.
|
||||
//CoordinateU128(u128),
|
||||
/// Decoded coordinate value expressed as a floating point value over 64 bits.
|
||||
/// For details on the precision, please see the
|
||||
/// [IEEE 754](https://en.wikipedia.org/wiki/IEEE_754) reference.
|
||||
CoordinateF64(f64),
|
||||
}
|
||||
|
||||
impl Coordinate {
|
||||
/// Return the value as a `f64`, this may introduce a loss of
|
||||
/// precision for encoded values.
|
||||
pub fn f64(&self) -> f64 {
|
||||
match *self {
|
||||
Coordinate::CoordinateU8(v) => f64::from(v),
|
||||
@@ -30,6 +51,7 @@ impl Coordinate {
|
||||
}
|
||||
}
|
||||
|
||||
/// Return the value as `u64`, this is valid only on encoded values.
|
||||
pub fn u64(&self) -> u64 {
|
||||
match *self {
|
||||
Coordinate::CoordinateU8(v) => u64::from(v),
|
||||
@@ -39,6 +61,12 @@ impl Coordinate {
|
||||
Coordinate::CoordinateF64(_v) => unreachable!(),
|
||||
}
|
||||
}
|
||||
|
||||
/// Return the value as `usize`, this is valid only on encoded
|
||||
/// values.
|
||||
pub fn as_usize(&self) -> usize {
|
||||
self.u64() as usize
|
||||
}
|
||||
}
|
||||
|
||||
/*
|
||||
@@ -178,10 +206,10 @@ impl From<u64> for Coordinate {
|
||||
// Slight syntax hack, as exclusive ranges are not yet available.
|
||||
// cf: https://github.com/rust-lang/rust/issues/37854
|
||||
match v {
|
||||
_ if v <= u64::from(std::u8::MAX) => Coordinate::CoordinateU8(v as u8),
|
||||
_ if v <= u64::from(std::u16::MAX) => Coordinate::CoordinateU16(v as u16),
|
||||
_ if v <= u64::from(std::u32::MAX) => Coordinate::CoordinateU32(v as u32),
|
||||
_ => Coordinate::CoordinateU64(v as u64),
|
||||
_ if v <= u64::from(u8::MAX) => Coordinate::CoordinateU8(v as u8),
|
||||
_ if v <= u64::from(u16::MAX) => Coordinate::CoordinateU16(v as u16),
|
||||
_ if v <= u64::from(u32::MAX) => Coordinate::CoordinateU32(v as u32),
|
||||
_ => Coordinate::CoordinateU64(v),
|
||||
/*_ => {
|
||||
panic!("Out of range {} > {}", v, std::u64::MAX);
|
||||
} */
|
||||
|
||||
@@ -1,16 +1,44 @@
|
||||
use serde::Deserialize;
|
||||
use serde::Serialize;
|
||||
|
||||
use super::axis::Axis;
|
||||
use super::coordinate::Coordinate;
|
||||
use super::position::Position;
|
||||
use super::MAX_K;
|
||||
|
||||
/// Kinds of space coordinate systems, or bases
|
||||
#[derive(Clone, Debug, Deserialize, PartialEq, Serialize)]
|
||||
pub enum CoordinateSystem {
|
||||
Universe,
|
||||
// Coordinates in Universe, expressed in f64, and in the Universe number of dimensions.
|
||||
AffineSystem { origin: Position, axes: Vec<Axis> },
|
||||
/// Absolute base, which allows to generate transformation between
|
||||
/// spaces by anchoring them relative to each other.
|
||||
Universe {
|
||||
/// A position which contains zeroes for all its coordinates,
|
||||
/// but has a coordinate per dimensions of the highest
|
||||
/// dimensions space referenced.
|
||||
origin: Position,
|
||||
},
|
||||
/// Base which needs only an affine transformation to map into the Universe.
|
||||
AffineSystem {
|
||||
/// Coordinates in Universe, expressed in f64, or decoded, and
|
||||
/// in the Universe number of dimensions.
|
||||
origin: Position,
|
||||
|
||||
/// The definition of the coordinate system, through its axes.
|
||||
axes: Vec<Axis>,
|
||||
},
|
||||
}
|
||||
|
||||
impl CoordinateSystem {
|
||||
/// Instantiate a new coordinate system.
|
||||
///
|
||||
/// # Parameters
|
||||
///
|
||||
/// * `origin`:
|
||||
/// The translation vector in Universe coordinates of this
|
||||
/// base.
|
||||
///
|
||||
/// * `axes`:
|
||||
/// The list of axes defining the coordinate system.
|
||||
pub fn new(origin: Vec<f64>, axes: Vec<Axis>) -> Self {
|
||||
CoordinateSystem::AffineSystem {
|
||||
origin: origin.into(),
|
||||
@@ -18,42 +46,46 @@ impl CoordinateSystem {
|
||||
}
|
||||
}
|
||||
|
||||
pub fn origin(&self) -> Position {
|
||||
/// The translation vector, in Universe coordinates.
|
||||
pub fn origin(&self) -> &Position {
|
||||
match self {
|
||||
CoordinateSystem::Universe => {
|
||||
let origin = [0f64; MAX_K].to_vec();
|
||||
origin.into()
|
||||
}
|
||||
CoordinateSystem::AffineSystem { origin, .. } => origin.clone(),
|
||||
CoordinateSystem::Universe { origin, .. } => origin,
|
||||
CoordinateSystem::AffineSystem { origin, .. } => origin,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn axes(&self) -> Vec<Axis> {
|
||||
/// The axes definition of this base.
|
||||
pub fn axes(&self) -> &Vec<Axis> {
|
||||
match self {
|
||||
CoordinateSystem::Universe => {
|
||||
CoordinateSystem::Universe { .. } => {
|
||||
//FIXME: Generate a CoordinateSystem on the fly or store it as part of the Universe Space?
|
||||
unimplemented!()
|
||||
}
|
||||
CoordinateSystem::AffineSystem { axes, .. } => axes.clone(),
|
||||
CoordinateSystem::AffineSystem { axes, .. } => axes,
|
||||
}
|
||||
}
|
||||
|
||||
/// The number of dimensions of positions within this base.
|
||||
pub fn dimensions(&self) -> usize {
|
||||
match self {
|
||||
CoordinateSystem::Universe => MAX_K,
|
||||
CoordinateSystem::Universe { .. } => MAX_K,
|
||||
CoordinateSystem::AffineSystem { axes, .. } => axes.len(),
|
||||
}
|
||||
}
|
||||
|
||||
/// The smallest bounding box containing the whole base, expressed
|
||||
/// in decoded Universe coordinates.
|
||||
///
|
||||
// FIXME: Add the translation vector!
|
||||
pub fn bounding_box(&self) -> (Position, Position) {
|
||||
let mut low = Vec::with_capacity(self.dimensions());
|
||||
let mut high = Vec::with_capacity(self.dimensions());
|
||||
|
||||
match self {
|
||||
CoordinateSystem::Universe => {
|
||||
CoordinateSystem::Universe { .. } => {
|
||||
for _ in 0..self.dimensions() {
|
||||
low.push(std::f64::MAX);
|
||||
high.push(std::f64::MIN);
|
||||
low.push(f64::MIN);
|
||||
high.push(f64::MAX);
|
||||
}
|
||||
}
|
||||
CoordinateSystem::AffineSystem { axes, .. } => {
|
||||
@@ -67,6 +99,9 @@ impl CoordinateSystem {
|
||||
(low.into(), high.into())
|
||||
}
|
||||
|
||||
/// The volume of this space.
|
||||
///
|
||||
// FIXME: This assumes orthogonal spaces!
|
||||
pub fn volume(&self) -> f64 {
|
||||
let (low, high) = self.bounding_box();
|
||||
let difference: Vec<_> = (high - low).into();
|
||||
@@ -80,20 +115,31 @@ impl CoordinateSystem {
|
||||
volume
|
||||
}
|
||||
|
||||
// The position is expressed in coordinates in the universe,
|
||||
// return a position in the current coordinate system.
|
||||
/// Rebase a position in this coordinate space.
|
||||
///
|
||||
/// Each coordinate is encoded individually, and a new `Position`
|
||||
/// is generated.
|
||||
///
|
||||
/// # Parameters
|
||||
///
|
||||
/// * `position`:
|
||||
/// expressed in decoded Universe coordinates.
|
||||
///
|
||||
/// # Return value
|
||||
///
|
||||
/// The encoded coordinates within this coordinate system.
|
||||
pub fn rebase(&self, position: &Position) -> Result<Position, String> {
|
||||
match self {
|
||||
CoordinateSystem::Universe => {
|
||||
CoordinateSystem::Universe { origin } => {
|
||||
// Ensure the coordinates are encoded into F64 variants of
|
||||
// coordinates by forcing an addition to the origin position
|
||||
// which is expressed as F64 variants. The addition will convert
|
||||
// to F64 automatically.
|
||||
Ok(self.origin().clone() + position.clone())
|
||||
Ok(origin + position)
|
||||
}
|
||||
CoordinateSystem::AffineSystem { origin, axes } => {
|
||||
let dimensions = axes.len();
|
||||
let translated = position.clone() - origin.clone();
|
||||
let translated = position - origin;
|
||||
let mut rebased = Vec::with_capacity(dimensions);
|
||||
|
||||
for a in axes.iter().take(dimensions) {
|
||||
@@ -106,20 +152,28 @@ impl CoordinateSystem {
|
||||
}
|
||||
}
|
||||
|
||||
// The position is expressed in coordinates in the current coordinate system,
|
||||
// return a position in Universe coordinates.
|
||||
/// Express the position in the Universe coordinate system.
|
||||
///
|
||||
/// # Parameters
|
||||
///
|
||||
/// * `position`:
|
||||
/// expressed as an encoded coordinates in the coordinate system.
|
||||
///
|
||||
/// # Return value
|
||||
///
|
||||
/// The position expressed in Universe decoded coordinates.
|
||||
pub fn absolute_position(&self, position: &Position) -> Result<Position, String> {
|
||||
match self {
|
||||
CoordinateSystem::Universe => {
|
||||
CoordinateSystem::Universe { origin } => {
|
||||
// Ensure the coordinates are encoded into F64 variants of
|
||||
// coordinates by forcing an addition to the origin position
|
||||
// which is expressed as F64 variants. The addition will convert
|
||||
// to F64 automatically.
|
||||
Ok(self.origin().clone() + position.clone())
|
||||
Ok(origin + position)
|
||||
}
|
||||
CoordinateSystem::AffineSystem { axes, .. } => {
|
||||
// Start from the base origin.
|
||||
let mut rebased = self.origin();
|
||||
let mut rebased = self.origin().clone();
|
||||
|
||||
// Convert to Universe coordinates
|
||||
for k in 0..axes.len() {
|
||||
@@ -132,13 +186,24 @@ impl CoordinateSystem {
|
||||
}
|
||||
}
|
||||
|
||||
// The position is expressed in the current system
|
||||
// Encode each coordinate separately and return an encoded Position
|
||||
/// Encode a position expressed in the current coordinate system.
|
||||
///
|
||||
/// Each coordinate is encoded individually, and a new `Position`
|
||||
/// is generated.
|
||||
///
|
||||
/// # Parameters
|
||||
///
|
||||
/// * `position`:
|
||||
/// expressed in the current coordinate system.
|
||||
///
|
||||
/// # Return value
|
||||
///
|
||||
/// The encoded coordinates within this coordinate system.
|
||||
pub fn encode(&self, position: &[f64]) -> Result<Position, String> {
|
||||
let mut encoded = vec![];
|
||||
|
||||
match self {
|
||||
CoordinateSystem::Universe => {
|
||||
CoordinateSystem::Universe { .. } => {
|
||||
assert_eq!(position.len(), MAX_K);
|
||||
for c in position {
|
||||
encoded.push(Coordinate::CoordinateF64(*c));
|
||||
@@ -155,13 +220,25 @@ impl CoordinateSystem {
|
||||
Ok(encoded.into())
|
||||
}
|
||||
|
||||
// The position is expressed in the current system as an encoded value,
|
||||
// return a position in the current system as f64 values.
|
||||
/// Decode a position expressed in the current coordinate system as
|
||||
/// an encoded value.
|
||||
///
|
||||
/// Each coordinate is decoded individually.
|
||||
///
|
||||
/// # Parameters
|
||||
///
|
||||
/// * `position`:
|
||||
/// expressed in the current coordinate system, as encoded
|
||||
/// values.
|
||||
///
|
||||
/// # Return value
|
||||
///
|
||||
/// The decoded coordinates within this coordinate system.
|
||||
pub fn decode(&self, position: &Position) -> Result<Vec<f64>, String> {
|
||||
let mut decoded = vec![];
|
||||
|
||||
match self {
|
||||
CoordinateSystem::Universe => {
|
||||
CoordinateSystem::Universe { .. } => {
|
||||
assert_eq!(position.dimensions(), MAX_K);
|
||||
for c in 0..position.dimensions() {
|
||||
decoded.push(position[c].into());
|
||||
|
||||
@@ -1,3 +1,7 @@
|
||||
//! Reference space definitions.
|
||||
//!
|
||||
//! This include notions such as shapes, positions, axes, etc…
|
||||
|
||||
mod axis;
|
||||
mod coordinate;
|
||||
mod coordinate_system;
|
||||
@@ -7,6 +11,9 @@ mod shape;
|
||||
#[cfg(test)]
|
||||
mod tests;
|
||||
|
||||
use serde::Deserialize;
|
||||
use serde::Serialize;
|
||||
|
||||
pub use axis::Axis;
|
||||
pub use axis::Graduation;
|
||||
pub use axis::NumberSet;
|
||||
@@ -15,15 +22,25 @@ pub use coordinate_system::CoordinateSystem;
|
||||
pub use position::Position;
|
||||
pub use shape::Shape;
|
||||
|
||||
pub const MAX_K: usize = 3;
|
||||
// Maximum number of dimensions currently supported.
|
||||
//
|
||||
// **Note:** This will be deprecated as soon as support is implemented
|
||||
// in some dependencies. This is linked to limitations in
|
||||
// [ironsea_index_sfc_dbc].
|
||||
//
|
||||
// [ironsea_index_sfc_dbc]: https://github.com/epfl-dias/ironsea_index_sfc_dbc
|
||||
const MAX_K: usize = 3;
|
||||
|
||||
lazy_static! {
|
||||
static ref UNIVERSE: Space = Space {
|
||||
name: "Universe".into(),
|
||||
system: CoordinateSystem::Universe,
|
||||
system: CoordinateSystem::Universe {
|
||||
origin: [0f64; MAX_K].to_vec().into()
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
/// A reference space, defined by its name and coordinate system.
|
||||
#[derive(Clone, Debug, Deserialize, PartialEq, Serialize)]
|
||||
pub struct Space {
|
||||
name: String,
|
||||
@@ -31,6 +48,15 @@ pub struct Space {
|
||||
}
|
||||
|
||||
impl Space {
|
||||
/// Instantiate a new space.
|
||||
///
|
||||
/// # Parameters
|
||||
///
|
||||
/// * `name`:
|
||||
/// Id of the reference space.
|
||||
///
|
||||
/// * `system`:
|
||||
/// Coordinate system defintion of the reference space
|
||||
pub fn new<S>(name: S, system: CoordinateSystem) -> Self
|
||||
where
|
||||
S: Into<String>,
|
||||
@@ -41,54 +67,93 @@ impl Space {
|
||||
}
|
||||
}
|
||||
|
||||
/// Returns the Universe Space.
|
||||
///
|
||||
/// This space contains all of the spaces, and allows us to connect
|
||||
/// them between each others.
|
||||
pub fn universe() -> &'static Self {
|
||||
&UNIVERSE
|
||||
}
|
||||
|
||||
/// Transform a position from space `from` into a position in space `to`.
|
||||
///
|
||||
/// # Parameters
|
||||
///
|
||||
/// * `position`:
|
||||
/// Position to transform, expressed as encoded coordinates.
|
||||
///
|
||||
/// * `from`:
|
||||
/// Space in which `position` is defined.
|
||||
///
|
||||
/// * `to`:
|
||||
/// Target space in which `position` should be expressed.
|
||||
pub fn change_base(position: &Position, from: &Space, to: &Space) -> Result<Position, String> {
|
||||
to.rebase(&from.absolute_position(position)?)
|
||||
}
|
||||
|
||||
/// Id of the reference space.
|
||||
pub fn name(&self) -> &String {
|
||||
&self.name
|
||||
}
|
||||
|
||||
pub fn origin(&self) -> Position {
|
||||
/// Origin of the space, expressed in Universe.
|
||||
pub fn origin(&self) -> &Position {
|
||||
self.system.origin()
|
||||
}
|
||||
|
||||
pub fn axes(&self) -> Vec<Axis> {
|
||||
/// Axes definition of the space.
|
||||
pub fn axes(&self) -> &Vec<Axis> {
|
||||
self.system.axes()
|
||||
}
|
||||
|
||||
/// Returns the bounding box enclosing the whole space.
|
||||
pub fn bounding_box(&self) -> (Position, Position) {
|
||||
self.system.bounding_box()
|
||||
}
|
||||
|
||||
/// Total volume of the reference space.
|
||||
pub fn volume(&self) -> f64 {
|
||||
self.system.volume()
|
||||
}
|
||||
|
||||
// The position is expressed in coordinates in the universe,
|
||||
// return a position in the current space.
|
||||
pub fn rebase(&self, position: &Position) -> Result<Position, String> {
|
||||
// `position` is expressed in the Universe, this return encoded
|
||||
// coordinates in the current space.
|
||||
fn rebase(&self, position: &Position) -> Result<Position, String> {
|
||||
self.system.rebase(position)
|
||||
}
|
||||
|
||||
// The position is expressed in coordinates in the current space,
|
||||
// The position is expressed in encoded coordinates in the current space,
|
||||
// return an absolute position in Universe.
|
||||
pub fn absolute_position(&self, position: &Position) -> Result<Position, String> {
|
||||
fn absolute_position(&self, position: &Position) -> Result<Position, String> {
|
||||
self.system.absolute_position(position)
|
||||
}
|
||||
|
||||
// The position is expressed in the current space as an encoded value,
|
||||
// return a position in the current system as f64 values
|
||||
/// Decode coordinates expressed in the current space, to their
|
||||
/// values within the axes definitions.
|
||||
///
|
||||
/// # Parameters
|
||||
///
|
||||
/// * `position`:
|
||||
/// expressed in encoded coordinates within the current space.
|
||||
///
|
||||
/// # Return value
|
||||
///
|
||||
/// The decoded position within the space.
|
||||
pub fn decode(&self, position: &Position) -> Result<Vec<f64>, String> {
|
||||
self.system.decode(position)
|
||||
}
|
||||
|
||||
// The position is expressed in the current space,
|
||||
// return a position expressed in the current space as an encoded value.
|
||||
/// Encode a position expressed in the current space within the axes
|
||||
/// value ranges.
|
||||
///
|
||||
/// # Parameters
|
||||
///
|
||||
/// * `position`:
|
||||
/// expressed in the current space.
|
||||
///
|
||||
/// # Return value
|
||||
///
|
||||
/// The encoded coordinates within the space.
|
||||
pub fn encode(&self, position: &[f64]) -> Result<Position, String> {
|
||||
self.system.encode(position)
|
||||
}
|
||||
|
||||
@@ -13,26 +13,36 @@ use std::ops::MulAssign;
|
||||
use std::ops::Sub;
|
||||
use std::ops::SubAssign;
|
||||
|
||||
use serde::Deserialize;
|
||||
use serde::Serialize;
|
||||
|
||||
use super::coordinate::Coordinate;
|
||||
|
||||
#[derive(Clone, Debug, Deserialize, Eq, Hash, Ord, PartialEq, Serialize)]
|
||||
/// Store a position as efficiently as possible in terms of space.
|
||||
#[derive(Clone, Debug, Deserialize, Eq, Hash, PartialEq, Serialize)]
|
||||
pub enum Position {
|
||||
/// 1 dimension positions.
|
||||
Position1(Coordinate),
|
||||
/// 2 dimensions positions.
|
||||
Position2([Coordinate; 2]),
|
||||
/// 3 dimensions positions.
|
||||
Position3([Coordinate; 3]),
|
||||
/// 4 dimensions positions.
|
||||
Position4([Coordinate; 4]),
|
||||
/// 5 dimensions positions.
|
||||
Position5([Coordinate; 5]),
|
||||
/// 6 dimensions positions.
|
||||
Position6([Coordinate; 6]),
|
||||
/// 7 dimensions positions.
|
||||
Position7([Coordinate; 7]),
|
||||
/// 8 dimensions positions.
|
||||
Position8([Coordinate; 8]),
|
||||
/// N dimensions positions.
|
||||
PositionN(Vec<Coordinate>),
|
||||
}
|
||||
|
||||
impl Position {
|
||||
pub fn new(coordinates: Vec<Coordinate>) -> Self {
|
||||
coordinates.into()
|
||||
}
|
||||
|
||||
/// Returns the number of dimensions or size of the vector.
|
||||
pub fn dimensions(&self) -> usize {
|
||||
match self {
|
||||
Position::Position1(_) => 1,
|
||||
@@ -47,10 +57,11 @@ impl Position {
|
||||
}
|
||||
}
|
||||
|
||||
// Returns ||self||
|
||||
/// Compute `||self||`.
|
||||
pub fn norm(&self) -> f64 {
|
||||
if let Position::Position1(coordinates) = self {
|
||||
// the square root of a single number to the square is its positive value, so ensure it is.
|
||||
// the square root of a single number to the square is its
|
||||
// positive value, so ensure it is.
|
||||
coordinates.f64().abs()
|
||||
} else {
|
||||
let point: Vec<&Coordinate> = self.into();
|
||||
@@ -65,34 +76,48 @@ impl Position {
|
||||
}
|
||||
}
|
||||
|
||||
// Unit / Normalized vector from self.
|
||||
/// Compute the unit vector pointing in the same direction as `self`.
|
||||
pub fn unit(&self) -> Self {
|
||||
self.clone() * (1f64 / self.norm())
|
||||
self * (1f64 / self.norm())
|
||||
}
|
||||
|
||||
// This multiplies self^T with other, producing a scalar value
|
||||
pub fn dot_product(&self, other: &Self) -> f64 {
|
||||
assert_eq!(self.dimensions(), other.dimensions());
|
||||
/// Multiplies `self` with `rhs`, producing a scalar value.
|
||||
///
|
||||
/// `self • rhs = product`
|
||||
///
|
||||
/// **Note:** The two vector sizes must be equal, a.k.a the two
|
||||
/// vectors must have the same number of dimensions.
|
||||
///
|
||||
/// # Parameters
|
||||
///
|
||||
/// `rhs`:
|
||||
/// The right-hand side vector.
|
||||
pub fn dot_product(&self, rhs: &Self) -> f64 {
|
||||
assert_eq!(self.dimensions(), rhs.dimensions());
|
||||
|
||||
let point = self.clone();
|
||||
let other = other.clone();
|
||||
let mut product = 0f64;
|
||||
|
||||
for k in 0..self.dimensions() {
|
||||
product += (point[k] * other[k]).f64();
|
||||
product += (self[k] * rhs[k]).f64();
|
||||
}
|
||||
|
||||
product
|
||||
}
|
||||
|
||||
/// Remove bits of precision.
|
||||
///
|
||||
/// # Parameters
|
||||
///
|
||||
/// * `scale`:
|
||||
/// Number of bits of precision to remove from each coordinates.
|
||||
pub fn reduce_precision(&self, scale: u32) -> Self {
|
||||
let mut position = Vec::with_capacity(self.dimensions());
|
||||
|
||||
for i in 0..self.dimensions() {
|
||||
position.push((self[i].u64() >> scale).into())
|
||||
position.push(self[i].u64() >> scale)
|
||||
}
|
||||
|
||||
Position::new(position)
|
||||
position.into()
|
||||
}
|
||||
}
|
||||
|
||||
@@ -103,6 +128,12 @@ impl Display for Position {
|
||||
}
|
||||
}
|
||||
|
||||
impl Ord for Position {
|
||||
fn cmp(&self, other: &Self) -> Ordering {
|
||||
self.partial_cmp(other).unwrap()
|
||||
}
|
||||
}
|
||||
|
||||
impl PartialOrd for Position {
|
||||
fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
|
||||
// Let's restrict for now to same-length vectors.
|
||||
@@ -119,7 +150,7 @@ impl PartialOrd for Position {
|
||||
return None;
|
||||
}
|
||||
|
||||
let ordering = ordering.drain().filter_map(|v| v).collect::<Vec<_>>();
|
||||
let ordering = ordering.drain().flatten().collect::<Vec<_>>();
|
||||
match ordering.len() {
|
||||
3 => None,
|
||||
2 => {
|
||||
@@ -152,7 +183,10 @@ impl Index<usize> for Position {
|
||||
|
||||
fn index(&self, k: usize) -> &Self::Output {
|
||||
match self {
|
||||
Position::Position1(coordinate) => coordinate,
|
||||
Position::Position1(coordinate) => {
|
||||
assert_eq!(k, 0);
|
||||
coordinate
|
||||
}
|
||||
Position::Position2(coordinates) => &coordinates[k],
|
||||
Position::Position3(coordinates) => &coordinates[k],
|
||||
Position::Position4(coordinates) => &coordinates[k],
|
||||
@@ -168,7 +202,10 @@ impl Index<usize> for Position {
|
||||
impl IndexMut<usize> for Position {
|
||||
fn index_mut(&mut self, k: usize) -> &mut Self::Output {
|
||||
match self {
|
||||
Position::Position1(coordinate) => coordinate,
|
||||
Position::Position1(coordinate) => {
|
||||
assert_eq!(k, 0);
|
||||
coordinate
|
||||
}
|
||||
Position::Position2(coordinates) => &mut coordinates[k],
|
||||
Position::Position3(coordinates) => &mut coordinates[k],
|
||||
Position::Position4(coordinates) => &mut coordinates[k],
|
||||
@@ -190,6 +227,22 @@ impl Add for Position {
|
||||
}
|
||||
}
|
||||
|
||||
impl Add for &Position {
|
||||
type Output = Position;
|
||||
|
||||
fn add(self, rhs: Self) -> Self::Output {
|
||||
let dimensions = self.dimensions();
|
||||
assert_eq!(dimensions, rhs.dimensions());
|
||||
let mut v = Vec::with_capacity(dimensions);
|
||||
|
||||
for k in 0..dimensions {
|
||||
v.push(self[k] + rhs[k]);
|
||||
}
|
||||
|
||||
v.into()
|
||||
}
|
||||
}
|
||||
|
||||
impl AddAssign for Position {
|
||||
fn add_assign(&mut self, rhs: Self) {
|
||||
let dimensions = self.dimensions();
|
||||
@@ -247,6 +300,21 @@ impl Mul<f64> for Position {
|
||||
}
|
||||
}
|
||||
|
||||
impl Mul<f64> for &Position {
|
||||
type Output = Position;
|
||||
|
||||
fn mul(self, rhs: f64) -> Self::Output {
|
||||
let dimensions = self.dimensions();
|
||||
let mut v = Vec::with_capacity(dimensions);
|
||||
|
||||
for k in 0..dimensions {
|
||||
v.push(self[k] * rhs);
|
||||
}
|
||||
|
||||
v.into()
|
||||
}
|
||||
}
|
||||
|
||||
// Scalar product
|
||||
impl MulAssign<f64> for Position {
|
||||
fn mul_assign(&mut self, rhs: f64) {
|
||||
@@ -295,14 +363,14 @@ impl<'s> From<&'s Position> for Vec<&'s Coordinate> {
|
||||
fn from(position: &'s Position) -> Self {
|
||||
match position {
|
||||
Position::Position1(coordinate) => vec![coordinate],
|
||||
Position::Position2(coordinates) => coordinates.iter().map(|c| c).collect(),
|
||||
Position::Position3(coordinates) => coordinates.iter().map(|c| c).collect(),
|
||||
Position::Position4(coordinates) => coordinates.iter().map(|c| c).collect(),
|
||||
Position::Position5(coordinates) => coordinates.iter().map(|c| c).collect(),
|
||||
Position::Position6(coordinates) => coordinates.iter().map(|c| c).collect(),
|
||||
Position::Position7(coordinates) => coordinates.iter().map(|c| c).collect(),
|
||||
Position::Position8(coordinates) => coordinates.iter().map(|c| c).collect(),
|
||||
Position::PositionN(coordinates) => coordinates.iter().map(|c| c).collect(),
|
||||
Position::Position2(coordinates) => coordinates.iter().collect(),
|
||||
Position::Position3(coordinates) => coordinates.iter().collect(),
|
||||
Position::Position4(coordinates) => coordinates.iter().collect(),
|
||||
Position::Position5(coordinates) => coordinates.iter().collect(),
|
||||
Position::Position6(coordinates) => coordinates.iter().collect(),
|
||||
Position::Position7(coordinates) => coordinates.iter().collect(),
|
||||
Position::Position8(coordinates) => coordinates.iter().collect(),
|
||||
Position::PositionN(coordinates) => coordinates.iter().collect(),
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -355,14 +423,15 @@ impl From<Vec<u64>> for Position {
|
||||
|
||||
impl From<Position> for Vec<f64> {
|
||||
fn from(position: Position) -> Self {
|
||||
let point: Vec<&Coordinate> = (&position).into();
|
||||
|
||||
point.into_iter().map(|c| c.into()).collect()
|
||||
(&position).into()
|
||||
}
|
||||
}
|
||||
|
||||
impl From<&Position> for Vec<f64> {
|
||||
fn from(coordinates: &Position) -> Self {
|
||||
coordinates.clone().into()
|
||||
fn from(position: &Position) -> Self {
|
||||
let point: Vec<&Coordinate> = position.into();
|
||||
|
||||
point.into_iter().map(|c| c.into()).collect()
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -1,17 +1,37 @@
|
||||
use serde::Deserialize;
|
||||
use serde::Serialize;
|
||||
|
||||
use super::Coordinate;
|
||||
use super::Position;
|
||||
use super::Space;
|
||||
|
||||
/// Known shapes descriptions
|
||||
#[derive(Clone, Debug, Deserialize, Serialize)]
|
||||
pub enum Shape {
|
||||
/// A singular point in space.
|
||||
Point(Position),
|
||||
//HyperRectangle([Position; MAX_K]),
|
||||
/// A sphere in space.
|
||||
HyperSphere(Position, Coordinate),
|
||||
|
||||
/// Hyperrectangle whose faces have one of the axis as a normal.
|
||||
BoundingBox(Position, Position),
|
||||
//Nifti(nifti_data??),
|
||||
}
|
||||
|
||||
impl Shape {
|
||||
/// Convert the encoded coordinates between two reference spaces.
|
||||
///
|
||||
/// The resulting shape is expressed in encoded coordinates in the
|
||||
/// target space.
|
||||
///
|
||||
/// # Parameters
|
||||
///
|
||||
/// * `from`:
|
||||
/// Current reference space of the shape.
|
||||
///
|
||||
/// * `to`:
|
||||
/// Target reference space.
|
||||
pub fn rebase(&self, from: &Space, to: &Space) -> Result<Shape, String> {
|
||||
match self {
|
||||
Shape::Point(position) => Ok(Shape::Point(Space::change_base(position, from, to)?)),
|
||||
@@ -19,7 +39,7 @@ impl Shape {
|
||||
//FIXME: Is the length properly dealt with? How do we process this for space conversions?
|
||||
let mut r = Vec::with_capacity(center.dimensions());
|
||||
for _ in 0..center.dimensions() {
|
||||
r.push(radius.clone());
|
||||
r.push(*radius);
|
||||
}
|
||||
let r = r.into();
|
||||
let r = from.absolute_position(&r)?;
|
||||
@@ -33,6 +53,20 @@ impl Shape {
|
||||
}
|
||||
}
|
||||
|
||||
/// Decode the coordinates of the shape.
|
||||
///
|
||||
/// The encoded coordinates of the shapes are expressed in the
|
||||
/// provided space.
|
||||
///
|
||||
/// # Parameters
|
||||
///
|
||||
/// * `space`:
|
||||
/// Reference space of the shape. It is used to decode the
|
||||
/// encoded coordinates into positions.
|
||||
///
|
||||
/// # Return value
|
||||
///
|
||||
/// The shape with decoded positions within the space.
|
||||
pub fn decode(&self, space: &Space) -> Result<Shape, String> {
|
||||
let s = match self {
|
||||
Shape::Point(position) => Shape::Point(space.decode(position)?.into()),
|
||||
@@ -48,6 +82,19 @@ impl Shape {
|
||||
Ok(s)
|
||||
}
|
||||
|
||||
/// Encode the positions of the shape.
|
||||
///
|
||||
/// The positions of the shapes are expressed in the provided space.
|
||||
///
|
||||
/// # Parameters
|
||||
///
|
||||
/// * `space`:
|
||||
/// Reference space of the shape. It is used to encode the
|
||||
/// positions into encoded coordinates.
|
||||
///
|
||||
/// # Return value
|
||||
///
|
||||
/// The shape with encoded coordinates within the space.
|
||||
pub fn encode(&self, space: &Space) -> Result<Shape, String> {
|
||||
let s = match self {
|
||||
Shape::Point(position) => {
|
||||
@@ -69,6 +116,10 @@ impl Shape {
|
||||
Ok(s)
|
||||
}
|
||||
|
||||
/// Compute the minimum bounding box of the shape.
|
||||
///
|
||||
/// This is an hyperrectangle whose faces are perpendicular to an
|
||||
/// axis of the space, and which minimally covers the shape.
|
||||
pub fn get_mbb(&self) -> (Position, Position) {
|
||||
match self {
|
||||
Shape::Point(position) => (position.clone(), position.clone()),
|
||||
@@ -78,13 +129,19 @@ impl Shape {
|
||||
for _ in 0..dimensions {
|
||||
vr.push(*radius);
|
||||
}
|
||||
let vr: Position = vr.into();
|
||||
(center.clone() - vr.clone(), center.clone() + vr)
|
||||
let vr: &Position = &vr.into();
|
||||
(center - vr, center + vr)
|
||||
}
|
||||
Shape::BoundingBox(lower, higher) => (lower.clone(), higher.clone()),
|
||||
}
|
||||
}
|
||||
|
||||
/// Check if the shape overlaps with the given position.
|
||||
///
|
||||
/// # Parameters
|
||||
///
|
||||
/// * `position`:
|
||||
/// The position to check.
|
||||
pub fn contains(&self, position: &Position) -> bool {
|
||||
match self {
|
||||
Shape::Point(reference) => reference == position,
|
||||
@@ -167,7 +224,7 @@ impl Shape {
|
||||
// Initialise the current value
|
||||
let mut current = lower.clone();
|
||||
|
||||
// Add the first Position to the results, as nxt will return the following one.
|
||||
// Add the first Position to the results, as next will return the following one.
|
||||
results.push(current.clone());
|
||||
while next(lower.dimensions(), &lower, higher, &mut current) {
|
||||
results.push(current.clone())
|
||||
@@ -175,9 +232,8 @@ impl Shape {
|
||||
results
|
||||
}
|
||||
|
||||
// Transform a Shape into a list of Position which approximate the shape.
|
||||
// Note:
|
||||
// * All output positions are expressed within the space.
|
||||
/// Transform a Shape into a list of `Position` which approximate
|
||||
/// the shape.
|
||||
// TODO: Return an iterator instead, for performance!
|
||||
pub fn rasterise(&self) -> Result<Vec<Position>, String> {
|
||||
match self {
|
||||
@@ -188,7 +244,7 @@ impl Shape {
|
||||
|
||||
let positions = Shape::gen(&lower, &higher)
|
||||
.into_iter()
|
||||
.filter(|p| (p.clone() - center.clone()).norm() <= radius)
|
||||
.filter(|p| (p - center).norm() <= radius)
|
||||
.collect();
|
||||
|
||||
Ok(positions)
|
||||
@@ -197,10 +253,14 @@ impl Shape {
|
||||
}
|
||||
}
|
||||
|
||||
// Transform a Shape into a list of Position which approximate the shape.
|
||||
// Note:
|
||||
// * All input positions are expressed within the space.
|
||||
// * All output positions are expressed in absolute positions in Universe
|
||||
/// Transform a Shape into a list of `Position` which approximate
|
||||
/// the shape, in absolute, or Universe positions.
|
||||
///
|
||||
/// # Parameters
|
||||
///
|
||||
/// * `space`:
|
||||
/// Reference space in which the shape is expressed.
|
||||
///
|
||||
// TODO: Return an iterator instead, for performance!
|
||||
pub fn rasterise_from(&self, space: &Space) -> Result<Vec<Position>, String> {
|
||||
Ok(self
|
||||
@@ -213,9 +273,10 @@ impl Shape {
|
||||
.collect())
|
||||
}
|
||||
|
||||
/// Compute the volume.
|
||||
pub fn volume(&self) -> f64 {
|
||||
match self {
|
||||
Shape::Point(_) => std::f64::EPSILON, // Smallest non-zero volume possible
|
||||
Shape::Point(_) => f64::EPSILON, // Smallest non-zero volume possible
|
||||
Shape::BoundingBox(low, high) => {
|
||||
let mut volume = 1.0;
|
||||
|
||||
|
||||
@@ -1,13 +1,13 @@
|
||||
use std::cmp::Ordering;
|
||||
use std::collections::hash_map::DefaultHasher;
|
||||
use std::collections::HashMap;
|
||||
use std::collections::HashSet;
|
||||
use std::convert::TryInto;
|
||||
use std::hash::Hash;
|
||||
use std::hash::Hasher;
|
||||
|
||||
use ironsea_table_vector::VectorTable;
|
||||
use serde::Deserialize;
|
||||
use serde::Serialize;
|
||||
|
||||
use super::space::Coordinate;
|
||||
use super::space::Position;
|
||||
use super::space::Shape;
|
||||
use super::space::Space;
|
||||
@@ -16,11 +16,11 @@ use super::space_index::SpaceIndex;
|
||||
use super::space_index::SpaceSetIndex;
|
||||
use super::space_index::SpaceSetObject;
|
||||
use super::CoreQueryParameters;
|
||||
use super::IterPositions;
|
||||
|
||||
#[derive(Clone, Debug, Deserialize, Serialize)]
|
||||
pub struct SpaceDB {
|
||||
reference_space: String,
|
||||
values: Vec<Coordinate>,
|
||||
resolutions: Vec<SpaceIndex>,
|
||||
}
|
||||
|
||||
@@ -35,26 +35,11 @@ impl SpaceDB {
|
||||
const DIMENSIONS: usize = 3;
|
||||
const CELL_BITS: usize = 10;
|
||||
|
||||
let mut values = space_objects
|
||||
.iter()
|
||||
.map(|object| *object.value())
|
||||
.collect::<HashSet<_>>()
|
||||
.drain()
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
values.sort_unstable_by_key(|&c| c.u64());
|
||||
|
||||
space_objects.iter_mut().for_each(|object| {
|
||||
// Update the values to point into the local (shorter) mapping array.
|
||||
let val = values.binary_search(object.value()).unwrap();
|
||||
object.set_value(val.into());
|
||||
});
|
||||
|
||||
// Build the set of SpaceIndices.
|
||||
let mut resolutions = vec![];
|
||||
let mut indices = vec![];
|
||||
|
||||
if let Some(scales) = scales {
|
||||
if let Some(mut scales) = scales {
|
||||
// We optimize scaling, by iteratively building coarser and coarser
|
||||
// indexes. Powers holds a list of bit shift to apply based on the
|
||||
// previous value.
|
||||
@@ -62,12 +47,10 @@ impl SpaceDB {
|
||||
|
||||
// Limit temporary values lifetimes
|
||||
{
|
||||
// Sort by values, smaller to bigger.
|
||||
let mut exps = scales.clone();
|
||||
exps.sort_unstable_by_key(|v| v[0]);
|
||||
scales.sort_unstable_by_key(|v| v[0]);
|
||||
|
||||
let mut previous = 0u32;
|
||||
for scale in exps {
|
||||
for scale in scales {
|
||||
// FIXME: Remove these assertions ASAP, and support multi-factor scaling
|
||||
assert_eq!(scale.len(), DIMENSIONS);
|
||||
assert!(scale[0] == scale[1] && scale[0] == scale[2]);
|
||||
@@ -78,8 +61,7 @@ impl SpaceDB {
|
||||
}
|
||||
|
||||
// Apply fixed scales
|
||||
let mut count = 0;
|
||||
for power in &powers {
|
||||
for (count, power) in powers.iter().enumerate() {
|
||||
space_objects = space_objects
|
||||
.into_iter()
|
||||
.map(|mut o| {
|
||||
@@ -98,14 +80,9 @@ impl SpaceDB {
|
||||
.collect();
|
||||
|
||||
// Make sure we do not shift more position than available
|
||||
let shift = if count >= 31 { 31 } else { count };
|
||||
count += 1;
|
||||
let shift: u32 = if count >= 31 { 31 } else { count.try_into().unwrap() };
|
||||
indices.push((
|
||||
SpaceSetIndex::new(
|
||||
&VectorTable::new(space_objects.to_vec()),
|
||||
DIMENSIONS,
|
||||
CELL_BITS,
|
||||
),
|
||||
SpaceSetIndex::new(space_objects.iter(), DIMENSIONS, CELL_BITS),
|
||||
vec![power.0, power.0, power.0],
|
||||
shift,
|
||||
));
|
||||
@@ -113,9 +90,6 @@ impl SpaceDB {
|
||||
} else {
|
||||
// Generate scales, following max_elements
|
||||
if let Some(max_elements) = max_elements {
|
||||
// We cannot return less that the total number of individual Ids stored
|
||||
// in the index for a full-volume query.
|
||||
let max_elements = max_elements.max(values.len());
|
||||
let mut count = 0;
|
||||
|
||||
// The next index should contain at most half the number of
|
||||
@@ -124,11 +98,7 @@ impl SpaceDB {
|
||||
|
||||
// Insert Full resolution index.
|
||||
indices.push((
|
||||
SpaceSetIndex::new(
|
||||
&VectorTable::new(space_objects.clone()),
|
||||
DIMENSIONS,
|
||||
CELL_BITS,
|
||||
),
|
||||
SpaceSetIndex::new(space_objects.iter(), DIMENSIONS, CELL_BITS),
|
||||
vec![count, count, count],
|
||||
0, // Smallest value => highest resolution
|
||||
));
|
||||
@@ -136,8 +106,12 @@ impl SpaceDB {
|
||||
// Generate coarser indices, until we reach the expect max_element
|
||||
// values or we can't define bigger bit shift.
|
||||
loop {
|
||||
// Make sure we do not shift more position than available
|
||||
let shift = if count >= 31 { 31 } else { count };
|
||||
// Make sure we do not shift more position than available as well.
|
||||
if space_objects.len() <= max_elements || count > 31 {
|
||||
break;
|
||||
}
|
||||
let shift = count;
|
||||
|
||||
count += 1;
|
||||
space_objects = space_objects
|
||||
.into_iter()
|
||||
@@ -167,18 +141,10 @@ impl SpaceDB {
|
||||
}
|
||||
|
||||
indices.push((
|
||||
SpaceSetIndex::new(
|
||||
&VectorTable::new(space_objects.to_vec()),
|
||||
DIMENSIONS,
|
||||
CELL_BITS,
|
||||
),
|
||||
SpaceSetIndex::new(space_objects.iter(), DIMENSIONS, CELL_BITS),
|
||||
vec![count, count, count],
|
||||
shift,
|
||||
));
|
||||
|
||||
if space_objects.len() <= max_elements || count == std::u32::MAX {
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
// Generate indices as long as max is smaller than the number of point located in the whole space.
|
||||
@@ -186,7 +152,7 @@ impl SpaceDB {
|
||||
} else {
|
||||
// Generate only full-scale.
|
||||
indices.push((
|
||||
SpaceSetIndex::new(&VectorTable::new(space_objects), DIMENSIONS, CELL_BITS),
|
||||
SpaceSetIndex::new(space_objects.iter(), DIMENSIONS, CELL_BITS),
|
||||
vec![0, 0, 0],
|
||||
0,
|
||||
));
|
||||
@@ -219,7 +185,6 @@ impl SpaceDB {
|
||||
|
||||
SpaceDB {
|
||||
reference_space: reference_space.name().clone(),
|
||||
values,
|
||||
resolutions,
|
||||
}
|
||||
}
|
||||
@@ -228,11 +193,13 @@ impl SpaceDB {
|
||||
&self.reference_space
|
||||
}
|
||||
|
||||
/* Comment this for now, as this is not yet used.
|
||||
// The smallest volume threshold, which is the highest resolution, will
|
||||
// be at position 0
|
||||
fn highest_resolution(&self) -> usize {
|
||||
0
|
||||
}
|
||||
*/
|
||||
|
||||
// The highest volume threshold, which is the lowest resolution, will
|
||||
// be at position len - 1
|
||||
@@ -240,11 +207,6 @@ impl SpaceDB {
|
||||
self.resolutions.len() - 1
|
||||
}
|
||||
|
||||
// Is this Space DB empty?
|
||||
pub fn is_empty(&self) -> bool {
|
||||
self.values.is_empty()
|
||||
}
|
||||
|
||||
fn resolution_from_volume(&self, volume: f64) -> usize {
|
||||
for i in 0..self.resolutions.len() {
|
||||
if volume <= self.resolutions[i].threshold() {
|
||||
@@ -315,62 +277,47 @@ impl SpaceDB {
|
||||
}
|
||||
}
|
||||
|
||||
// Convert the value back to caller's references
|
||||
fn decode_value(&self, mut objects: Vec<SpaceSetObject>) -> Vec<SpaceSetObject> {
|
||||
for o in &mut objects {
|
||||
o.set_value(self.values[o.value().u64() as usize]);
|
||||
}
|
||||
|
||||
objects
|
||||
}
|
||||
|
||||
// Search by Id, a.k.a values
|
||||
// The results are in encoded space coordinates.
|
||||
pub fn get_by_id(
|
||||
&self,
|
||||
pub fn get_by_id<'s>(
|
||||
&'s self,
|
||||
id: usize,
|
||||
parameters: &CoreQueryParameters,
|
||||
) -> Result<Vec<SpaceSetObject>, String> {
|
||||
) -> Result<IterPositions<'s>, String> {
|
||||
// Is that ID referenced in the current space?
|
||||
if let Ok(offset) = self.values.binary_search(&id.into()) {
|
||||
let index = self.resolution(parameters);
|
||||
let index = self.resolution(parameters);
|
||||
|
||||
// Convert the view port to the encoded space coordinates
|
||||
let space = parameters.db.space(&self.reference_space)?;
|
||||
let view_port = parameters.view_port(space);
|
||||
// Convert the view port to the encoded space coordinates
|
||||
let space = parameters.db.space(&self.reference_space)?;
|
||||
let view_port = parameters.view_port(space);
|
||||
|
||||
// Select the objects
|
||||
let objects = self.resolutions[index]
|
||||
.find_by_value(&SpaceFields::new(self.name().into(), offset.into()));
|
||||
// Select the objects
|
||||
// FIXME: How to return an iterator instead of instantiating all
|
||||
// the points here? Needed because of &SpaceFields.
|
||||
let objects = self.resolutions[index]
|
||||
.find_by_value(&SpaceFields::new(self.name(), id))
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
let mut results = if let Some(view_port) = view_port {
|
||||
let results: IterPositions<'s> = if let Some(view_port) = view_port {
|
||||
Box::new(
|
||||
objects
|
||||
.into_iter()
|
||||
.filter(|o| view_port.contains(o.position()))
|
||||
.collect::<Vec<SpaceSetObject>>()
|
||||
} else {
|
||||
objects
|
||||
};
|
||||
|
||||
// Convert the Value back to caller's references
|
||||
// Here we do not use decode() as we have a single id value to manage.
|
||||
for o in &mut results {
|
||||
o.set_value(id.into());
|
||||
}
|
||||
|
||||
Ok(results)
|
||||
.filter(move |position| view_port.contains(position)),
|
||||
)
|
||||
} else {
|
||||
Ok(vec![])
|
||||
}
|
||||
Box::new(objects.into_iter())
|
||||
};
|
||||
|
||||
Ok(results)
|
||||
}
|
||||
|
||||
// Search by positions defining a volume.
|
||||
// The position is expressed in encoded space coordinates, and results are in encoded space coordinates.
|
||||
pub fn get_by_positions(
|
||||
&self,
|
||||
positions: &[Position],
|
||||
pub fn get_by_positions<'s>(
|
||||
&'s self,
|
||||
positions: impl Iterator<Item = Position> + 's,
|
||||
parameters: &CoreQueryParameters,
|
||||
) -> Result<Vec<SpaceSetObject>, String> {
|
||||
) -> Result<Box<dyn Iterator<Item = (Position, &SpaceFields)> + 's>, String> {
|
||||
let index = self.resolution(parameters);
|
||||
|
||||
// FIXME: Should I do it here, or add the assumption this is a clean list?
|
||||
@@ -379,15 +326,13 @@ impl SpaceDB {
|
||||
//let view_port = parameters.view_port(space);
|
||||
|
||||
// Select the objects
|
||||
let results = positions
|
||||
.iter()
|
||||
.flat_map(|position| self.resolutions[index].find(position))
|
||||
.collect::<Vec<SpaceSetObject>>();
|
||||
let results = positions.flat_map(move |position| {
|
||||
self.resolutions[index]
|
||||
.find(&position)
|
||||
.map(move |fields| (position.clone(), fields))
|
||||
});
|
||||
|
||||
// Decode the Value reference
|
||||
let results = self.decode_value(results);
|
||||
|
||||
Ok(results)
|
||||
Ok(Box::new(results))
|
||||
}
|
||||
|
||||
// Search by Shape defining a volume:
|
||||
@@ -396,11 +341,11 @@ impl SpaceDB {
|
||||
// * Point (Specific position)
|
||||
|
||||
// The Shape is expressed in encoded space coordinates, and results are in encoded space coordinates.
|
||||
pub fn get_by_shape(
|
||||
&self,
|
||||
shape: &Shape,
|
||||
pub fn get_by_shape<'s>(
|
||||
&'s self,
|
||||
shape: Shape,
|
||||
parameters: &CoreQueryParameters,
|
||||
) -> Result<Vec<SpaceSetObject>, String> {
|
||||
) -> Result<Box<dyn Iterator<Item = (Position, &SpaceFields)> + 's>, String> {
|
||||
let index = self.resolution(parameters);
|
||||
|
||||
// Convert the view port to the encoded space coordinates
|
||||
@@ -408,10 +353,7 @@ impl SpaceDB {
|
||||
let view_port = parameters.view_port(space);
|
||||
|
||||
// Select the objects
|
||||
let results = self.resolutions[index].find_by_shape(&shape, &view_port)?;
|
||||
|
||||
// Decode the Value reference
|
||||
let results = self.decode_value(results);
|
||||
let results = self.resolutions[index].find_by_shape(shape, &view_port)?;
|
||||
|
||||
Ok(results)
|
||||
}
|
||||
|
||||
@@ -1,22 +1,23 @@
|
||||
use std::cmp::Ord;
|
||||
|
||||
use ironsea_index::IndexedOwned;
|
||||
use ironsea_table_vector::VectorTable;
|
||||
use ironsea_index::IndexedDestructured;
|
||||
use serde::Deserialize;
|
||||
use serde::Serialize;
|
||||
|
||||
use super::space::Coordinate;
|
||||
use super::space::Position;
|
||||
use super::space::Shape;
|
||||
use super::SpaceId;
|
||||
use super::IterPositions;
|
||||
|
||||
#[derive(Clone, Debug, Deserialize, Hash, Serialize)]
|
||||
#[derive(Clone, Debug, Hash)]
|
||||
pub struct SpaceSetObject {
|
||||
space_id: SpaceId,
|
||||
space_id: String,
|
||||
position: Position,
|
||||
value: Coordinate, // Efficiently store the offset within the SpaceDB values vector
|
||||
value: usize,
|
||||
}
|
||||
|
||||
impl SpaceSetObject {
|
||||
pub fn new(reference_space: &str, position: Position, value: Coordinate) -> Self {
|
||||
pub fn new(reference_space: &str, position: Position, value: usize) -> Self {
|
||||
SpaceSetObject {
|
||||
space_id: reference_space.into(),
|
||||
position,
|
||||
@@ -24,11 +25,7 @@ impl SpaceSetObject {
|
||||
}
|
||||
}
|
||||
|
||||
pub fn id(&self) -> &Coordinate {
|
||||
&self.value
|
||||
}
|
||||
|
||||
pub fn space_id(&self) -> &SpaceId {
|
||||
pub fn space_id(&self) -> &String {
|
||||
&self.space_id
|
||||
}
|
||||
|
||||
@@ -40,40 +37,59 @@ impl SpaceSetObject {
|
||||
self.position = pos;
|
||||
}
|
||||
|
||||
pub fn value(&self) -> &Coordinate {
|
||||
&self.value
|
||||
pub fn value(&self) -> usize {
|
||||
self.value
|
||||
}
|
||||
|
||||
pub fn set_value(&mut self, value: Coordinate) {
|
||||
pub fn set_value(&mut self, value: usize) {
|
||||
self.value = value;
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, Deserialize, Serialize)]
|
||||
pub struct SpaceFields {
|
||||
space_id: SpaceId,
|
||||
value: Coordinate,
|
||||
space_id: String,
|
||||
value: usize,
|
||||
}
|
||||
|
||||
impl SpaceFields {
|
||||
pub fn new(space_id: SpaceId, value: Coordinate) -> Self {
|
||||
SpaceFields { space_id, value }
|
||||
pub fn new(space_id: &str, value: usize) -> Self {
|
||||
SpaceFields {
|
||||
space_id: space_id.into(),
|
||||
value,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn value(&self) -> usize {
|
||||
self.value
|
||||
}
|
||||
|
||||
pub fn set_value(&mut self, value: usize) {
|
||||
self.value = value;
|
||||
}
|
||||
}
|
||||
|
||||
impl PartialEq for SpaceFields {
|
||||
fn eq(&self, other: &Self) -> bool {
|
||||
self.space_id == other.space_id && self.value == other.value
|
||||
// WARNING: We ignore the spaceID, as we know it will always be true
|
||||
// because of our usage of this.
|
||||
|
||||
// This assumption has to be maintained or the test added back.
|
||||
//self.value == other.value
|
||||
|
||||
// First compare on the number field (cheap and fast), then do the String comparison.
|
||||
// Safety first
|
||||
self.value == other.value && self.space_id == other.space_id
|
||||
}
|
||||
}
|
||||
|
||||
impl ironsea_index::Record<Position> for SpaceSetObject {
|
||||
impl ironsea_index::Record<Position> for &SpaceSetObject {
|
||||
fn key(&self) -> Position {
|
||||
self.position.clone()
|
||||
}
|
||||
}
|
||||
|
||||
impl ironsea_index::RecordFields<SpaceFields> for SpaceSetObject {
|
||||
impl ironsea_index::RecordFields<SpaceFields> for &SpaceSetObject {
|
||||
fn fields(&self) -> SpaceFields {
|
||||
SpaceFields {
|
||||
space_id: self.space_id().clone(),
|
||||
@@ -82,23 +98,7 @@ impl ironsea_index::RecordFields<SpaceFields> for SpaceSetObject {
|
||||
}
|
||||
}
|
||||
|
||||
impl ironsea_index::RecordBuild<Position, SpaceFields, SpaceSetObject> for SpaceSetObject {
|
||||
fn build(key: &Position, fields: &SpaceFields) -> SpaceSetObject {
|
||||
SpaceSetObject {
|
||||
space_id: fields.space_id.clone(),
|
||||
position: key.clone(),
|
||||
value: fields.value,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub type SpaceSetIndex = ironsea_index_sfc_dbc::IndexOwned<
|
||||
VectorTable<SpaceSetObject>,
|
||||
SpaceSetObject,
|
||||
Position,
|
||||
Coordinate,
|
||||
SpaceFields,
|
||||
>;
|
||||
pub type SpaceSetIndex = ironsea_index_sfc_dbc::IndexOwned<SpaceFields, Position, Coordinate>;
|
||||
|
||||
#[derive(Clone, Debug, Deserialize, Serialize)]
|
||||
pub struct SpaceIndex {
|
||||
@@ -126,58 +126,63 @@ impl SpaceIndex {
|
||||
}
|
||||
|
||||
// Inputs and Results are expressed in encoded space coordinates.
|
||||
pub fn find(&self, key: &Position) -> Vec<SpaceSetObject> {
|
||||
pub fn find<'s>(&'s self, key: &Position) -> Box<dyn Iterator<Item = &SpaceFields> + 's> {
|
||||
self.index.find(key)
|
||||
}
|
||||
|
||||
// Inputs and Results are expressed in encoded space coordinates.
|
||||
fn find_range(&self, start: &Position, end: &Position) -> Vec<SpaceSetObject> {
|
||||
fn find_range<'s>(
|
||||
&'s self,
|
||||
start: &Position,
|
||||
end: &Position,
|
||||
) -> Box<dyn Iterator<Item = (Position, &SpaceFields)> + 's> {
|
||||
self.index.find_range(start, end)
|
||||
}
|
||||
|
||||
// Inputs and Results are expressed in encoded space coordinates.
|
||||
pub fn find_by_value(&self, id: &SpaceFields) -> Vec<SpaceSetObject> {
|
||||
pub fn find_by_value<'s>(&'s self, id: &'s SpaceFields) -> IterPositions<'s> {
|
||||
self.index.find_by_value(id)
|
||||
}
|
||||
|
||||
/// Inputs and Results are also in encoded space coordinates.
|
||||
pub fn find_by_shape(
|
||||
&self,
|
||||
shape: &Shape,
|
||||
// Inputs and Results are also in encoded space coordinates.
|
||||
pub fn find_by_shape<'s>(
|
||||
&'s self,
|
||||
shape: Shape,
|
||||
view_port: &Option<Shape>,
|
||||
) -> Result<Vec<SpaceSetObject>, String> {
|
||||
) -> Result<Box<dyn Iterator<Item = (Position, &SpaceFields)> + 's>, String> {
|
||||
match shape {
|
||||
Shape::Point(position) => {
|
||||
if let Some(mbb) = view_port {
|
||||
if mbb.contains(position) {
|
||||
Ok(self.find(position))
|
||||
} else {
|
||||
Err(format!(
|
||||
if !mbb.contains(&position) {
|
||||
return Err(format!(
|
||||
"View port '{:?}' does not contain '{:?}'",
|
||||
mbb, position
|
||||
))
|
||||
));
|
||||
}
|
||||
} else {
|
||||
Ok(self.find(position))
|
||||
}
|
||||
Ok(Box::new(
|
||||
self.find(&position)
|
||||
.map(move |fields| (position.clone(), fields)),
|
||||
))
|
||||
}
|
||||
Shape::BoundingBox(bl, bh) => {
|
||||
if let Some(mbb) = view_port {
|
||||
match mbb {
|
||||
Shape::BoundingBox(vl, vh) => {
|
||||
// Compute the intersection of the two boxes.
|
||||
let lower = bl.max(vl);
|
||||
let higher = bh.min(vh);
|
||||
let lower = (&bl).max(vl);
|
||||
let higher = (&bh).min(vh);
|
||||
if higher < lower {
|
||||
Err(format!(
|
||||
"View port '{:?}' does not intersect '{:?}'",
|
||||
mbb, shape
|
||||
mbb,
|
||||
Shape::BoundingBox(bl.clone(), bh.clone())
|
||||
))
|
||||
} else {
|
||||
trace!(
|
||||
"mbb {:?} shape {:?} lower {:?} higher {:?}",
|
||||
mbb,
|
||||
shape,
|
||||
Shape::BoundingBox(bl.clone(), bh.clone()),
|
||||
lower,
|
||||
higher
|
||||
);
|
||||
@@ -187,11 +192,11 @@ impl SpaceIndex {
|
||||
_ => Err(format!("Invalid view port shape '{:?}'", mbb)),
|
||||
}
|
||||
} else {
|
||||
Ok(self.find_range(bl, bh))
|
||||
Ok(self.find_range(&bl, &bh))
|
||||
}
|
||||
}
|
||||
Shape::HyperSphere(center, radius) => {
|
||||
let (bl, bh) = &shape.get_mbb();
|
||||
let (bl, bh) = Shape::HyperSphere(center.clone(), radius).get_mbb();
|
||||
let lower;
|
||||
let higher;
|
||||
|
||||
@@ -199,26 +204,24 @@ impl SpaceIndex {
|
||||
match mbb {
|
||||
Shape::BoundingBox(vl, vh) => {
|
||||
// Compute the intersection of the two boxes.
|
||||
lower = bl.max(vl);
|
||||
higher = bh.min(vh);
|
||||
lower = (&bl).max(vl);
|
||||
higher = (&bh).min(vh);
|
||||
}
|
||||
_ => return Err(format!("Invalid view port shape '{:?}'", mbb)),
|
||||
}
|
||||
} else {
|
||||
lower = bl;
|
||||
higher = bh;
|
||||
lower = &bl;
|
||||
higher = &bh;
|
||||
}
|
||||
|
||||
// Filter out results using using a range query over the MBB,
|
||||
// then add the condition of the radius as we are working within
|
||||
// a sphere.
|
||||
let results = self
|
||||
.find_range(&lower, &higher)
|
||||
.into_iter()
|
||||
.filter(|p| (p.position().clone() - center.clone()).norm() <= radius.f64())
|
||||
.collect();
|
||||
.find_range(lower, higher)
|
||||
.filter(move |(position, _)| (position - ¢er).norm() <= radius.f64());
|
||||
|
||||
Ok(results)
|
||||
Ok(Box::new(results))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,2 +0,0 @@
|
||||
pub mod model;
|
||||
pub mod storage;
|
||||
@@ -1,224 +0,0 @@
|
||||
use std::collections::HashMap;
|
||||
|
||||
use crate::database;
|
||||
use database::space;
|
||||
use database::Core;
|
||||
use database::DataBase;
|
||||
use database::SpaceObject;
|
||||
use database::SpaceSetObject;
|
||||
|
||||
#[derive(Clone, Debug, Deserialize, Serialize)]
|
||||
pub struct Space {
|
||||
pub name: String,
|
||||
pub origin: Vec<f64>,
|
||||
pub axes: Vec<Axis>,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, Deserialize, Serialize)]
|
||||
pub struct Axis {
|
||||
pub measurement_unit: String,
|
||||
pub graduation: Graduation,
|
||||
pub unit_vector: Vec<f64>,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, Deserialize, Serialize)]
|
||||
pub struct Graduation {
|
||||
pub set: String,
|
||||
pub minimum: f64,
|
||||
pub maximum: f64,
|
||||
pub steps: u64,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, Deserialize, Serialize)]
|
||||
pub struct SpatialObject {
|
||||
pub properties: Properties,
|
||||
pub shapes: Vec<Shape>,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, Deserialize, Serialize)]
|
||||
pub struct Shape {
|
||||
#[serde(rename = "type")]
|
||||
pub type_name: String,
|
||||
#[serde(rename = "space")]
|
||||
pub reference_space: String,
|
||||
pub vertices: Vec<Point>,
|
||||
}
|
||||
|
||||
type Point = Vec<f64>;
|
||||
|
||||
#[derive(Clone, Debug, Deserialize, Serialize)]
|
||||
pub struct Properties {
|
||||
#[serde(rename = "type")]
|
||||
pub type_name: String,
|
||||
pub id: String,
|
||||
}
|
||||
|
||||
impl From<&space::Graduation> for Graduation {
|
||||
fn from(g: &space::Graduation) -> Self {
|
||||
Graduation {
|
||||
set: g.set.clone().into(),
|
||||
minimum: g.minimum,
|
||||
maximum: g.maximum,
|
||||
steps: g.steps,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl From<Axis> for space::Axis {
|
||||
fn from(axis: Axis) -> Self {
|
||||
let g = axis.graduation;
|
||||
|
||||
space::Axis::new(
|
||||
&axis.measurement_unit,
|
||||
axis.unit_vector,
|
||||
g.set.into(),
|
||||
g.minimum,
|
||||
g.maximum,
|
||||
g.steps,
|
||||
)
|
||||
.unwrap_or_else(|e| panic!("Unable to create Axis as defined: {}", e))
|
||||
}
|
||||
}
|
||||
|
||||
impl From<&space::Axis> for Axis {
|
||||
fn from(axis: &space::Axis) -> Self {
|
||||
Axis {
|
||||
measurement_unit: axis.measurement_unit(),
|
||||
graduation: axis.graduation().into(),
|
||||
unit_vector: axis.unit_vector().into(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl From<&Space> for space::Space {
|
||||
fn from(space: &Space) -> Self {
|
||||
let axes = space
|
||||
.axes
|
||||
.iter()
|
||||
.map(|a| a.clone().into())
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
let system = space::CoordinateSystem::new(space.origin.clone(), axes);
|
||||
|
||||
space::Space::new(&space.name, system)
|
||||
}
|
||||
}
|
||||
|
||||
impl From<&space::Space> for Space {
|
||||
fn from(space: &space::Space) -> Self {
|
||||
let axes = space.axes().iter().map(|a| a.into()).collect::<Vec<_>>();
|
||||
|
||||
Space {
|
||||
name: space.name().clone(),
|
||||
origin: space.origin().into(),
|
||||
axes,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub fn to_spatial_objects(db: &DataBase, list: Vec<SpaceObject>) -> Vec<SpatialObject> {
|
||||
// Filter per Properties, in order to regroup by it, then build a single SpatialObject per Properties.
|
||||
let mut properties = HashMap::new();
|
||||
for object in list {
|
||||
let k = object.value.id().clone();
|
||||
properties.entry(k).or_insert_with(|| vec![]).push(object);
|
||||
}
|
||||
|
||||
let mut results = vec![];
|
||||
for (k, v) in properties.iter() {
|
||||
// Group by spaces, to collect points shapes together
|
||||
let shapes = v
|
||||
.iter()
|
||||
.filter_map(|o| match db.space(&o.space_id) {
|
||||
Err(_) => None,
|
||||
Ok(space) => {
|
||||
if let Ok(vertices) = space.decode(&o.position) {
|
||||
Some(Shape {
|
||||
type_name: "Point".to_string(),
|
||||
reference_space: o.space_id.clone(),
|
||||
vertices: vec![vertices],
|
||||
})
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
})
|
||||
.collect();
|
||||
|
||||
results.push(SpatialObject {
|
||||
properties: Properties {
|
||||
type_name: "Feature".to_string(),
|
||||
id: k.to_string(),
|
||||
},
|
||||
shapes,
|
||||
});
|
||||
}
|
||||
|
||||
results
|
||||
}
|
||||
|
||||
pub fn build_index(
|
||||
name: &str,
|
||||
version: &str,
|
||||
spaces: &[space::Space],
|
||||
objects: &[SpatialObject],
|
||||
scales: Option<Vec<Vec<u32>>>,
|
||||
max_elements: Option<usize>,
|
||||
) -> Core {
|
||||
let mut properties = vec![];
|
||||
let mut space_set_objects = vec![];
|
||||
|
||||
let mut properties_ref = vec![];
|
||||
|
||||
{
|
||||
let mut properties_hm = HashMap::new();
|
||||
|
||||
for object in objects {
|
||||
let value = match properties_hm.get(object.properties.id.as_str()) {
|
||||
Some(_) => {
|
||||
properties_ref.push(object.properties.id.as_str());
|
||||
properties_ref.len() - 1
|
||||
}
|
||||
None => {
|
||||
properties_hm.insert(
|
||||
object.properties.id.as_str(),
|
||||
database::Properties::Feature(object.properties.id.clone()),
|
||||
);
|
||||
|
||||
properties_ref.push(object.properties.id.as_str());
|
||||
properties_ref.len() - 1
|
||||
}
|
||||
};
|
||||
|
||||
for point in &object.shapes {
|
||||
assert_eq!(point.type_name, "Point");
|
||||
|
||||
space_set_objects.push(SpaceSetObject::new(
|
||||
&point.reference_space,
|
||||
point.vertices[0].clone().into(),
|
||||
value.into(),
|
||||
))
|
||||
}
|
||||
}
|
||||
|
||||
properties.append(&mut properties_hm.drain().map(|(_, v)| v).collect::<Vec<_>>());
|
||||
}
|
||||
|
||||
properties.sort_unstable_by_key(|p| p.id().clone());
|
||||
|
||||
space_set_objects.iter_mut().for_each(|object| {
|
||||
let id = properties_ref[object.value().u64() as usize];
|
||||
let value = properties.binary_search_by_key(&id, |p| p.id()).unwrap();
|
||||
object.set_value(value.into());
|
||||
});
|
||||
|
||||
Core::new(
|
||||
name,
|
||||
version,
|
||||
spaces,
|
||||
properties,
|
||||
space_set_objects,
|
||||
scales,
|
||||
max_elements,
|
||||
)
|
||||
}
|
||||
@@ -1,102 +0,0 @@
|
||||
use std::fs::File;
|
||||
use std::io::BufWriter;
|
||||
|
||||
use memmap::Mmap;
|
||||
use serde::de::DeserializeOwned;
|
||||
use serde::Serialize;
|
||||
|
||||
use crate::json::model;
|
||||
|
||||
pub fn from_json<T>(from: &str, to: &str)
|
||||
where
|
||||
T: Serialize + DeserializeOwned,
|
||||
{
|
||||
let file_in =
|
||||
File::open(from).unwrap_or_else(|e| panic!("Unable to read file: {}: {}", from, e));
|
||||
let file_out =
|
||||
File::create(to).unwrap_or_else(|e| panic!("Unable to create file: {}: {}", to, e));
|
||||
|
||||
// We create a buffered writer from the file we get
|
||||
let writer = BufWriter::new(&file_out);
|
||||
|
||||
let mmap = unsafe {
|
||||
Mmap::map(&file_in)
|
||||
.unwrap_or_else(|e| panic!("Unable to map in memory the file: {}: {}", from, e))
|
||||
};
|
||||
let v: T = serde_json::from_slice(&mmap[..])
|
||||
.unwrap_or_else(|e| panic!("Unable to parse the json data from: {}: {}", from, e));
|
||||
|
||||
bincode::serialize_into(writer, &v).unwrap();
|
||||
}
|
||||
|
||||
//FIXME: Move to ironsea_store?
|
||||
pub fn load<T>(from: &str) -> T
|
||||
where
|
||||
T: DeserializeOwned,
|
||||
{
|
||||
let file_in =
|
||||
File::open(from).unwrap_or_else(|e| panic!("Unable to read file: {}: {}", from, e));
|
||||
|
||||
let mmap = unsafe {
|
||||
Mmap::map(&file_in)
|
||||
.unwrap_or_else(|e| panic!("Unable to map in memory the file: {}: {}", from, e))
|
||||
};
|
||||
|
||||
bincode::deserialize(&mmap[..])
|
||||
.unwrap_or_else(|e| panic!("Unable to parse the json data from: {}: {}", from, e))
|
||||
}
|
||||
|
||||
//FIXME: Move to ironsea_store?
|
||||
pub fn store<T>(data: T, to: &str)
|
||||
where
|
||||
T: Serialize,
|
||||
{
|
||||
let file_out =
|
||||
File::create(to).unwrap_or_else(|e| panic!("Unable to create file: {}: {}", to, e));
|
||||
|
||||
// We create a buffered writer from the file we get
|
||||
let writer = BufWriter::new(&file_out);
|
||||
|
||||
bincode::serialize_into(writer, &data).unwrap();
|
||||
}
|
||||
|
||||
pub fn convert(name: &str) {
|
||||
// Convert Reference Space definitions
|
||||
let fn_in = format!("{}.spaces.json", name);
|
||||
let fn_out = format!("{}.spaces.bin", name);
|
||||
|
||||
from_json::<Vec<model::Space>>(&fn_in, &fn_out);
|
||||
|
||||
// Convert Spatial Objects
|
||||
let fn_in = format!("{}.objects.json", name);
|
||||
let fn_out = format!("{}.objects.bin", name);
|
||||
|
||||
from_json::<Vec<model::SpatialObject>>(&fn_in, &fn_out);
|
||||
}
|
||||
|
||||
pub fn build(
|
||||
name: &str,
|
||||
version: &str,
|
||||
scales: Option<Vec<Vec<u32>>>,
|
||||
max_elements: Option<usize>,
|
||||
) {
|
||||
let fn_spaces = format!("{}.spaces.bin", name);
|
||||
let fn_objects = format!("{}.objects.bin", name);
|
||||
let fn_index = format!("{}.index", name);
|
||||
|
||||
let spaces = load::<Vec<model::Space>>(&fn_spaces)
|
||||
.iter()
|
||||
.map(|s| s.into())
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
let core = model::build_index(
|
||||
name,
|
||||
version,
|
||||
&spaces,
|
||||
&load::<Vec<model::SpatialObject>>(&fn_objects),
|
||||
scales,
|
||||
max_elements,
|
||||
);
|
||||
|
||||
store((spaces, core), &fn_index);
|
||||
}
|
||||
35
src/lib.rs
35
src/lib.rs
@@ -1,3 +1,33 @@
|
||||
#![deny(missing_docs)]
|
||||
|
||||
//! # Mercator DB
|
||||
//!
|
||||
//! Database model for the Mercator spatial index.
|
||||
//!
|
||||
//! ## Mercator: Spatial Index
|
||||
//!
|
||||
//! **Mercator** is a spatial *volumetric* index for the
|
||||
//! [Human Brain Project]. It is a component of the [Knowledge Graph]
|
||||
//! service, which provides the spatial anchoring for the metadata
|
||||
//! registered as well as processes the volumetric queries.
|
||||
//!
|
||||
//! It is build on top of the Iron Sea database toolkit.
|
||||
//!
|
||||
//! ## Iron Sea: Database Toolkit
|
||||
//! **Iron Sea** provides a set of database engine bricks, which can be
|
||||
//! combined and applied on arbitrary data structures.
|
||||
//!
|
||||
//! Unlike a traditional database, it does not assume a specific
|
||||
//! physical structure for the tables nor the records, but relies on the
|
||||
//! developer to provide a set of extractor functions which are used by
|
||||
//! the specific indices provided.
|
||||
//!
|
||||
//! This enables the index implementations to be agnostic from the
|
||||
//! underlying data structure, and re-used.
|
||||
//!
|
||||
//! [Human Brain Project]: http://www.humanbrainproject.eu
|
||||
//! [Knowledge Graph]: http://www.humanbrainproject.eu/en/explore-the-brain/search/
|
||||
|
||||
#[macro_use]
|
||||
extern crate lazy_static;
|
||||
|
||||
@@ -7,10 +37,7 @@ extern crate log;
|
||||
#[macro_use]
|
||||
extern crate arrayref;
|
||||
|
||||
#[macro_use]
|
||||
extern crate serde_derive;
|
||||
|
||||
mod database;
|
||||
pub mod json;
|
||||
pub mod storage;
|
||||
|
||||
pub use database::*;
|
||||
|
||||
61
src/main.rs
61
src/main.rs
@@ -1,8 +1,8 @@
|
||||
#[macro_use]
|
||||
extern crate measure_time;
|
||||
|
||||
use mercator_db::json::storage;
|
||||
use mercator_db::space::Shape;
|
||||
use mercator_db::storage;
|
||||
use mercator_db::CoreQueryParameters;
|
||||
use mercator_db::DataBase;
|
||||
|
||||
@@ -16,13 +16,15 @@ fn main() {
|
||||
// Convert to binary the JSON data:
|
||||
if true {
|
||||
info_time!("Converting to binary JSON data");
|
||||
storage::convert("10k");
|
||||
storage::json::from::<Vec<mercator_db::storage::model::Space>>("10k.spaces").unwrap();
|
||||
storage::json::from::<Vec<mercator_db::storage::model::v1::SpatialObject>>("10k.objects")
|
||||
.unwrap();
|
||||
}
|
||||
|
||||
// Build a Database Index:
|
||||
if true {
|
||||
info_time!("Building database index");
|
||||
storage::build("10k", "v0.1", None, None);
|
||||
storage::bincode::build("10k", "v0.1", None, None).unwrap();
|
||||
}
|
||||
|
||||
// Load a Database:
|
||||
@@ -41,34 +43,34 @@ fn main() {
|
||||
output_space: None,
|
||||
threshold_volume: Some(std::f64::MAX),
|
||||
view_port: &None,
|
||||
resolution: None,
|
||||
resolution: &None,
|
||||
};
|
||||
let r = core.get_by_id(&c, id).unwrap();
|
||||
println!("get_by_id {}: {}", id, r.len());
|
||||
println!("{}: {:?}\n", id, r[0]);
|
||||
println!("{}: {:?}\n", id, r[0].1[0]);
|
||||
|
||||
let c = CoreQueryParameters {
|
||||
db: &db,
|
||||
output_space: None,
|
||||
threshold_volume: Some(0.0),
|
||||
view_port: &None,
|
||||
resolution: None,
|
||||
resolution: &None,
|
||||
};
|
||||
let r = core.get_by_id(&c, id).unwrap();
|
||||
println!("get_by_id {}: {}", id, r.len());
|
||||
println!("{}: {:?}\n", id, r[0]);
|
||||
println!("{}: {:?}\n", id, r[0].1[0]);
|
||||
|
||||
let c = CoreQueryParameters {
|
||||
db: &db,
|
||||
output_space: None,
|
||||
threshold_volume: Some(std::f64::MAX),
|
||||
view_port: &None,
|
||||
resolution: None,
|
||||
resolution: &None,
|
||||
};
|
||||
let r = core.get_by_label(&c, id).unwrap();
|
||||
println!("get_by_label {}: {}", id, r.len());
|
||||
if !r.is_empty() {
|
||||
println!("{}: {:?}\n", id, r[0]);
|
||||
println!("{}: {:?}\n", id, r); // no overlaping point, so no results
|
||||
}
|
||||
|
||||
let lower = space.encode(&[0.2, 0.2, 0.2]).unwrap();
|
||||
@@ -81,19 +83,45 @@ fn main() {
|
||||
output_space: None,
|
||||
threshold_volume: Some(0.0),
|
||||
view_port: &None,
|
||||
resolution: None,
|
||||
resolution: &None,
|
||||
};
|
||||
let r = core.get_by_shape(&c, &shape, "std").unwrap();
|
||||
println!("get_by_shape {:?}: {}", shape, r.len());
|
||||
println!("{:?}: {:?}\n", shape, r[0]);
|
||||
println!("{:?}: {:?}\n", shape, r[0].1[0]);
|
||||
|
||||
let a = r.iter().filter(|o| o.value.id() == id).collect::<Vec<_>>();
|
||||
let a = r
|
||||
.iter()
|
||||
.filter_map(|(space, v)| {
|
||||
let v = v
|
||||
.iter()
|
||||
.filter(|(_, properties)| properties.id() == id)
|
||||
.collect::<Vec<_>>();
|
||||
if v.is_empty() {
|
||||
None
|
||||
} else {
|
||||
Some((space, v))
|
||||
}
|
||||
})
|
||||
.collect::<Vec<_>>();
|
||||
println!("get_by_shape A {:?} filtered on {}: {}", shape, id, a.len());
|
||||
if !a.is_empty() {
|
||||
println!("{:?}\n", a[0]);
|
||||
println!("{:?}\n", a[0].1[0]);
|
||||
}
|
||||
|
||||
let a = r.iter().filter(|o| o.value.id() != id).collect::<Vec<_>>();
|
||||
let a = r
|
||||
.iter()
|
||||
.filter_map(|(space, v)| {
|
||||
let v = v
|
||||
.iter()
|
||||
.filter(|(_, properties)| properties.id() != id)
|
||||
.collect::<Vec<_>>();
|
||||
if v.is_empty() {
|
||||
None
|
||||
} else {
|
||||
Some((space, v))
|
||||
}
|
||||
})
|
||||
.collect::<Vec<_>>();
|
||||
println!(
|
||||
"get_by_shape !A {:?} filtered on {}: {}",
|
||||
shape,
|
||||
@@ -101,16 +129,17 @@ fn main() {
|
||||
a.len()
|
||||
);
|
||||
if !a.is_empty() {
|
||||
println!("{:?}\n", a[0]);
|
||||
println!("{:?}\n", a[0].1[0]);
|
||||
}
|
||||
|
||||
println!(
|
||||
"\nSPACE OBJECT:\n\n{}",
|
||||
serde_json::to_string_pretty(space).unwrap()
|
||||
);
|
||||
//FIXME: Not returning SpatialObjects by default
|
||||
println!(
|
||||
"\nSPATIAL OBJECT:\n\n{}",
|
||||
serde_json::to_string_pretty(a[0]).unwrap()
|
||||
serde_json::to_string_pretty(&a[0]).unwrap()
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
118
src/storage/bincode.rs
Normal file
118
src/storage/bincode.rs
Normal file
@@ -0,0 +1,118 @@
|
||||
//! Bincode support
|
||||
|
||||
use std::fs::File;
|
||||
use std::io::BufWriter;
|
||||
use std::io::Error;
|
||||
use std::io::ErrorKind;
|
||||
|
||||
use memmap::Mmap;
|
||||
use serde::de::DeserializeOwned;
|
||||
use serde::Serialize;
|
||||
|
||||
use super::model;
|
||||
|
||||
/// Deserialize a data structure.
|
||||
///
|
||||
/// # Parameters
|
||||
///
|
||||
/// * `from`:
|
||||
/// File to read, which contains Bincode data.
|
||||
pub fn load<T>(from: &str) -> Result<T, Error>
|
||||
where
|
||||
T: DeserializeOwned,
|
||||
{
|
||||
let file_in = File::open(from)?;
|
||||
|
||||
let mmap = unsafe { Mmap::map(&file_in)? };
|
||||
|
||||
match bincode::deserialize(&mmap[..]) {
|
||||
Ok(data) => Ok(data),
|
||||
Err(e) => Err(Error::new(
|
||||
ErrorKind::InvalidData,
|
||||
format!("Bincode could not deserialize: {:?}", e),
|
||||
)),
|
||||
}
|
||||
}
|
||||
|
||||
/// Serialize a data structure.
|
||||
///
|
||||
/// # Parameters
|
||||
///
|
||||
/// * `data`:
|
||||
/// Data to serialize.
|
||||
///
|
||||
/// * `to`:
|
||||
/// File to use to store the serialized data.
|
||||
pub fn store<T>(data: T, to: &str) -> Result<(), Error>
|
||||
where
|
||||
T: Serialize,
|
||||
{
|
||||
let file_out = File::create(to)?;
|
||||
|
||||
// We create a buffered writer from the file we get
|
||||
let writer = BufWriter::new(&file_out);
|
||||
|
||||
match bincode::serialize_into(writer, &data) {
|
||||
Ok(()) => Ok(()),
|
||||
Err(e) => Err(Error::new(
|
||||
ErrorKind::InvalidData,
|
||||
format!("Bincode could not serialize: {:?}", e),
|
||||
)),
|
||||
}
|
||||
}
|
||||
|
||||
/// Build an index from the input files.
|
||||
///
|
||||
/// # Parameters
|
||||
///
|
||||
/// * `name`:
|
||||
/// Index name, this value will also be used to generate file names
|
||||
/// as such:
|
||||
/// * `.spaces.bin` and `.objects.bin` will be appended for the
|
||||
/// input files.
|
||||
/// * `.index` will be appended for the index file.
|
||||
///
|
||||
/// * `version`:
|
||||
/// Parameter to distinguish revisions of an index.
|
||||
///
|
||||
/// * `scales`:
|
||||
/// An optional list of specific index resolutions to generates on
|
||||
/// top of the full resolution one.
|
||||
///
|
||||
/// * `max_elements`:
|
||||
/// If this is specified, automatically generates scaled indices, by
|
||||
/// halving the number elements between resolutions, and stop
|
||||
/// generating indices either when the number of points remaining is
|
||||
/// equal to the number of distinct Ids, or smaller or equal to this
|
||||
/// value.
|
||||
///
|
||||
/// **Note**: `max_elements` is ignored when `scales` is not `None`.
|
||||
pub fn build(
|
||||
name: &str,
|
||||
version: &str,
|
||||
scales: Option<Vec<Vec<u32>>>,
|
||||
max_elements: Option<usize>,
|
||||
) -> Result<(), Error> {
|
||||
let fn_spaces = format!("{}.spaces.bin", name);
|
||||
let fn_objects = format!("{}.objects.bin", name);
|
||||
let fn_index = format!("{}.index", name);
|
||||
|
||||
let spaces = load::<Vec<model::Space>>(&fn_spaces)?
|
||||
.iter()
|
||||
.map(|s| s.into())
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
let objects = load::<Vec<model::SpatialObject>>(&fn_objects)?;
|
||||
|
||||
let core = match model::build_index(name, version, &spaces, &objects, scales, max_elements) {
|
||||
Ok(core) => core,
|
||||
Err(e) => {
|
||||
return Err(Error::new(
|
||||
ErrorKind::InvalidData,
|
||||
format!("Failure to build index: {:?}", e),
|
||||
))
|
||||
}
|
||||
};
|
||||
|
||||
store((spaces, core), &fn_index)
|
||||
}
|
||||
51
src/storage/json.rs
Normal file
51
src/storage/json.rs
Normal file
@@ -0,0 +1,51 @@
|
||||
//! JSON support
|
||||
|
||||
use std::fs::File;
|
||||
use std::io::BufWriter;
|
||||
use std::io::Error;
|
||||
use std::io::ErrorKind;
|
||||
|
||||
use memmap::Mmap;
|
||||
use serde::de::DeserializeOwned;
|
||||
use serde::Serialize;
|
||||
|
||||
fn convert<T>(from: &str, to: &str) -> Result<(), Error>
|
||||
where
|
||||
T: Serialize + DeserializeOwned,
|
||||
{
|
||||
let file_in = File::open(from)?;
|
||||
let file_out = File::create(to)?;
|
||||
|
||||
// We create a buffered writer from the file we get
|
||||
let writer = BufWriter::new(&file_out);
|
||||
|
||||
let mmap = unsafe { Mmap::map(&file_in)? };
|
||||
let v: T = serde_json::from_slice(&mmap[..])?;
|
||||
|
||||
match bincode::serialize_into(writer, &v) {
|
||||
Ok(()) => Ok(()),
|
||||
Err(e) => Err(Error::new(
|
||||
ErrorKind::InvalidData,
|
||||
format!("Bincode could not serialize: {:?}", e),
|
||||
)),
|
||||
}
|
||||
}
|
||||
|
||||
/// Deserialise a JSON file.
|
||||
///
|
||||
/// # Parameters
|
||||
///
|
||||
/// * `name`:
|
||||
/// Base name of the file,
|
||||
/// * `.xyz` will be automatically appended for the source file, while
|
||||
/// * `.bin` will be appended for the output file.
|
||||
pub fn from<T>(name: &str) -> Result<(), Error>
|
||||
where
|
||||
T: Serialize + DeserializeOwned,
|
||||
{
|
||||
// Convert definitions from json to bincode
|
||||
let fn_in = format!("{}.json", name);
|
||||
let fn_out = format!("{}.bin", name);
|
||||
|
||||
convert::<T>(&fn_in, &fn_out)
|
||||
}
|
||||
9
src/storage/mod.rs
Normal file
9
src/storage/mod.rs
Normal file
@@ -0,0 +1,9 @@
|
||||
//! Persistent data functions and types.
|
||||
//!
|
||||
//! Serialisation / deserialisation functions and structures used to
|
||||
//! store and manipulate indices and data.
|
||||
|
||||
pub mod bincode;
|
||||
pub mod json;
|
||||
pub mod model;
|
||||
pub mod xyz;
|
||||
476
src/storage/model.rs
Normal file
476
src/storage/model.rs
Normal file
@@ -0,0 +1,476 @@
|
||||
//! Model definitions for serialisation.
|
||||
//!
|
||||
//! The following definitions are used as part of the serialisation
|
||||
//! process to exchange objects either through network or to storage.
|
||||
|
||||
use std::collections::HashMap;
|
||||
|
||||
use serde::Deserialize;
|
||||
use serde::Serialize;
|
||||
|
||||
use crate::database;
|
||||
use database::space;
|
||||
use database::space_index::SpaceSetObject;
|
||||
use database::Core;
|
||||
|
||||
/// Reference space definition.
|
||||
#[derive(Clone, Debug, Deserialize, Serialize)]
|
||||
pub struct Space {
|
||||
/// **Id** of the space.
|
||||
pub name: String,
|
||||
|
||||
/// Position of the origin of axis expressed in Universe coordinates.
|
||||
pub origin: Vec<f64>,
|
||||
|
||||
/// List of axes of the space.
|
||||
pub axes: Vec<Axis>,
|
||||
}
|
||||
|
||||
/// Reference space axis definition.
|
||||
#[derive(Clone, Debug, Deserialize, Serialize)]
|
||||
pub struct Axis {
|
||||
/// Length unit for the value `1.0`.
|
||||
pub measurement_unit: String,
|
||||
|
||||
/// Define the valid range of number on this axis.
|
||||
pub graduation: Graduation,
|
||||
|
||||
/// Vector which defines the direction of the axis in the Universe
|
||||
pub unit_vector: Vec<f64>,
|
||||
}
|
||||
|
||||
/// Valid range of numbers on the axis.
|
||||
#[derive(Clone, Debug, Deserialize, Serialize)]
|
||||
pub struct Graduation {
|
||||
/// Mathematical Number Set of numbers allowed.
|
||||
pub set: String,
|
||||
|
||||
/// Minimum value allowed, included.
|
||||
pub minimum: f64,
|
||||
|
||||
/// Maximum value allowed, excluded.
|
||||
pub maximum: f64,
|
||||
|
||||
/// Number of distinct positions between `[min; max[`
|
||||
pub steps: u64,
|
||||
}
|
||||
|
||||
/// A single spatial location.
|
||||
///
|
||||
/// This has a value per dimension of the space it is expressed in.
|
||||
pub type Point = Vec<f64>;
|
||||
|
||||
pub mod v1 {
|
||||
//! REST API objects, v1.
|
||||
|
||||
use std::collections::HashMap;
|
||||
|
||||
use serde::Deserialize;
|
||||
use serde::Serialize;
|
||||
|
||||
use super::database;
|
||||
use super::space;
|
||||
use super::Point;
|
||||
use super::Properties;
|
||||
|
||||
/// Links Properties to a list of spatial volumes.
|
||||
#[derive(Clone, Debug, Deserialize, Serialize)]
|
||||
pub struct SpatialObject {
|
||||
/// Definition of the `properties` to tag in space.
|
||||
pub properties: Properties,
|
||||
|
||||
/// List of volumes associated with `properties`.
|
||||
pub shapes: Vec<Shape>,
|
||||
}
|
||||
|
||||
/// Define a Shape, within a specific reference space.
|
||||
#[derive(Clone, Debug, Deserialize, Serialize)]
|
||||
pub struct Shape {
|
||||
/// Type of the shape, which is used to interpret the list of
|
||||
/// `vertices`.
|
||||
#[serde(rename = "type")]
|
||||
pub type_name: String,
|
||||
|
||||
/// Id of the reference space the points are defined in.
|
||||
#[serde(rename = "space")]
|
||||
pub reference_space: String,
|
||||
|
||||
/// List of spatial positions.
|
||||
pub vertices: Vec<Point>,
|
||||
}
|
||||
|
||||
/// Convert a list of properties grouped by space id, then positions
|
||||
/// to a list of Spatial Objects for the rest API v1.
|
||||
///
|
||||
/// # Parameters
|
||||
///
|
||||
/// * `list`:
|
||||
/// A list of (**Space Id**, [ ( *Spatial position*, `&Properties` ) ]) tuples.
|
||||
pub fn to_spatial_objects(
|
||||
list: Vec<(&String, Vec<(space::Position, &database::Properties)>)>,
|
||||
) -> Vec<SpatialObject> {
|
||||
// Filter per Properties, in order to regroup by it, then build
|
||||
// a single SpatialObject per Properties.
|
||||
let mut hashmap = HashMap::new();
|
||||
for (space, v) in list {
|
||||
for (position, properties) in v {
|
||||
hashmap
|
||||
.entry(properties)
|
||||
.or_insert_with(Vec::new)
|
||||
.push((space, position));
|
||||
}
|
||||
}
|
||||
|
||||
let mut results = vec![];
|
||||
for (properties, v) in hashmap.iter() {
|
||||
// Group by spaces, to collect points shapes together
|
||||
let shapes = v
|
||||
.iter()
|
||||
.map(|(space_id, position)| Shape {
|
||||
type_name: "Point".to_string(),
|
||||
reference_space: (*space_id).clone(),
|
||||
vertices: vec![position.into()],
|
||||
})
|
||||
.collect();
|
||||
|
||||
results.push(SpatialObject {
|
||||
properties: properties.into(),
|
||||
shapes,
|
||||
});
|
||||
}
|
||||
|
||||
results
|
||||
}
|
||||
}
|
||||
|
||||
pub mod v2 {
|
||||
//! REST API objects, v2.
|
||||
|
||||
use std::collections::HashMap;
|
||||
|
||||
use serde::Deserialize;
|
||||
use serde::Serialize;
|
||||
|
||||
use super::database;
|
||||
use super::space;
|
||||
use super::Point;
|
||||
use super::Properties;
|
||||
|
||||
/// Links Properties to a list of spatial volumes.
|
||||
#[derive(Clone, Debug, Deserialize, Serialize)]
|
||||
pub struct SpatialObject {
|
||||
/// Definition of the `properties` to tag in space.
|
||||
pub properties: Properties,
|
||||
|
||||
/// List of volumes associated with `properties`.
|
||||
pub volumes: Vec<Volume>,
|
||||
}
|
||||
|
||||
/// Defines a volume as the union of geometric shapes.
|
||||
#[derive(Clone, Debug, Deserialize, Serialize)]
|
||||
pub struct Volume {
|
||||
/// Reference space id.
|
||||
pub space: String,
|
||||
|
||||
/// List of geometric shapes defined in the reference space
|
||||
/// `space`.
|
||||
pub shapes: Vec<Shape>,
|
||||
}
|
||||
|
||||
/// Describes an homogeneous list of geometric shapes.
|
||||
#[derive(Clone, Debug, Deserialize, Serialize)]
|
||||
#[serde(rename_all = "lowercase")]
|
||||
pub enum Shape {
|
||||
/// List of points.
|
||||
Points(Vec<Point>),
|
||||
|
||||
/// List of Bounding boxes or *hyper rectangles* for which each
|
||||
/// face is perpendicular to one of the axis of the reference
|
||||
/// space.
|
||||
///
|
||||
/// That property allows us to describe such a hyperrectangle
|
||||
/// with two corners:
|
||||
///
|
||||
/// * one for which all the coordinates are the smallest among
|
||||
/// all the corners, per dimension, which is called here
|
||||
/// *lower corner*
|
||||
///
|
||||
/// * one for which all the coordinates are the greatest among
|
||||
/// all the corners, per dimension, which is called
|
||||
/// *higher corner*.
|
||||
///
|
||||
/// The list simply stores tuples of (`lower corner`,
|
||||
/// `higher corner`), as this is enough to reconstruct all the
|
||||
/// corners of the bounding box.
|
||||
BoundingBoxes(Vec<(Point, Point)>),
|
||||
|
||||
/// List of hyperspheres, stored as (`center`, radius) tuples.
|
||||
HyperSpheres(Vec<(Point, f64)>),
|
||||
}
|
||||
|
||||
/// Convert a list of space id grouped by properties, then positions
|
||||
/// to a list of Spatial Objects for the rest API v2.
|
||||
///
|
||||
/// # Parameters
|
||||
///
|
||||
/// * `list`:
|
||||
/// A list of (`&Properties`, [ ( **Space Id**, [ *Spatial position* ] ) ]) tuples.
|
||||
#[allow(clippy::type_complexity)]
|
||||
// Type alias cannot be used as Traits, so we can't use the alias to add the lifetime specifications.
|
||||
pub fn from_spaces_by_properties<'o>(
|
||||
objects: Box<
|
||||
(dyn Iterator<
|
||||
Item=(
|
||||
&'o database::Properties,
|
||||
Vec<(&'o String, Box<dyn Iterator<Item=space::Position> + 'o>)>,
|
||||
),
|
||||
> + 'o),
|
||||
>,
|
||||
) -> impl Iterator<Item=SpatialObject> + 'o {
|
||||
objects.map(|(property, positions_by_spaces)| {
|
||||
let volumes = positions_by_spaces
|
||||
.into_iter()
|
||||
.map(|(space, positions)| {
|
||||
// We are not using vec![] as we now beforehand we
|
||||
// will have only one element in the vector, so we
|
||||
// optimise for space by allocating it as such.
|
||||
let shapes = vec![
|
||||
Shape::Points(positions.map(|position|
|
||||
position.into()).collect::<Vec<_>>())
|
||||
];
|
||||
|
||||
Volume {
|
||||
space: space.clone(),
|
||||
shapes,
|
||||
}
|
||||
})
|
||||
.collect();
|
||||
|
||||
SpatialObject {
|
||||
properties: (&property).into(),
|
||||
volumes,
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
/// Convert a list of properties grouped by space id, then positions
|
||||
/// to a list of Spatial Objects for the rest API v2.
|
||||
///
|
||||
/// # Parameters
|
||||
///
|
||||
/// * `list`:
|
||||
/// A list of (**Space Id**, [ ( *Spatial position*, `&Properties` ) ]) tuples.
|
||||
pub fn from_properties_by_spaces(
|
||||
objects: database::IterObjectsBySpaces<'_>,
|
||||
) -> impl Iterator<Item=SpatialObject> + '_ {
|
||||
// Filter per Properties, in order to regroup by it, then build
|
||||
// a single SpatialObject per Properties.
|
||||
let mut hashmap = HashMap::new();
|
||||
for (space, v) in objects {
|
||||
for (position, properties) in v {
|
||||
hashmap
|
||||
.entry(properties)
|
||||
.or_insert_with(HashMap::new)
|
||||
.entry(space)
|
||||
.or_insert_with(Vec::new)
|
||||
.push(position);
|
||||
}
|
||||
}
|
||||
|
||||
let results = Box::new(hashmap.into_iter().map(|(property, hm)| {
|
||||
let positions = hm
|
||||
.into_iter()
|
||||
.map(|(space, positions)| {
|
||||
let positions: database::IterPositions = Box::new(positions.into_iter());
|
||||
(space, positions)
|
||||
})
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
(property, positions)
|
||||
}));
|
||||
|
||||
from_spaces_by_properties(results)
|
||||
}
|
||||
}
|
||||
|
||||
/// **Properties** which are registered at one or more spatial locations.
|
||||
#[derive(Clone, Debug, Deserialize, Serialize)]
|
||||
pub struct Properties {
|
||||
/// The **type** of *Id*, this allows for different kinds of objects
|
||||
/// to have the same *Id*, but handled distinctly.
|
||||
#[serde(rename = "type")]
|
||||
pub type_name: String,
|
||||
|
||||
/// An arbitrary string.
|
||||
pub id: String,
|
||||
}
|
||||
|
||||
impl From<&space::Graduation> for Graduation {
|
||||
fn from(g: &space::Graduation) -> Self {
|
||||
Graduation {
|
||||
set: (&g.set).into(),
|
||||
minimum: g.minimum,
|
||||
maximum: g.maximum,
|
||||
steps: g.steps,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl From<Axis> for space::Axis {
|
||||
fn from(axis: Axis) -> Self {
|
||||
let g = axis.graduation;
|
||||
|
||||
space::Axis::new(
|
||||
&axis.measurement_unit,
|
||||
axis.unit_vector,
|
||||
g.set.as_str().into(),
|
||||
g.minimum,
|
||||
g.maximum,
|
||||
g.steps,
|
||||
)
|
||||
.unwrap_or_else(|e| panic!("Unable to create Axis as defined: {}", e))
|
||||
}
|
||||
}
|
||||
|
||||
impl From<&space::Axis> for Axis {
|
||||
fn from(axis: &space::Axis) -> Self {
|
||||
Axis {
|
||||
measurement_unit: axis.measurement_unit().into(),
|
||||
graduation: axis.graduation().into(),
|
||||
unit_vector: axis.unit_vector().into(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl From<&Space> for space::Space {
|
||||
fn from(space: &Space) -> Self {
|
||||
let axes = space
|
||||
.axes
|
||||
.iter()
|
||||
.map(|a| a.clone().into())
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
let system = space::CoordinateSystem::new(space.origin.clone(), axes);
|
||||
|
||||
space::Space::new(&space.name, system)
|
||||
}
|
||||
}
|
||||
|
||||
impl From<&space::Space> for Space {
|
||||
fn from(space: &space::Space) -> Self {
|
||||
let axes = space.axes().iter().map(|a| a.into()).collect::<Vec<_>>();
|
||||
|
||||
Space {
|
||||
name: space.name().clone(),
|
||||
origin: space.origin().into(),
|
||||
axes,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl From<&&database::Properties> for Properties {
    /// Convert a stored property record into its serializable form.
    ///
    /// NOTE(review): the double reference presumably matches how query
    /// results hand out borrowed records — confirm at the call sites.
    fn from(record: &&database::Properties) -> Self {
        Self {
            type_name: record.type_name().to_string(),
            id: record.id().into(),
        }
    }
}
|
||||
|
||||
pub use v1::SpatialObject;
|
||||
|
||||
/// Generate an index.
|
||||
///
|
||||
/// # Parameters
|
||||
///
|
||||
/// * `name`:
|
||||
/// Name to give to the index.
|
||||
///
|
||||
/// * `version`:
|
||||
/// Parameter to distinguish revisions of an index.
|
||||
///
|
||||
/// * `spaces`:
|
||||
/// A list of the reference spaces. Only objects whose reference
|
||||
/// space is known will be indexed.
|
||||
///
|
||||
/// * `objects`:
|
||||
/// The data points to index.
|
||||
///
|
||||
/// * `scales`:
|
||||
/// An optional list of specific index resolutions to generates on
|
||||
/// top of the full resolution one.
|
||||
///
|
||||
/// * `max_elements`:
|
||||
/// If this is specified, automatically generates scaled indices, by
|
||||
/// halving the number elements between resolutions, and stop
|
||||
/// generating indices either when the number of points remaining is
|
||||
/// equal to the number of distinct Ids, or smaller or equal to this
|
||||
/// value.
|
||||
///
|
||||
/// **Note**: `max_elements` is ignored when `scales` is not `None`.
|
||||
pub fn build_index(
|
||||
name: &str,
|
||||
version: &str,
|
||||
spaces: &[space::Space],
|
||||
objects: &[SpatialObject],
|
||||
scales: Option<Vec<Vec<u32>>>,
|
||||
max_elements: Option<usize>,
|
||||
) -> Result<Core, String> {
|
||||
let mut properties = vec![];
|
||||
let mut space_set_objects = vec![];
|
||||
{
|
||||
let mut properties_ref = vec![];
|
||||
let mut properties_hm = HashMap::new();
|
||||
|
||||
for object in objects {
|
||||
let value = match properties_hm.get(object.properties.id.as_str()) {
|
||||
Some(_) => {
|
||||
properties_ref.push(object.properties.id.as_str());
|
||||
properties_ref.len() - 1
|
||||
}
|
||||
None => {
|
||||
properties_hm.insert(
|
||||
object.properties.id.as_str(),
|
||||
database::Properties::Feature(object.properties.id.clone()),
|
||||
);
|
||||
|
||||
properties_ref.push(object.properties.id.as_str());
|
||||
properties_ref.len() - 1
|
||||
}
|
||||
};
|
||||
|
||||
for point in &object.shapes {
|
||||
assert_eq!(point.type_name, "Point");
|
||||
|
||||
space_set_objects.push(SpaceSetObject::new(
|
||||
&point.reference_space,
|
||||
// Use a reference to prevent an allocation
|
||||
(&point.vertices[0]).into(),
|
||||
value,
|
||||
))
|
||||
}
|
||||
}
|
||||
|
||||
properties.append(&mut properties_hm.drain().map(|(_, v)| v).collect::<Vec<_>>());
|
||||
|
||||
// We we use sort_by_key, we get borrow checker errors.
|
||||
#[allow(clippy::unnecessary_sort_by)]
|
||||
properties.sort_unstable_by(|a, b| a.id().cmp(b.id()));
|
||||
|
||||
space_set_objects.iter_mut().for_each(|object| {
|
||||
let id = properties_ref[object.value()];
|
||||
let value = properties.binary_search_by_key(&id, |p| p.id()).unwrap();
|
||||
object.set_value(value);
|
||||
});
|
||||
}
|
||||
|
||||
Core::new(
|
||||
name,
|
||||
version,
|
||||
spaces,
|
||||
properties,
|
||||
space_set_objects,
|
||||
scales,
|
||||
max_elements,
|
||||
)
|
||||
}
|
||||
237
src/storage/xyz.rs
Normal file
237
src/storage/xyz.rs
Normal file
@@ -0,0 +1,237 @@
|
||||
//! # XYZ file format
|
||||
//!
|
||||
//! This module supports reading files read by the [MeshView] tool used at
|
||||
//! the [University of Oslo].
|
||||
//!
|
||||
//! # File structure
|
||||
//!
|
||||
//! Each file begins with:
|
||||
//!
|
||||
//! ```txt
|
||||
//! RGBA [Red] [Green] [Blue] [Alpha] # RGBA
|
||||
//! [X],[Y],[Z] # WHS Origin
|
||||
//! [X],[Y],[Z] # Bregma
|
||||
//!
|
||||
//! SCALE [F]
|
||||
//! ```
|
||||
//!
|
||||
//! * `RGBA [Red] [Green] [Blue] [Alpha]`: defines the color to use for
|
||||
//! the following points
|
||||
//! * `[X],[Y],[Z] # WHS Origin`: defines where the Waxholm Origin is
|
||||
//! in Voxel coordinates.
|
||||
//! * `[X],[Y],[Z] # Bregma`: same as above, for another reference
|
||||
//! space.
|
||||
//! * `SCALE [F]`: **TBC** Size of the voxels.
|
||||
//!
|
||||
//! The rest of the file contains (one per line):
|
||||
//! * coordinate triplets (x, y and z), each representing one point
|
||||
//! coordinate.
|
||||
//! * `RGB [Red] [Green] [Blue]`: Which applies from that line
|
||||
//! until further notice.
|
||||
//! * A comment Line, starting with `#`
|
||||
//!
|
||||
//! ## File Coordinate system
|
||||
//!
|
||||
//! Coordinates in MeshView follow RAS (Right-Anterior-Superior)
|
||||
//! orientation and are expressed in voxels:
|
||||
//! * First axis `x` starts from the left side of the volume, and
|
||||
//! points towards the right.
|
||||
//! * Second axis `y` starts from the backmost position in the volume,
|
||||
//! and points towards the front.
|
||||
//! * Third axis `z` starts from the bottom of the volume and points
|
||||
//! towards the top.
|
||||
//!
|
||||
//! # Waxholm Space
|
||||
//!
|
||||
//! ## Conversion to Waxholm Space
|
||||
//!
|
||||
//! The [Waxholm Space Atlas] of the Sprague Dawley Rat Brain (WHS) uses
|
||||
//! the same axis order and orientation as the MeshView tool, there is
|
||||
//! only a translation of the origin and a scaling have to be applied.
|
||||
//!
|
||||
//! # Example
|
||||
//!
|
||||
//! ```txt
|
||||
//! RGBA 1 0 0 1 # RGBA
|
||||
//! 244,623,248 # WHS Origin
|
||||
//! 246,653,440 # Bregma
|
||||
//!
|
||||
//! #Aar27s49 26 0
|
||||
//! RGB 0.12941176470588237 0.403921568627451 0.1607843137254902
|
||||
//! 221.40199877 413.34541500312037 172.79973508489095
|
||||
//! 220.5800097805 412.82939421970866 173.56428074436994
|
||||
//!
|
||||
//! #Aar27s48 49 0
|
||||
//! RGB 0.12941176470588237 0.403921568627451 0.1607843137254902
|
||||
//! 237.35325687425 412.5720395183866 176.6713556605702
|
||||
//! ```
|
||||
//!
|
||||
//! ## Conversion to Waxholm
|
||||
//!
|
||||
//! Assuming the following extents of "WHS Rat 39 μm" in voxels:
|
||||
//!
|
||||
//! * Leftmost sagittal plane: `x = 0`
|
||||
//! * Backmost coronal plane: `y = 0`
|
||||
//! * Bottommost horizontal plane: `z = 0`
|
||||
//! * Rightmost sagittal plane: `x = 511`
|
||||
//! * Frontmost coronal plane: `y = 1023`
|
||||
//! * Topmost horizontal plane: `z = 511`
|
||||
//!
|
||||
//! **NOTE**: Directions are deliberately matching the default
|
||||
//! orientation of NIfTI data.
|
||||
//!
|
||||
//! 1. As per the `WHS Origin` directive, it is at 244, 623, 248 voxel
|
||||
//! coordinates, which means each coordinate must be subtracted with
|
||||
//! the corresponding value, then
|
||||
//! 2. the coordinates must be converted to millimeters, a.k.a
|
||||
//! multiplied by the atlas resolution. For the atlas of this example
|
||||
//! it is 0.0390625 [mm], isotropic.
|
||||
//!
|
||||
//! This gives us the following conversion formula:
|
||||
//!
|
||||
//! ```txt
|
||||
//! ⎡ 0.0390625 0 0 0 ⎤
|
||||
//! [ xw yw zw 1 ] = [ xq yq zq 1 ] * ⎢ 0 0.0390625 0 0 ⎥
|
||||
//! ⎢ 0 0 0.0390625 0 ⎥
|
||||
//! ⎣ -9.53125 -24.3359375 -9.6875 1 ⎦
|
||||
//! ```
|
||||
//!
|
||||
//! Where:
|
||||
//! * `[xw, yw, zw 1]` are WHS coordinates (RAS directions, expressed
|
||||
//! in millimeters).
|
||||
//! * `[xq, yq, zq 1]` are MeshView coordinates for the **WHS Rat 39 μm**
|
||||
//! package (RAS directions, expressed in 39.0625 μm voxels).
|
||||
//!
|
||||
//!
|
||||
//!
|
||||
//! [MeshView]: http://www.nesys.uio.no/MeshView/meshview.html?atlas=WHS_SD_rat_atlas_v2
|
||||
//! [University of Oslo]: https://www.med.uio.no/imb/english/research/groups/neural-systems/index.html
|
||||
//! [Waxholm Space Atlas]: https://www.nitrc.org/projects/whs-sd-atlas
|
||||
|
||||
use std::collections::HashMap;
|
||||
use std::fs::File;
|
||||
use std::io::BufReader;
|
||||
use std::io::Error;
|
||||
use std::io::ErrorKind;
|
||||
use std::io::Read;
|
||||
|
||||
use super::bincode::store;
|
||||
use super::model::v1::Shape;
|
||||
use super::model::v1::SpatialObject;
|
||||
use super::model::Properties;
|
||||
|
||||
fn convert(string: &str) -> Result<Vec<SpatialObject>, Error> {
|
||||
// Read manually the XYZ file, as this is a simple format.
|
||||
// Read line by line, skip all line we don't know how to parse, for the
|
||||
// remaining ones do:
|
||||
// * lines starting with '#A' we update the current point ID
|
||||
// * lines we can parse as triplet fo f64, add a position to the list,
|
||||
// under the oid key.
|
||||
let mut oids = HashMap::new();
|
||||
let mut oid = None;
|
||||
let mut origin = vec![];
|
||||
for line in string.lines() {
|
||||
let values = line.split_whitespace().collect::<Vec<_>>();
|
||||
|
||||
if values.is_empty() {
|
||||
// Skip empty lines
|
||||
continue;
|
||||
}
|
||||
|
||||
match values[0] {
|
||||
"RGBA" => (),
|
||||
"RGB" => (),
|
||||
"SCALE" => (),
|
||||
_ if values[0].starts_with("#A") => {
|
||||
// Update the oid value.
|
||||
oid = Some(values[0].trim_start_matches('#').to_string());
|
||||
trace!("FOUND OID {:?}", oid);
|
||||
}
|
||||
_ if line.contains("WHS") => {
|
||||
// Store the voxel offset value
|
||||
let t: Vec<_> = values[0]
|
||||
.split(',')
|
||||
.filter_map(|s| match s.parse::<f64>() {
|
||||
Err(_) => None,
|
||||
Ok(v) => Some(v),
|
||||
})
|
||||
.collect();
|
||||
|
||||
if t.len() == 3 && origin.is_empty() {
|
||||
origin = t;
|
||||
} else {
|
||||
return Err(Error::new(
|
||||
ErrorKind::InvalidData,
|
||||
format!("Invalid WHS origin new {:?}, current {:?}", t, origin),
|
||||
));
|
||||
}
|
||||
trace!("ORIGIN FOUND: {:?}", origin);
|
||||
}
|
||||
_ if values.len() == 3 => {
|
||||
// Check we have an oid to register the position under first.
|
||||
|
||||
let x = values[0].parse::<f64>();
|
||||
let y = values[1].parse::<f64>();
|
||||
let z = values[2].parse::<f64>();
|
||||
|
||||
if let (Some(oid), Ok(x), Ok(y), Ok(z)) = (oid.clone(), x, y, z) {
|
||||
trace!("after (oid, x, y, z) = {:?}", (&oid, &x, &y, &z));
|
||||
// We need to convert these voxel values into mm-s
|
||||
let (x, y, z) = (x - origin[0], y - origin[1], z - origin[2]);
|
||||
let (x, y, z) = (x * 0.039_062_5, y * 0.039_062_5, z * 0.039_062_5);
|
||||
|
||||
oids.entry(oid)
|
||||
.or_insert_with(Vec::new)
|
||||
.push(vec![x, y, z]);
|
||||
}
|
||||
}
|
||||
_ => trace!("line {:?}, values: {:?}", line, values),
|
||||
}
|
||||
}
|
||||
|
||||
// Transform the points into SpatialObjects
|
||||
Ok(oids
|
||||
.drain()
|
||||
.map(|(k, v)| {
|
||||
let properties = Properties {
|
||||
type_name: "Feature".to_string(),
|
||||
id: k,
|
||||
};
|
||||
|
||||
let shapes = v
|
||||
.into_iter()
|
||||
.map(|position| Shape {
|
||||
type_name: "Point".to_string(),
|
||||
reference_space: "WHS-Rat-um".to_string(),
|
||||
vertices: vec![position],
|
||||
})
|
||||
.collect();
|
||||
|
||||
SpatialObject { properties, shapes }
|
||||
})
|
||||
.collect())
|
||||
}
|
||||
|
||||
/// Read a XYZ file and convert it to the internal format for indexing.
|
||||
///
|
||||
/// This only converts the data point definitions, a reference space
|
||||
/// needs to be provided as well to be able to build an index.
|
||||
///
|
||||
/// # Parameters
|
||||
///
|
||||
/// * `name`:
|
||||
/// Base name of the file,
|
||||
/// * `.xyz` will be automatically appended for the source file, while
|
||||
/// * `.bin` will be appended for the output file.
|
||||
pub fn from(name: &str) -> Result<(), Error> {
|
||||
let fn_in = format!("{}.xyz", name);
|
||||
let fn_out = format!("{}.bin", name);
|
||||
|
||||
let mut file_in = BufReader::new(File::open(&fn_in)?);
|
||||
let mut string = String::new();
|
||||
file_in.read_to_string(&mut string)?;
|
||||
|
||||
let v = convert(&string)?;
|
||||
|
||||
store(v, &fn_out)
|
||||
}
|
||||
Reference in New Issue
Block a user