Compare commits

7 Commits

6fc2cc5942 ... cdb3746a34

| Author | SHA1 | Date |
|---|---|---|
| | cdb3746a34 | |
| | 3b34991e24 | |
| | e0b6dda0ac | |
| | 617c2a1018 | |
| | b112fcfab6 | |
| | 8699c066e5 | |
| | 7a0fbc612f | |
@@ -30,8 +30,6 @@ path = "src/main.rs"
ironsea_index = "^0.1"
ironsea_index_sfc_dbc = "^0.1"
ironsea_index_hashmap = "^0.1"
ironsea_table = "^0.1"
ironsea_table_vector = "^0.1"

memmap = "^0.7"
lazy_static = "^1.3"
@@ -5,14 +5,13 @@ use super::space_db::SpaceDB;
use super::space_index::SpaceSetObject;
use super::DataBase;
use super::ResultSet;
use crate::SpaceObject;

pub struct CoreQueryParameters<'a> {
pub db: &'a DataBase,
pub output_space: Option<&'a str>,
pub threshold_volume: Option<f64>,
pub view_port: &'a Option<(Vec<f64>, Vec<f64>)>,
pub resolution: Option<Vec<u32>>,
pub resolution: &'a Option<Vec<u32>>,
}

impl CoreQueryParameters<'_> {
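The CoreQueryParameters change above swaps the owned resolution: Option<Vec<u32>> for a borrowed &'a Option<Vec<u32>>, matching the already-borrowed view_port. A minimal sketch of that borrowed-parameter pattern, using simplified stand-in names (QueryParams and run_query are illustrative, not part of this repository):

```rust
// Simplified stand-ins for the parameter struct; the real fields live in CoreQueryParameters.
struct QueryParams<'a> {
    output_space: Option<&'a str>,
    view_port: &'a Option<(Vec<f64>, Vec<f64>)>,
    resolution: &'a Option<Vec<u32>>,
}

fn run_query(params: &QueryParams) -> usize {
    // A real query would use every field; here we only look at the resolution.
    params.resolution.as_ref().map_or(0, |r| r.len())
}

fn main() {
    // The caller keeps ownership, so the same values can back several parameter structs.
    let resolution = Some(vec![64u32, 64, 64]);
    let view_port = None;

    let params = QueryParams {
        output_space: Some("std"),
        view_port: &view_port,
        resolution: &resolution,
    };
    println!("resolution axes: {}", run_query(&params));
}
```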
@@ -29,6 +28,7 @@ impl CoreQueryParameters<'_> {
}
}

// FIXME: Ids are expected unique, irrespective of the enum variant!
#[derive(Clone, Debug, Deserialize, Eq, Hash, PartialEq, Serialize)]
pub enum Properties {
Feature(String),
@@ -65,16 +65,6 @@ impl Properties {
}
}

// FIXME: Which is faster, the code below or the automatically generated
// implementation?
/*
impl PartialEq for Properties {
fn eq(&self, other: &Self) -> bool {
self.id() == other.id() && self.type_name() == other.type_name()
}
}
*/

#[derive(Clone, Debug, Deserialize, Serialize)]
pub struct Core {
title: String,
@@ -105,7 +95,7 @@ impl Core {
let filtered = space_objects
.iter()
.filter_map(|object| {
if &object.space_id().0 == space.name() {
if object.space_id() == space.name() {
let position: Vec<f64> = object.position().into();
Some(SpaceSetObject::new(
space.name(),
@@ -157,22 +147,8 @@ impl Core {
&self.properties
}

fn to_space_object(&self, space_id: &str, list: Vec<SpaceSetObject>) -> Vec<SpaceObject> {
list.into_iter()
.map(|o| {
let offset: usize = o.value().into();
let value = self.properties[offset].clone();
SpaceObject {
space_id: space_id.to_string(),
position: o.position().clone(),
value,
}
})
.collect()
}

fn decode_positions(
list: &mut [SpaceObject],
list: &mut [(Position, &Properties)],
space: &Space,
db: &DataBase,
output_space: &Option<&str>,
@@ -181,18 +157,17 @@ impl Core {
let unified = db.space(unified_id)?;

// Rebase the point to the requested output space before decoding.
for o in list {
o.position = unified
.decode(&Space::change_base(&o.position, space, unified)?)?
for (position, _) in list {
*position = unified
.decode(&Space::change_base(&position, space, unified)?)?
.into();
o.space_id = unified_id.to_string();
}
} else {
// Decode the positions into f64 values, which are defined in their
// respective reference space.
for o in list {
for (position, _) in list {
// Simply decode
o.position = space.decode(&o.position)?.into();
*position = space.decode(&position)?.into();
}
}

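decode_positions now takes a mutable slice of (Position, &Properties) pairs instead of owned SpaceObject values and rewrites each position in place, leaving the borrowed properties untouched. A small sketch of that in-place rewrite over tuple slices; Point and the scaling step are illustrative stand-ins for the real decode/change_base calls:

```rust
// Illustrative stand-ins; the real code decodes via Space::decode / Space::change_base.
#[derive(Debug, Clone)]
struct Point(Vec<f64>);

fn decode_in_place(list: &mut [(Point, &str)], scale: f64) {
    // Only the position half of each pair is rewritten; the borrowed
    // properties half is left untouched.
    for (position, _) in list.iter_mut() {
        for c in position.0.iter_mut() {
            *c *= scale;
        }
    }
}

fn main() {
    let label = String::from("feature-1");
    let mut rows = vec![(Point(vec![1.0, 2.0, 3.0]), label.as_str())];
    decode_in_place(&mut rows, 0.5);
    println!("{:?}", rows[0].0);
}
```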
@@ -233,12 +208,14 @@ impl Core {
p.push(to.encode(&position)?);
}

let r = s.get_by_positions(&p, parameters)?;
let mut r = self.to_space_object(s.name(), r);
let mut r = s
.get_by_positions(&p, parameters)?
.into_iter()
.map(|(position, fields)| (position, &self.properties[fields.value().as_usize()]))
.collect::<Vec<_>>();
Self::decode_positions(r.as_mut_slice(), to, db, output_space)?;

Self::decode_positions(&mut r, to, db, output_space)?;

results.append(&mut r);
results.push((s.name(), r));
}

Ok(results)
@@ -271,19 +248,25 @@ impl Core {
// let current_shape = shape.encode(current_space)?;
// println!("current shape Encoded: {:?}", current_shape);

let r = s.get_by_shape(&current_shape, parameters)?;
let mut r = self.to_space_object(s.name(), r);
let mut r = s
.get_by_shape(&current_shape, parameters)?
.into_iter()
.map(|(position, fields)| (position, &self.properties[fields.value().as_usize()]))
.collect::<Vec<_>>();
Self::decode_positions(r.as_mut_slice(), current_space, db, output_space)?;

Self::decode_positions(&mut r, current_space, db, output_space)?;

results.append(&mut r);
results.push((s.name(), r));
}

Ok(results)
}

// Search by Id, a.k.a values
pub fn get_by_id<S>(&self, parameters: &CoreQueryParameters, id: S) -> ResultSet
pub fn get_by_id<S>(
&self,
parameters: &CoreQueryParameters,
id: S,
) -> Result<Vec<(&String, Vec<Position>)>, String>
where
S: Into<String>,
{
@@ -304,12 +287,28 @@ impl Core {
for s in &self.space_db {
let current_space = db.space(s.name())?;

let r = s.get_by_id(offset, parameters)?;
let mut r = self.to_space_object(s.name(), r);
let mut positions = s.get_by_id(offset, parameters)?;

Self::decode_positions(&mut r, current_space, db, output_space)?;
//Self::decode_positions(r.as_mut_slice(), current_space, db, output_space)?;
if let Some(unified_id) = *output_space {
let unified = db.space(unified_id)?;

results.append(&mut r);
// Rebase the point to the requested output space before decoding.
for position in &mut positions {
*position = unified
.decode(&Space::change_base(position, current_space, unified)?)?
.into();
}
} else {
// Decode the positions into f64 values, which are defined in their
// respective reference space.
for position in &mut positions {
// Simply decode
*position = current_space.decode(position)?.into();
}
}

results.push((s.name(), positions));
}
}

@@ -350,9 +349,9 @@ impl Core {
Ok(v) => {
// Convert the search Volume into Universe.
let mut p = vec![];
for o in v {
for position in v {
if let Ok(position) =
Space::change_base(o.position(), from, Space::universe())
Space::change_base(&position, from, Space::universe())
{
p.push(position)
}
@@ -384,21 +383,20 @@ impl Core {
p.push(position);
}

let r = s.get_by_positions(&p, parameters)?;
let mut r = self.to_space_object(s.name(), r);
let mut r = s
.get_by_positions(&p, parameters)?
.into_iter()
.map(|(position, fields)| {
(position, &self.properties[fields.value().as_usize()])
})
.collect::<Vec<_>>();

Self::decode_positions(&mut r, to, db, output_space)?;
Self::decode_positions(r.as_mut_slice(), to, db, output_space)?;

results.append(&mut r);
results.push((s.name(), r));
}
}

Ok(results)
}
}

impl ironsea_index::Record<String> for Core {
fn key(&self) -> String {
self.title.clone()
}
}

@@ -7,7 +7,6 @@ use std::collections::HashMap;
use std::fs::File;

use ironsea_index::Indexed;
use ironsea_table_vector::VectorTable;
use memmap::Mmap;

pub use db_core::Core;
@@ -15,39 +14,13 @@ pub use db_core::CoreQueryParameters;
pub use db_core::Properties;
use space::Position;
use space::Space;
pub use space_index::SpaceFields;
pub use space_index::SpaceSetObject;

pub type ResultSet = Result<Vec<SpaceObject>, String>;
pub type ReferenceSpaceIndex = ironsea_index_hashmap::Index<VectorTable<Space>, Space, String>;
type CoreIndex = ironsea_index_hashmap::Index<VectorTable<Core>, Core, String>;

#[derive(Clone, Debug, Deserialize, Hash, PartialEq, Serialize)]
pub struct SpaceId(String);

impl SpaceId {
pub fn new<S>(space_name: S) -> Self
where
S: Into<String>,
{
SpaceId(space_name.into())
}

pub fn get(&self, index: &ReferenceSpaceIndex) -> Self {
let s = index.find(&self.0);
assert_eq!(s.len(), 1);

SpaceId(s[0].name().clone())
}
}

impl<S> From<S> for SpaceId
where
S: Into<String>,
{
fn from(id: S) -> Self {
SpaceId(id.into())
}
}
// (Space Name, Position, Fields)
pub type ResultSet<'r> = Result<Vec<(&'r String, Vec<(Position, &'r Properties)>)>, String>;
pub type ReferenceSpaceIndex = ironsea_index_hashmap::Index<Space, String>;
type CoreIndex = ironsea_index_hashmap::Index<Core, String>;

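The new ResultSet groups hits per reference space: each entry pairs a space name with the (position, properties) tuples found there, replacing the old flat Vec<SpaceObject>. A small sketch of walking that nested shape; the data is made up for illustration and Position/Properties are stand-in aliases for the crate's types:

```rust
// Stand-in aliases for the crate's Position and Properties types.
type Position = Vec<f64>;
type Properties = String;

fn main() {
    // Shape of the new result set: one entry per reference space.
    let space = String::from("std");
    let feature = String::from("feature-42");
    let results: Vec<(&String, Vec<(Position, &Properties)>)> =
        vec![(&space, vec![(vec![0.1, 0.2, 0.3], &feature)])];

    for (space_name, hits) in &results {
        for (position, properties) in hits {
            println!("{}: {:?} -> {}", space_name, position, properties);
        }
    }

    // main.rs in this diff indexes it the same way: results[0].1[0] is the
    // first (position, properties) pair of the first space.
    let (first_position, _) = &results[0].1[0];
    assert_eq!(first_position.len(), 3);
}
```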
#[derive(Clone, Debug, Eq, Hash, PartialEq, Serialize)]
pub struct SpaceObject {
@@ -56,7 +29,6 @@ pub struct SpaceObject {
pub value: Properties,
}

#[derive(Clone, Debug, Deserialize, Serialize)]
pub struct DataBase {
reference_spaces: ReferenceSpaceIndex,
cores: CoreIndex,
@@ -65,8 +37,8 @@ pub struct DataBase {
impl DataBase {
pub fn new(spaces: Vec<Space>, cores: Vec<Core>) -> Self {
DataBase {
reference_spaces: ReferenceSpaceIndex::new(VectorTable::new(spaces)),
cores: CoreIndex::new(VectorTable::new(cores)),
reference_spaces: ReferenceSpaceIndex::new(spaces.into_iter()),
cores: CoreIndex::new(cores.into_iter()),
}
}

@@ -112,7 +84,7 @@ impl DataBase {
}
}

pub fn load_core(name: &str) -> Result<(Vec<Space>, Core), String> {
fn load_core(name: &str) -> Result<(Vec<Space>, Core), String> {
let mmap = DataBase::mmap_file(&name)?;

match bincode::deserialize(&mmap[..]) {
@@ -161,7 +133,7 @@ impl DataBase {
}
}

pub fn space_id<S>(&self, name: S) -> Result<SpaceId, String>
pub fn space_id<S>(&self, name: S) -> Result<String, String>
where
S: Into<String>,
{
@@ -169,7 +141,7 @@ impl DataBase {
let r = self.reference_spaces.find(&name);
let s: &Space = Self::check_exactly_one(&r, "spaces", &name)?;

Ok(SpaceId(s.name().clone()))
Ok(s.name().clone())
}

// Lookup a space within the reference spaces registered
@@ -178,10 +150,7 @@ impl DataBase {
}

// Lookup a space within the reference spaces registered
pub fn space<S>(&self, name: S) -> Result<&Space, String>
where
S: Into<String>,
{
pub fn space(&self, name: &str) -> Result<&Space, String> {
let name = name.into();
if &name == space::Space::universe().name() {
Ok(space::Space::universe())
@@ -198,10 +167,7 @@ impl DataBase {
}

// Lookup a dataset within the datasets registered
pub fn core<S>(&self, name: S) -> Result<&Core, String>
where
S: Into<String>,
{
pub fn core(&self, name: &str) -> Result<&Core, String> {
let name = name.into();
let r = self.cores.find(&name);

@@ -214,3 +180,9 @@ impl ironsea_index::Record<String> for Space {
self.name().clone()
}
}

impl ironsea_index::Record<String> for Core {
fn key(&self) -> String {
self.name().clone()
}
}

@@ -188,7 +188,7 @@ impl Axis {
// Apply Unit scaling
let d = d * self.measurement_unit.factor();

Ok(self.unit_vector.clone() * d)
Ok(&self.unit_vector * d)
}

// Value is expressed on the current Axis, not in absolute coordinates!

@@ -39,6 +39,10 @@ impl Coordinate {
Coordinate::CoordinateF64(_v) => unreachable!(),
}
}

pub fn as_usize(&self) -> usize {
self.u64() as usize
}
}

/*

@@ -5,7 +5,7 @@ use super::MAX_K;

#[derive(Clone, Debug, Deserialize, PartialEq, Serialize)]
pub enum CoordinateSystem {
Universe,
Universe { origin: Position },
// Coordinates in Universe, expressed in f64, and in the Universe number of dimensions.
AffineSystem { origin: Position, axes: Vec<Axis> },
}
@@ -18,19 +18,16 @@ impl CoordinateSystem {
}
}

pub fn origin(&self) -> Position {
pub fn origin(&self) -> &Position {
match self {
CoordinateSystem::Universe => {
let origin = [0f64; MAX_K].to_vec();
origin.into()
}
CoordinateSystem::AffineSystem { origin, .. } => origin.clone(),
CoordinateSystem::Universe { origin, .. } => origin,
CoordinateSystem::AffineSystem { origin, .. } => origin,
}
}

pub fn axes(&self) -> Vec<Axis> {
match self {
CoordinateSystem::Universe => {
CoordinateSystem::Universe { .. } => {
//FIXME: Generate a CoordinateSystem on the fly or store it as part of the Universe Space?
unimplemented!()
}
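Storing an origin inside CoordinateSystem::Universe lets origin() return &Position instead of building a fresh vector on every call. A minimal sketch of the same pattern with simplified types (Position is a stand-in alias here, not the crate's type):

```rust
// Simplified stand-in for the crate's Position type.
type Position = Vec<f64>;

enum CoordinateSystem {
    // The Universe variant now owns its origin, so no allocation is needed
    // when the origin is requested.
    Universe { origin: Position },
    AffineSystem { origin: Position },
}

impl CoordinateSystem {
    // Both variants can hand out a borrow of the stored origin.
    fn origin(&self) -> &Position {
        match self {
            CoordinateSystem::Universe { origin, .. } => origin,
            CoordinateSystem::AffineSystem { origin, .. } => origin,
        }
    }
}

fn main() {
    let universe = CoordinateSystem::Universe {
        origin: vec![0.0; 3],
    };
    // Callers clone only when they actually need ownership.
    println!("origin = {:?}", universe.origin());
}
```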
@@ -40,7 +37,7 @@ impl CoordinateSystem {

pub fn dimensions(&self) -> usize {
match self {
CoordinateSystem::Universe => MAX_K,
CoordinateSystem::Universe { .. } => MAX_K,
CoordinateSystem::AffineSystem { axes, .. } => axes.len(),
}
}
@@ -50,10 +47,10 @@ impl CoordinateSystem {
let mut high = Vec::with_capacity(self.dimensions());

match self {
CoordinateSystem::Universe => {
CoordinateSystem::Universe { .. } => {
for _ in 0..self.dimensions() {
low.push(std::f64::MAX);
high.push(std::f64::MIN);
low.push(std::f64::MIN);
high.push(std::f64::MAX);
}
}
CoordinateSystem::AffineSystem { axes, .. } => {
@@ -84,16 +81,16 @@ impl CoordinateSystem {
// return a position in the current coordinate system.
pub fn rebase(&self, position: &Position) -> Result<Position, String> {
match self {
CoordinateSystem::Universe => {
CoordinateSystem::Universe { origin } => {
// Ensure the coordinates are encoded into F64 variants of
// coordinates by forcing an addition to the origin position
// which is expressed as F64 variants. The addition will convert
// to F64 automatically.
Ok(self.origin().clone() + position.clone())
Ok(origin + position)
}
CoordinateSystem::AffineSystem { origin, axes } => {
let dimensions = axes.len();
let translated = position.clone() - origin.clone();
let translated = position - origin;
let mut rebased = Vec::with_capacity(dimensions);

for a in axes.iter().take(dimensions) {
@@ -110,16 +107,16 @@ impl CoordinateSystem {
// return a position in Universe coordinates.
pub fn absolute_position(&self, position: &Position) -> Result<Position, String> {
match self {
CoordinateSystem::Universe => {
CoordinateSystem::Universe { origin } => {
// Ensure the coordinates are encoded into F64 variants of
// coordinates by forcing an addition to the origin position
// which is expressed as F64 variants. The addition will convert
// to F64 automatically.
Ok(self.origin().clone() + position.clone())
Ok(origin + position)
}
CoordinateSystem::AffineSystem { axes, .. } => {
// Start from the base origin.
let mut rebased = self.origin();
let mut rebased = self.origin().clone();

// Convert to Universe coordinates
for k in 0..axes.len() {
@@ -138,7 +135,7 @@ impl CoordinateSystem {
let mut encoded = vec![];

match self {
CoordinateSystem::Universe => {
CoordinateSystem::Universe { .. } => {
assert_eq!(position.len(), MAX_K);
for c in position {
encoded.push(Coordinate::CoordinateF64(*c));
@@ -161,7 +158,7 @@ impl CoordinateSystem {
let mut decoded = vec![];

match self {
CoordinateSystem::Universe => {
CoordinateSystem::Universe { .. } => {
assert_eq!(position.dimensions(), MAX_K);
for c in 0..position.dimensions() {
decoded.push(position[c].into());

@@ -20,7 +20,9 @@ pub const MAX_K: usize = 3;
lazy_static! {
static ref UNIVERSE: Space = Space {
name: "Universe".into(),
system: CoordinateSystem::Universe,
system: CoordinateSystem::Universe {
origin: [0f64; MAX_K].to_vec().into()
},
};
}

@@ -53,7 +55,7 @@ impl Space {
&self.name
}

pub fn origin(&self) -> Position {
pub fn origin(&self) -> &Position {
self.system.origin()
}

@@ -67,19 +67,17 @@ impl Position {

// Unit / Normalized vector from self.
pub fn unit(&self) -> Self {
self.clone() * (1f64 / self.norm())
self * (1f64 / self.norm())
}

// This multiplies self^T with other, producing a scalar value
pub fn dot_product(&self, other: &Self) -> f64 {
assert_eq!(self.dimensions(), other.dimensions());

let point = self.clone();
let other = other.clone();
let mut product = 0f64;

for k in 0..self.dimensions() {
product += (point[k] * other[k]).f64();
product += (self[k] * other[k]).f64();
}

product
@@ -190,6 +188,22 @@ impl Add for Position {
}
}

impl Add for &Position {
type Output = Position;

fn add(self, rhs: Self) -> Self::Output {
let dimensions = self.dimensions();
assert_eq!(dimensions, rhs.dimensions());
let mut v = Vec::with_capacity(dimensions);

for k in 0..dimensions {
v.push(self[k] + rhs[k]);
}

v.into()
}
}

impl AddAssign for Position {
fn add_assign(&mut self, rhs: Self) {
let dimensions = self.dimensions();
@@ -247,6 +261,21 @@ impl Mul<f64> for Position {
}
}

impl Mul<f64> for &Position {
type Output = Position;

fn mul(self, rhs: f64) -> Self::Output {
let dimensions = self.dimensions();
let mut v = Vec::with_capacity(dimensions);

for k in 0..dimensions {
v.push(self[k] * rhs);
}

v.into()
}
}

// Scalar product
impl MulAssign<f64> for Position {
fn mul_assign(&mut self, rhs: f64) {

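The new Add for &Position and Mul<f64> for &Position impls let callers write arithmetic on borrows (&self.unit_vector * d, center - vr, p - center) without the clone() calls the old code needed. A minimal sketch of the by-reference operator pattern on a simplified vector type; Vec3 is an illustrative stand-in, not the crate's Position:

```rust
use std::ops::{Add, Mul, Sub};

// Illustrative stand-in for Position.
#[derive(Clone, Debug)]
struct Vec3(Vec<f64>);

// Implementing the operators for &Vec3 means the operands are only borrowed.
impl Add for &Vec3 {
    type Output = Vec3;
    fn add(self, rhs: Self) -> Vec3 {
        Vec3(self.0.iter().zip(&rhs.0).map(|(a, b)| a + b).collect())
    }
}

impl Sub for &Vec3 {
    type Output = Vec3;
    fn sub(self, rhs: Self) -> Vec3 {
        Vec3(self.0.iter().zip(&rhs.0).map(|(a, b)| a - b).collect())
    }
}

impl Mul<f64> for &Vec3 {
    type Output = Vec3;
    fn mul(self, rhs: f64) -> Vec3 {
        Vec3(self.0.iter().map(|a| a * rhs).collect())
    }
}

fn main() {
    let center = Vec3(vec![1.0, 1.0, 1.0]);
    let radius = Vec3(vec![0.5, 0.5, 0.5]);
    // No clones: both corners are computed from borrowed operands.
    let (low, high) = (&center - &radius, &center + &radius);
    println!("{:?} {:?} {:?}", low, high, &center * 2.0);
}
```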
@@ -78,8 +78,8 @@ impl Shape {
for _ in 0..dimensions {
vr.push(*radius);
}
let vr: Position = vr.into();
(center.clone() - vr.clone(), center.clone() + vr)
let vr: &Position = &vr.into();
(center - vr, center + vr)
}
Shape::BoundingBox(lower, higher) => (lower.clone(), higher.clone()),
}
@@ -188,7 +188,7 @@ impl Shape {

let positions = Shape::gen(&lower, &higher)
.into_iter()
.filter(|p| (p.clone() - center.clone()).norm() <= radius)
.filter(|p| (p - center).norm() <= radius)
.collect();

Ok(positions)

@@ -5,8 +5,6 @@ use std::collections::HashSet;
use std::hash::Hash;
use std::hash::Hasher;

use ironsea_table_vector::VectorTable;

use super::space::Coordinate;
use super::space::Position;
use super::space::Shape;
@@ -101,11 +99,7 @@ impl SpaceDB {
let shift = if count >= 31 { 31 } else { count };
count += 1;
indices.push((
SpaceSetIndex::new(
&VectorTable::new(space_objects.to_vec()),
DIMENSIONS,
CELL_BITS,
),
SpaceSetIndex::new(space_objects.iter(), DIMENSIONS, CELL_BITS),
vec![power.0, power.0, power.0],
shift,
));
@@ -124,11 +118,7 @@ impl SpaceDB {

// Insert Full resolution index.
indices.push((
SpaceSetIndex::new(
&VectorTable::new(space_objects.clone()),
DIMENSIONS,
CELL_BITS,
),
SpaceSetIndex::new(space_objects.iter(), DIMENSIONS, CELL_BITS),
vec![count, count, count],
0, // Smallest value => highest resolution
));
@@ -167,11 +157,7 @@ impl SpaceDB {
}

indices.push((
SpaceSetIndex::new(
&VectorTable::new(space_objects.to_vec()),
DIMENSIONS,
CELL_BITS,
),
SpaceSetIndex::new(space_objects.iter(), DIMENSIONS, CELL_BITS),
vec![count, count, count],
shift,
));
@@ -186,7 +172,7 @@ impl SpaceDB {
} else {
// Generate only full-scale.
indices.push((
SpaceSetIndex::new(&VectorTable::new(space_objects), DIMENSIONS, CELL_BITS),
SpaceSetIndex::new(space_objects.iter(), DIMENSIONS, CELL_BITS),
vec![0, 0, 0],
0,
));
@@ -316,12 +302,10 @@ impl SpaceDB {
}

// Convert the value back to caller's references
fn decode_value(&self, mut objects: Vec<SpaceSetObject>) -> Vec<SpaceSetObject> {
for o in &mut objects {
o.set_value(self.values[o.value().u64() as usize]);
fn decode_value(&self, objects: &mut Vec<(Position, SpaceFields)>) {
for (_, fields) in objects.iter_mut() {
fields.set_value(self.values[fields.value().u64() as usize]);
}

objects
}

// Search by Id, a.k.a values
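Across these hunks, index construction stops wrapping the data in a VectorTable and instead hands the constructor an iterator (SpaceSetIndex::new(space_objects.iter(), DIMENSIONS, CELL_BITS) here, ReferenceSpaceIndex::new(spaces.into_iter()) earlier in mod.rs). A toy sketch of that iterator-based constructor shape; NameIndex is invented for illustration and does not reflect the ironsea crates' actual API:

```rust
use std::collections::HashMap;

// A toy index built directly from an iterator of records, mirroring the move
// away from wrapping data in VectorTable before indexing.
struct NameIndex {
    by_name: HashMap<String, usize>,
}

impl NameIndex {
    fn new<'a, I>(records: I) -> Self
    where
        I: Iterator<Item = &'a str>,
    {
        let by_name = records
            .enumerate()
            .map(|(offset, name)| (name.to_string(), offset))
            .collect();
        NameIndex { by_name }
    }

    fn find(&self, name: &str) -> Option<usize> {
        self.by_name.get(name).copied()
    }
}

fn main() {
    let spaces = vec!["Universe".to_string(), "std".to_string()];
    // Build the index from borrowed items; the caller keeps ownership of `spaces`.
    let index = NameIndex::new(spaces.iter().map(|s| s.as_str()));
    println!("{:?}", index.find("std"));
}
```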
@@ -330,7 +314,7 @@ impl SpaceDB {
&self,
id: usize,
parameters: &CoreQueryParameters,
) -> Result<Vec<SpaceSetObject>, String> {
) -> Result<Vec<(Position)>, String> {
// Is that ID referenced in the current space?
if let Ok(offset) = self.values.binary_search(&id.into()) {
let index = self.resolution(parameters);
@@ -343,20 +327,17 @@ impl SpaceDB {
let objects = self.resolutions[index]
.find_by_value(&SpaceFields::new(self.name().into(), offset.into()));

let mut results = if let Some(view_port) = view_port {
let results = if let Some(view_port) = view_port {
objects
.into_iter()
.filter(|o| view_port.contains(o.position()))
.collect::<Vec<SpaceSetObject>>()
.filter(|position| view_port.contains(position))
.collect::<Vec<_>>()
} else {
objects
};

// Convert the Value back to caller's references
// Here we do not use decode() as we have a single id value to manage.
for o in &mut results {
o.set_value(id.into());
}

Ok(results)
} else {
@@ -370,7 +351,7 @@ impl SpaceDB {
&self,
positions: &[Position],
parameters: &CoreQueryParameters,
) -> Result<Vec<SpaceSetObject>, String> {
) -> Result<Vec<(Position, SpaceFields)>, String> {
let index = self.resolution(parameters);

// FIXME: Should I do it here, or add the assumption this is a clean list?
@@ -379,13 +360,18 @@ impl SpaceDB {
//let view_port = parameters.view_port(space);

// Select the objects
let results = positions
let mut results = positions
.iter()
.flat_map(|position| self.resolutions[index].find(position))
.collect::<Vec<SpaceSetObject>>();
.flat_map(|position| {
self.resolutions[index]
.find(position)
.into_iter()
.map(move |fields| (position.clone(), fields.clone()))
})
.collect();

// Decode the Value reference
let results = self.decode_value(results);
self.decode_value(&mut results);

Ok(results)
}
@@ -400,7 +386,7 @@ impl SpaceDB {
&self,
shape: &Shape,
parameters: &CoreQueryParameters,
) -> Result<Vec<SpaceSetObject>, String> {
) -> Result<Vec<(Position, SpaceFields)>, String> {
let index = self.resolution(parameters);

// Convert the view port to the encoded space coordinates
@@ -408,10 +394,14 @@ impl SpaceDB {
let view_port = parameters.view_port(space);

// Select the objects
let results = self.resolutions[index].find_by_shape(&shape, &view_port)?;
let mut results = self.resolutions[index]
.find_by_shape(&shape, &view_port)?
.into_iter()
.map(|(position, fields)| (position, fields.clone()))
.collect();

// Decode the Value reference
let results = self.decode_value(results);
self.decode_value(&mut results);

Ok(results)
}

@@ -1,16 +1,14 @@
use std::cmp::Ord;

use ironsea_index::IndexedOwned;
use ironsea_table_vector::VectorTable;
use ironsea_index::IndexedDestructured;

use super::space::Coordinate;
use super::space::Position;
use super::space::Shape;
use super::SpaceId;

#[derive(Clone, Debug, Deserialize, Hash, Serialize)]
#[derive(Clone, Debug, Hash)]
pub struct SpaceSetObject {
space_id: SpaceId,
space_id: String,
position: Position,
value: Coordinate, // Efficiently store the offset within the SpaceDB values vector
}
@@ -28,7 +26,7 @@ impl SpaceSetObject {
&self.value
}

pub fn space_id(&self) -> &SpaceId {
pub fn space_id(&self) -> &String {
&self.space_id
}

@@ -51,14 +49,26 @@ impl SpaceSetObject {

#[derive(Clone, Debug, Deserialize, Serialize)]
pub struct SpaceFields {
space_id: SpaceId,
space_id: String,
value: Coordinate,
}

impl SpaceFields {
pub fn new(space_id: SpaceId, value: Coordinate) -> Self {
pub fn new(space_id: String, value: Coordinate) -> Self {
SpaceFields { space_id, value }
}

pub fn space_id(&self) -> &String {
&self.space_id
}

pub fn value(&self) -> &Coordinate {
&self.value
}

pub fn set_value(&mut self, value: Coordinate) {
self.value = value;
}
}

impl PartialEq for SpaceFields {
@@ -67,13 +77,13 @@ impl PartialEq for SpaceFields {
}
}

impl ironsea_index::Record<Position> for SpaceSetObject {
impl ironsea_index::Record<Position> for &SpaceSetObject {
fn key(&self) -> Position {
self.position.clone()
}
}

impl ironsea_index::RecordFields<SpaceFields> for SpaceSetObject {
impl ironsea_index::RecordFields<SpaceFields> for &SpaceSetObject {
fn fields(&self) -> SpaceFields {
SpaceFields {
space_id: self.space_id().clone(),
@@ -82,23 +92,7 @@ impl ironsea_index::RecordFields<SpaceFields> for SpaceSetObject {
}
}

impl ironsea_index::RecordBuild<Position, SpaceFields, SpaceSetObject> for SpaceSetObject {
fn build(key: &Position, fields: &SpaceFields) -> SpaceSetObject {
SpaceSetObject {
space_id: fields.space_id.clone(),
position: key.clone(),
value: fields.value,
}
}
}

pub type SpaceSetIndex = ironsea_index_sfc_dbc::IndexOwned<
VectorTable<SpaceSetObject>,
SpaceSetObject,
Position,
Coordinate,
SpaceFields,
>;
pub type SpaceSetIndex = ironsea_index_sfc_dbc::IndexOwned<SpaceFields, Position, Coordinate>;

#[derive(Clone, Debug, Deserialize, Serialize)]
pub struct SpaceIndex {
@@ -126,17 +120,17 @@ impl SpaceIndex {
}

// Inputs and Results are expressed in encoded space coordinates.
pub fn find(&self, key: &Position) -> Vec<SpaceSetObject> {
pub fn find(&self, key: &Position) -> Vec<&SpaceFields> {
self.index.find(key)
}

// Inputs and Results are expressed in encoded space coordinates.
fn find_range(&self, start: &Position, end: &Position) -> Vec<SpaceSetObject> {
fn find_range(&self, start: &Position, end: &Position) -> Vec<(Position, &SpaceFields)> {
self.index.find_range(start, end)
}

// Inputs and Results are expressed in encoded space coordinates.
pub fn find_by_value(&self, id: &SpaceFields) -> Vec<SpaceSetObject> {
pub fn find_by_value(&self, id: &SpaceFields) -> Vec<Position> {
self.index.find_by_value(id)
}

@@ -145,21 +139,22 @@ impl SpaceIndex {
&self,
shape: &Shape,
view_port: &Option<Shape>,
) -> Result<Vec<SpaceSetObject>, String> {
) -> Result<Vec<(Position, &SpaceFields)>, String> {
match shape {
Shape::Point(position) => {
if let Some(mbb) = view_port {
if mbb.contains(position) {
Ok(self.find(position))
} else {
Err(format!(
if !mbb.contains(position) {
return Err(format!(
"View port '{:?}' does not contain '{:?}'",
mbb, position
))
));
}
} else {
Ok(self.find(position))
}
Ok(self
.find(position)
.into_iter()
.map(|fields| (position.clone(), fields))
.collect())
}
Shape::BoundingBox(bl, bh) => {
if let Some(mbb) = view_port {
@@ -215,7 +210,7 @@ impl SpaceIndex {
let results = self
.find_range(&lower, &higher)
.into_iter()
.filter(|p| (p.position().clone() - center.clone()).norm() <= radius.f64())
.filter(|(position, _)| (position - center).norm() <= radius.f64())
.collect();

Ok(results)

@@ -3,8 +3,6 @@ use std::collections::HashMap;
use crate::database;
use database::space;
use database::Core;
use database::DataBase;
use database::SpaceObject;
use database::SpaceSetObject;

#[derive(Clone, Debug, Deserialize, Serialize)]
@@ -110,46 +108,49 @@ impl From<&space::Space> for Space {

Space {
name: space.name().clone(),
origin: space.origin().into(),
origin: space.origin().clone().into(),
axes,
}
}
}

pub fn to_spatial_objects(db: &DataBase, list: Vec<SpaceObject>) -> Vec<SpatialObject> {
impl From<&&database::Properties> for Properties {
fn from(p: &&database::Properties) -> Self {
Properties {
type_name: p.type_name().to_string(),
id: p.id().clone(),
}
}
}

pub fn to_spatial_objects(
list: Vec<(&String, Vec<(space::Position, &database::Properties)>)>,
) -> Vec<SpatialObject> {
// Filter per Properties, in order to regroup by it, then build a single SpatialObject per Properties.
let mut properties = HashMap::new();
for object in list {
let k = object.value.id().clone();
properties.entry(k).or_insert_with(|| vec![]).push(object);
let mut hashmap = HashMap::new();
for (space, v) in list {
for (position, properties) in v {
hashmap
.entry(properties)
.or_insert_with(|| vec![])
.push((space, position));
}
}

let mut results = vec![];
for (k, v) in properties.iter() {
for (properties, v) in hashmap.iter() {
// Group by spaces, to collect points shapes together
let shapes = v
.iter()
.filter_map(|o| match db.space(&o.space_id) {
Err(_) => None,
Ok(space) => {
if let Ok(vertices) = space.decode(&o.position) {
Some(Shape {
type_name: "Point".to_string(),
reference_space: o.space_id.clone(),
vertices: vec![vertices],
})
} else {
None
}
}
.map(|(space_id, position)| Shape {
type_name: "Point".to_string(),
reference_space: (*space_id).clone(),
vertices: vec![position.into()],
})
.collect();

results.push(SpatialObject {
properties: Properties {
type_name: "Feature".to_string(),
id: k.to_string(),
},
properties: properties.into(),
shapes,
});
}

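to_spatial_objects now receives the per-space result tuples directly and regroups them by Properties with HashMap::entry, emitting one SpatialObject per property set. A condensed sketch of that group-by step; the row type here is a stand-in, not the service's SpatialObject:

```rust
use std::collections::HashMap;

fn main() {
    // (space name, position, property id) rows, as produced by a query.
    let rows = vec![
        ("std", vec![0.1, 0.2, 0.3], "feature-1"),
        ("std", vec![0.4, 0.5, 0.6], "feature-2"),
        ("plas", vec![0.7, 0.8, 0.9], "feature-1"),
    ];

    // Regroup by property id so each output object carries all of its shapes.
    let mut by_property: HashMap<&str, Vec<(&str, Vec<f64>)>> = HashMap::new();
    for (space, position, property) in rows {
        by_property
            .entry(property)
            .or_insert_with(Vec::new)
            .push((space, position));
    }

    for (property, shapes) in &by_property {
        println!("{}: {} point shape(s)", property, shapes.len());
    }
}
```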
53 src/main.rs
@@ -41,34 +41,34 @@ fn main() {
output_space: None,
threshold_volume: Some(std::f64::MAX),
view_port: &None,
resolution: None,
resolution: &None,
};
let r = core.get_by_id(&c, id).unwrap();
println!("get_by_id {}: {}", id, r.len());
println!("{}: {:?}\n", id, r[0]);
println!("{}: {:?}\n", id, r[0].1[0]);

let c = CoreQueryParameters {
db: &db,
output_space: None,
threshold_volume: Some(0.0),
view_port: &None,
resolution: None,
resolution: &None,
};
let r = core.get_by_id(&c, id).unwrap();
println!("get_by_id {}: {}", id, r.len());
println!("{}: {:?}\n", id, r[0]);
println!("{}: {:?}\n", id, r[0].1[0]);

let c = CoreQueryParameters {
db: &db,
output_space: None,
threshold_volume: Some(std::f64::MAX),
view_port: &None,
resolution: None,
resolution: &None,
};
let r = core.get_by_label(&c, id).unwrap();
println!("get_by_label {}: {}", id, r.len());
if !r.is_empty() {
println!("{}: {:?}\n", id, r[0]);
println!("{}: {:?}\n", id, r[0].1[0]);
}

let lower = space.encode(&[0.2, 0.2, 0.2]).unwrap();
@@ -81,19 +81,45 @@ fn main() {
output_space: None,
threshold_volume: Some(0.0),
view_port: &None,
resolution: None,
resolution: &None,
};
let r = core.get_by_shape(&c, &shape, "std").unwrap();
println!("get_by_shape {:?}: {}", shape, r.len());
println!("{:?}: {:?}\n", shape, r[0]);
println!("{:?}: {:?}\n", shape, r[0].1[0]);

let a = r.iter().filter(|o| o.value.id() == id).collect::<Vec<_>>();
let a = r
.iter()
.filter_map(|(space, v)| {
let v = v
.iter()
.filter(|(_, properties)| properties.id() == id)
.collect::<Vec<_>>();
if v.is_empty() {
None
} else {
Some((space, v))
}
})
.collect::<Vec<_>>();
println!("get_by_shape A {:?} filtered on {}: {}", shape, id, a.len());
if !a.is_empty() {
println!("{:?}\n", a[0]);
println!("{:?}\n", a[0].1[0]);
}

let a = r.iter().filter(|o| o.value.id() != id).collect::<Vec<_>>();
let a = r
.iter()
.filter_map(|(space, v)| {
let v = v
.iter()
.filter(|(_, properties)| properties.id() != id)
.collect::<Vec<_>>();
if v.is_empty() {
None
} else {
Some((space, v))
}
})
.collect::<Vec<_>>();
println!(
"get_by_shape !A {:?} filtered on {}: {}",
shape,
@@ -101,16 +127,17 @@ fn main() {
a.len()
);
if !a.is_empty() {
println!("{:?}\n", a[0]);
println!("{:?}\n", a[0].1[0]);
}

println!(
"\nSPACE OBJECT:\n\n{}",
serde_json::to_string_pretty(space).unwrap()
);
//FIXME: Not returning SpatialObjects by default
println!(
"\nSPATIAL OBJECT:\n\n{}",
serde_json::to_string_pretty(a[0]).unwrap()
serde_json::to_string_pretty(&a[0]).unwrap()
);
}
}
