Move the JSON model to mercator_db, as it is shared.

This commit is contained in:
2019-09-11 10:24:44 +02:00
parent 9d2cb9293d
commit 29e78454a0
5 changed files with 7 additions and 320 deletions

7
.idea/workspace.xml generated
View File

@@ -13,12 +13,13 @@
</component>
<component name="ChangeListManager">
<list default="true" id="4efa641e-9b05-442b-ba82-4d7003bc775c" name="Default Changelist" comment="">
<change beforePath="$PROJECT_DIR$/.gitignore" beforeDir="false" afterPath="$PROJECT_DIR$/.gitignore" afterDir="false" />
<change beforePath="$PROJECT_DIR$/.idea/workspace.xml" beforeDir="false" afterPath="$PROJECT_DIR$/.idea/workspace.xml" afterDir="false" />
<change beforePath="$PROJECT_DIR$/Cargo.toml" beforeDir="false" afterPath="$PROJECT_DIR$/Cargo.toml" afterDir="false" />
<change beforePath="$PROJECT_DIR$/Cargo.lock" beforeDir="false" />
<change beforePath="$PROJECT_DIR$/src/main.rs" beforeDir="false" afterPath="$PROJECT_DIR$/src/main.rs" afterDir="false" />
<change beforePath="$PROJECT_DIR$/src/model.rs" beforeDir="false" />
<change beforePath="$PROJECT_DIR$/src/rest_api/mod.rs" beforeDir="false" afterPath="$PROJECT_DIR$/src/rest_api/mod.rs" afterDir="false" />
<change beforePath="$PROJECT_DIR$/src/rest_api/space.rs" beforeDir="false" afterPath="$PROJECT_DIR$/src/rest_api/space.rs" afterDir="false" />
<change beforePath="$PROJECT_DIR$/src/rest_api/spatial_object.rs" beforeDir="false" afterPath="$PROJECT_DIR$/src/rest_api/spatial_object.rs" afterDir="false" />
<change beforePath="$PROJECT_DIR$/src/storage.rs" beforeDir="false" />
</list>
<option name="EXCLUDED_CONVERTED_TO_IGNORED" value="true" />
<option name="SHOW_DIALOG" value="false" />

View File

@@ -1,22 +1,17 @@
// WebService framework
//#![cfg_attr(feature = "cargo-clippy", allow(needless_pass_by_value))]
//extern crate actix;
//extern crate actix_web;
#[macro_use]
extern crate measure_time;
#[macro_use]
extern crate serde_derive;
mod model;
mod rest_api;
mod storage;
use std::process::exit;
use std::sync::Arc;
use std::sync::RwLock;
use mercator_db::json::model;
use mercator_db::json::storage;
use mercator_db::DataBase;
pub type SharedState = DataBase;

View File

@@ -1,214 +0,0 @@
use std::collections::HashMap;
use mercator_db::space;
use mercator_db::Core;
use mercator_db::DataBase;
use mercator_db::SpaceObject;
use mercator_db::SpaceSetObject;
/// JSON-facing definition of a reference space.
#[derive(Clone, Debug, Deserialize, Serialize)]
pub struct Space {
    /// Unique name identifying this reference space.
    pub name: String,
    /// Coordinates of the origin, one value per axis.
    pub origin: Vec<f64>,
    /// Axes defining the coordinate system.
    pub axes: Vec<Axis>,
}
/// JSON-facing definition of a single axis of a coordinate system.
#[derive(Clone, Debug, Deserialize, Serialize)]
pub struct Axis {
    /// Unit in which positions on this axis are measured.
    pub measurement_unit: String,
    /// Discretisation of the axis (number set, bounds, step count).
    pub graduation: Graduation,
    /// Direction of the axis, expressed as a vector.
    pub unit_vector: Vec<f64>,
}
/// Discretisation of an axis.
#[derive(Clone, Debug, Deserialize, Serialize)]
pub struct Graduation {
    /// Name of the number set; converted with `.into()` when building a
    /// `space::Axis`.
    pub set: String,
    /// Smallest coordinate value on the axis.
    pub minimum: f64,
    /// Largest coordinate value on the axis.
    pub maximum: f64,
    /// Number of discrete steps between minimum and maximum.
    pub steps: u64,
}
/// JSON-facing spatial object: a set of shapes sharing one set of properties.
#[derive(Clone, Debug, Deserialize, Serialize)]
pub struct SpatialObject {
    /// Identifying properties shared by all the shapes below.
    pub properties: Properties,
    /// Geometries belonging to this object.
    pub shapes: Vec<Shape>,
}
/// A single geometry, expressed in a given reference space.
#[derive(Clone, Debug, Deserialize, Serialize)]
pub struct Shape {
    /// Geometry kind; only "Point" is accepted by `build_index`.
    #[serde(rename = "type")]
    pub type_name: String,
    /// Name of the reference space the vertices are expressed in.
    #[serde(rename = "space")]
    pub reference_space: String,
    /// Vertices of the shape, as coordinate vectors.
    pub vertices: Vec<Point>,
}

/// A position: one coordinate per axis of the reference space.
type Point = Vec<f64>;
/// Identifying properties of a spatial object.
#[derive(Clone, Debug, Deserialize, Serialize)]
pub struct Properties {
    /// Kind of object; "Feature" is the value produced by `to_spatial_objects`.
    #[serde(rename = "type")]
    pub type_name: String,
    /// Unique identifier of the object.
    pub id: String,
}
impl From<&space::Graduation> for Graduation {
    /// Builds the JSON-facing graduation from the database representation.
    fn from(graduation: &space::Graduation) -> Self {
        Self {
            set: graduation.set.clone().into(),
            steps: graduation.steps,
            minimum: graduation.minimum,
            maximum: graduation.maximum,
        }
    }
}
impl From<Axis> for space::Axis {
    /// Converts the JSON-facing axis into the database representation.
    ///
    /// # Panics
    /// Panics if `space::Axis::new` rejects the definition, reporting the
    /// underlying error in the message.
    fn from(axis: Axis) -> Self {
        let g = axis.graduation;
        space::Axis::new(
            axis.measurement_unit,
            axis.unit_vector,
            g.set.into(),
            g.minimum,
            g.maximum,
            g.steps,
        )
        .unwrap_or_else(|e| panic!("Unable to create Axis as defined: {}", e))
    }
}
impl From<&space::Axis> for Axis {
    /// Builds the JSON-facing axis from the database representation.
    fn from(source: &space::Axis) -> Self {
        let measurement_unit = source.measurement_unit().clone();
        let graduation = source.graduation().into();
        let unit_vector = source.unit_vector().into();

        Self {
            measurement_unit,
            graduation,
            unit_vector,
        }
    }
}
impl From<&Space> for space::Space {
    /// Converts the JSON-facing space into the database representation.
    fn from(space: &Space) -> Self {
        let mut axes = Vec::with_capacity(space.axes.len());
        for axis in &space.axes {
            axes.push(axis.clone().into());
        }

        let system = space::CoordinateSystem::new(space.origin.clone(), axes);
        space::Space::new(&space.name, system)
    }
}
impl From<&space::Space> for Space {
    /// Builds the JSON-facing space from the database representation.
    fn from(source: &space::Space) -> Self {
        let mut axes = Vec::new();
        for axis in source.axes().iter() {
            axes.push(axis.into());
        }

        Self {
            name: source.name().clone(),
            origin: source.origin().into(),
            axes,
        }
    }
}
/// Regroups a flat list of `SpaceObject`s into JSON-facing `SpatialObject`s.
///
/// Objects are grouped by their id; each group's positions are decoded into
/// one "Point" shape per object, expressed in that object's space. Objects
/// whose space cannot be resolved, or whose position cannot be decoded, are
/// silently skipped.
pub fn to_spatial_objects(db: &DataBase, list: Vec<SpaceObject>) -> Vec<SpatialObject> {
    // Filter per Properties, in order to regroup by it, then build a single SpatialObject per Properties.
    let mut properties = HashMap::new();
    for object in list {
        let k = object.value.id().clone();
        properties.entry(k).or_insert_with(Vec::new).push(object);
    }

    let mut results = Vec::with_capacity(properties.len());
    for (k, v) in properties.iter() {
        // Group by spaces, to collect points shapes together
        let shapes = v
            .iter()
            .filter_map(|o| {
                // Skip objects whose space is unknown or whose position
                // cannot be decoded in that space.
                let space = db.space(&o.space_id).ok()?;
                let vertices = space.decode(&o.position).ok()?;
                Some(Shape {
                    type_name: "Point".to_string(),
                    reference_space: o.space_id.clone(),
                    vertices: vec![vertices],
                })
            })
            .collect();

        results.push(SpatialObject {
            properties: Properties {
                type_name: "Feature".to_string(),
                id: k.to_string(),
            },
            shapes,
        });
    }

    results
}
/// Builds index `Core`s from JSON-facing spaces and spatial objects.
///
/// Returns a single `Core` named `name`, versioned "v0.1".
///
/// # Panics
/// Panics if any shape is not a "Point", or if a referenced Properties id
/// cannot be found after sorting (an internal invariant violation).
pub fn build_index(name: &str, spaces: &[space::Space], objects: &[SpatialObject]) -> Vec<Core> {
    let mut properties = vec![];
    let mut space_set_objects = vec![];
    let mut properties_ref = vec![];

    {
        // Deduplicate Properties by id, while recording, per object, the
        // position of its id within `properties_ref`.
        let mut properties_hm = HashMap::new();
        for object in objects {
            let id = object.properties.id.as_str();

            // Entry API: one lookup instead of a get-then-insert pair, and it
            // removes the duplicated push/len bookkeeping of the two arms.
            properties_hm
                .entry(id)
                .or_insert_with(|| mercator_db::Properties::Feature(object.properties.id.clone()));
            properties_ref.push(id);
            let value = properties_ref.len() - 1;

            for point in &object.shapes {
                // Only point shapes are supported by this indexer.
                assert_eq!(point.type_name, "Point");
                space_set_objects.push(SpaceSetObject::new(
                    &point.reference_space,
                    point.vertices[0].clone().into(),
                    value.into(),
                ))
            }
        }
        properties.extend(properties_hm.drain().map(|(_, v)| v));
    }

    // Re-map each object's temporary value (an index into `properties_ref`)
    // to the final position of its Properties in the sorted vector.
    properties.sort_unstable_by_key(|p| p.id().clone());
    space_set_objects.iter_mut().for_each(|object| {
        let id = properties_ref[object.value().u64() as usize];
        let value = properties.binary_search_by_key(&id, |p| p.id()).unwrap();
        object.set_value(value.into());
    });

    vec![Core::new(
        name,
        "v0.1",
        spaces,
        properties,
        space_set_objects,
    )]
}

View File

@@ -32,7 +32,7 @@ pub fn get(
let db = state.state().shared.read().unwrap();
match db.core(core) {
Ok(core) => match core.get_by_id(&id, 0.0) {
Ok(core) => match core.get_by_id(&db, &id, None, 0.0) {
Ok(objects) => ok_200(&to_spatial_objects(&db, objects)),
Err(_) => error_404(),
},

View File

@@ -1,95 +0,0 @@
use std::fs::File;
use std::io::BufWriter;
use memmap::Mmap;
use mercator_db::DataBase;
use serde::de::DeserializeOwned;
use serde::Serialize;
use crate::model;
/// Converts a JSON file into its bincode serialisation.
///
/// Reads `from` through a memory map, deserialises it as `T`, then writes the
/// bincode encoding to `to` through a buffered writer.
///
/// # Panics
/// Panics with a descriptive message if any step fails.
pub fn from_json<T>(from: &str, to: &str)
where
    T: Serialize + DeserializeOwned,
{
    let file_in =
        File::open(from).unwrap_or_else(|e| panic!("Unable to read file: {}: {}", from, e));
    let file_out =
        File::create(to).unwrap_or_else(|e| panic!("Unable to create file: {}: {}", to, e));

    // We create a buffered writer from the file we get
    let writer = BufWriter::new(&file_out);

    let mmap = unsafe {
        // SAFETY: the mapped file must not be mutated concurrently.
        Mmap::map(&file_in)
            .unwrap_or_else(|e| panic!("Unable to map in memory the file: {}: {}", from, e))
    };

    let v: T = serde_json::from_slice(&mmap[..])
        .unwrap_or_else(|e| panic!("Unable to parse the json data from: {}: {}", from, e));

    // Report which file failed, consistently with every other error path here
    // (this was a bare `unwrap()`).
    bincode::serialize_into(writer, &v)
        .unwrap_or_else(|e| panic!("Unable to serialize to: {}: {}", to, e));
}
//FIXME: Move to ironsea_store?
/// Loads a `T` from the bincode file at `from`, via a memory map.
///
/// # Panics
/// Panics with a descriptive message if the file cannot be opened, mapped or
/// deserialised.
pub fn load<T>(from: &str) -> T
where
    T: DeserializeOwned,
{
    let file_in =
        File::open(from).unwrap_or_else(|e| panic!("Unable to read file: {}: {}", from, e));

    let mmap = unsafe {
        // SAFETY: the mapped file must not be mutated concurrently.
        Mmap::map(&file_in)
            .unwrap_or_else(|e| panic!("Unable to map in memory the file: {}: {}", from, e))
    };

    // The input here is bincode, not JSON: the old message claimed "json data".
    bincode::deserialize(&mmap[..])
        .unwrap_or_else(|e| panic!("Unable to parse the binary data from: {}: {}", from, e))
}
//FIXME: Move to ironsea_store?
/// Serialises `data` as bincode into the file at `to`.
///
/// # Panics
/// Panics with a descriptive message if the file cannot be created or written.
pub fn store<T>(data: T, to: &str)
where
    T: Serialize,
{
    let file_out =
        File::create(to).unwrap_or_else(|e| panic!("Unable to create file: {}: {}", to, e));

    // We create a buffered writer from the file we get
    let writer = BufWriter::new(&file_out);

    // Report which file failed, consistently with every other error path here
    // (this was a bare `unwrap()`).
    bincode::serialize_into(writer, &data)
        .unwrap_or_else(|e| panic!("Unable to serialize to: {}: {}", to, e));
}
/// Converts a dataset's JSON inputs into their bincode form.
///
/// Expects `<name>.spaces.json` and `<name>.objects.json`, and produces
/// `<name>.spaces.bin` and `<name>.objects.bin` next to them.
pub fn convert(name: &str) {
    // Convert Reference Space definitions
    from_json::<Vec<model::Space>>(
        &format!("{}.spaces.json", name),
        &format!("{}.spaces.bin", name),
    );

    // Convert Spatial Objects
    from_json::<Vec<model::SpatialObject>>(
        &format!("{}.objects.json", name),
        &format!("{}.objects.bin", name),
    );
}
/// Builds the binary index file for the dataset `name`.
///
/// Loads `<name>.spaces.bin` and `<name>.objects.bin`, builds the index
/// cores, and stores the resulting `DataBase` into `<name>.index`.
pub fn build(name: &str) {
    let fn_spaces = format!("{}.spaces.bin", name);
    let fn_objects = format!("{}.objects.bin", name);
    let fn_index = format!("{}.index", name);

    let spaces = load::<Vec<model::Space>>(&fn_spaces)
        .iter()
        .map(|s| s.into())
        .collect::<Vec<_>>();

    let cores = model::build_index(
        // `name` is already a `&str`; `&name` was a needless extra borrow.
        name,
        &spaces,
        &load::<Vec<model::SpatialObject>>(&fn_objects),
    );

    store(DataBase::new(spaces, cores), &fn_index);
}