Compare commits

6 Commits

| Author | SHA1 | Date |
|---|---|---|
| | 521f40e36e | |
| | c14c6cb91a | |
| | 2566cac17f | |
| | 8a00180eb0 | |
| | 8492fed85e | |
| | 69fbc9fdd8 | |
Cargo.toml (19 changed lines)

```diff
@@ -29,23 +29,24 @@ name = "parser-driver"
 path = "src/main.rs"
 required-features = ["bin"]

+[profile.release]
+lto = true

 [features]
 bin = ["measure_time", "pretty_env_logger"]

 [dependencies]
-mercator_db = "^0.1"
+mercator_db = "0.1"

-lalrpop-util = "^0.17"
+lalrpop-util = "0.20"
-regex = "^1.2"

 # Logging macros API
-#log = { version = "^0.4", features = ["max_level_trace", "release_max_level_info"] }
+#log = { version = "0.4", features = ["max_level_trace", "release_max_level_info"] }
-log = { version = "^0.4", features = ["max_level_trace", "release_max_level_trace"] }
+log = { version = "0.4", features = ["max_level_trace", "release_max_level_trace"] }

 # Used for main.rs
-pretty_env_logger = { version = "^0.3", optional = true } # Logger implementation
+pretty_env_logger = { version = "0.5", optional = true } # Logger implementation
-measure_time = { version = "^0.6", optional = true } # To mesure parsing time, only required by binary
+measure_time = { version = "0.8", optional = true } # To mesure parsing time, only required by binary

 [build-dependencies]
-lalrpop = "^0.17.1"
+lalrpop = "0.20"
```
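Cargo resolves a bare requirement such as `"0.20"` with caret semantics, so dropping the explicit `^` does not change what is accepted; the substantive changes are the newer `lalrpop`, `pretty_env_logger`, and `measure_time` releases, the removed `regex` dependency, and the new LTO release profile. The two binary-only dependencies stay `optional` behind the `bin` feature, which the `required-features = ["bin"]` target relies on. A minimal, hypothetical sketch of how such a feature gate is typically consumed in code (not code from this repository):

```rust
// Hypothetical sketch: with `required-features = ["bin"]` the whole binary target is
// gated, but the same optional dependency could also be gated per item with cfg.
#[cfg(feature = "bin")]
fn init_cli_logging() {
    // Compiled only when the `bin` feature (and therefore pretty_env_logger) is enabled.
    pretty_env_logger::init();
}

#[cfg(not(feature = "bin"))]
fn init_cli_logging() {
    // No-op when the logger implementation is not pulled in.
}

fn main() {
    init_cli_logging();
}
```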
rust-toolchain.toml (new file, 2 lines)

```diff
@@ -0,0 +1,2 @@
+[toolchain]
+channel = "1.80.0"
```
src/executors.rs (631 changed lines)

```diff
@@ -1,370 +1,358 @@
 use std::collections::{HashMap, HashSet};
+use std::rc::Rc;

 use mercator_db::space;
 use mercator_db::Core;
 use mercator_db::CoreQueryParameters;
-use mercator_db::Properties;
+use mercator_db::IterObjects;
+use mercator_db::IterObjectsBySpaces;

 use super::expressions::*;
 use super::symbols::*;

-impl From<&LiteralPosition> for space::Position {
+fn group_by_space<'s>(
-fn from(literal: &LiteralPosition) -> Self {
+list: IterObjectsBySpaces<'s>,
-let v: Vec<f64> = literal.into();
+) -> Box<dyn Iterator<Item = (&'s String, IterObjects<'s>)> + 's> {
-v.into()
+// Filter per Properties, in order to regroup by it, then build
+// a single SpatialObject per Properties.
+let mut hashmap = HashMap::new();
+for (space, objects) in list {
+hashmap.entry(space).or_insert_with(Vec::new).push(objects);
 }

+Box::new(hashmap.into_iter().map(|(space, objects)| {
+let objects: IterObjects = Box::new(objects.into_iter().flatten());
+(space, objects)
+}))
 }

-impl From<&LiteralNumber> for space::Coordinate {
+fn distinct_helper(list: IterObjectsBySpaces) -> IterObjectsBySpaces {
-fn from(literal: &LiteralNumber) -> Self {
+// Make sure to collect all objects iterators per space, so that
-match literal {
+// each space appears only once.
-LiteralNumber::Float(f) => (*f).into(),
+group_by_space(list)
-LiteralNumber::Int(i) => (*i as u64).into(),
+// We would lose some objects otherwise when creating the
-}
+// HashMaps. Also this makes sure to keep the values are unique.
-}
+.map(|(space, iter)| {
+let uniques: HashSet<_> = iter.collect();
+let uniques: IterObjects = Box::new(uniques.into_iter());
+(space, uniques)
+})
+.collect()
 }

-fn complement_helper<'c>(
+fn into_positions_hashset(
-core: &'c Core,
+objects_by_spaces: IterObjectsBySpaces,
-parameters: &CoreQueryParameters<'c>,
+) -> HashMap<&String, Rc<HashSet<space::Position>>> {
-space_id: &str,
+// Make sure to collect all objects iterators per space, so that
-inside: Vec<(&'c String, Vec<(space::Position, &'c Properties)>)>,
+// each space appears only once.
-) -> mercator_db::ResultSet<'c> {
+group_by_space(objects_by_spaces)
+// We would lose some objects otherwise when creating the HashSets.
+.map(|(space, iter)| {
+let hash_set: HashSet<_> = iter.map(|(position, _)| position).collect();
+(space, Rc::new(hash_set))
+})
+.collect::<HashMap<_, _>>()
+}

+// Strictly not inside nor on the surface.
+// TODO: inside must contains the valid positions in all expected spaces
+fn complement_helper<'h>(
+core: &'h Core,
+parameters: &'h CoreQueryParameters<'h>,
+space_id: &'h str,
+inside: IterObjectsBySpaces<'h>,
+) -> mercator_db::ResultSet<'h> {
 let (low, high) = parameters.db.space(space_id)?.bounding_box();
-match core.get_by_shape(parameters, &space::Shape::BoundingBox(low, high), space_id) {
+let inside = into_positions_hashset(inside);
-e @ Err(_) => e,
+let points = core.get_by_shape(parameters, space::Shape::BoundingBox(low, high), space_id)?;
-Ok(points) => {
-let hashmap = inside.into_iter().collect::<HashMap<_, _>>();

-Ok(points
+let results = points
 .into_iter()
-.filter_map(|(space, v)| match hashmap.get(space) {
+.filter_map(move |(space, v)| match inside.get(space) {
-None => None,
+None => None, // Space not found, so no point might exist!
-Some(list) => {
+Some(volume) => {
-Some((space, v.into_iter().filter(|t| !list.contains(t)).collect()))
+let volume = volume.clone();
-}
+let iter: IterObjects = Box::new(v.filter(move |a| !volume.contains(&a.0)));
-})
-.collect::<Vec<_>>())
+Some((space, iter))
 }
-}
+})
+.collect();
+Ok(results)
 }

-fn view_port<'c>(
+// Intersection based only on spatial positions!
-core_id: &str,
+fn intersect_helper<'h>(
-parameters: &CoreQueryParameters<'c>,
+smaller: IterObjectsBySpaces<'h>,
-bag: &Bag,
+bigger: IterObjectsBySpaces<'h>,
-) -> mercator_db::ResultSet<'c> {
+) -> IterObjectsBySpaces<'h> {
-if let Some((low, high)) = parameters.view_port {
+let smaller = into_positions_hashset(smaller);
-let vp = Bag::Inside(Shape::HyperRectangle(
-bag.space().clone(),
-vec![low.into(), high.into()],
-));
-intersection(core_id, parameters, &vp, bag)
-} else {
-bag.execute(core_id, parameters)
-}
-}

-fn distinct<'c>(
+bigger
-core_id: &str,
+.into_iter()
-parameters: &CoreQueryParameters<'c>,
+.filter_map(
-bag: &Bag,
+move |(space, bigger_object_iter)| match smaller.get(space) {
-) -> mercator_db::ResultSet<'c> {
+None => None,
-match bag.execute(core_id, parameters) {
+Some(volume) => {
-e @ Err(_) => e,
+let volume = volume.clone();
-Ok(mut v) => {
+let filtered: IterObjects =
-let set: HashSet<_> = v.drain(..).collect(); // dedup
+Box::new(bigger_object_iter.filter(move |a| volume.contains(&a.0)));
-v.extend(set.into_iter());

-Ok(v)
-}
-}
-}
-fn filter_helper<'c>(
-predicate: &Predicate,
-bag: &Bag,
-core_id: &str,
-parameters: &CoreQueryParameters<'c>,
-) -> mercator_db::ResultSet<'c> {
-match bag.execute(core_id, parameters) {
-e @ Err(_) => e,
-Ok(results) => Ok(results
-.into_iter()
-.filter_map(|(space, positions)| {
-let filtered = positions
-.into_iter()
-.filter(|(position, properties)| predicate.eval((space, position, properties)))
-.collect::<Vec<_>>();
-if filtered.is_empty() {
-None
-} else {
 Some((space, filtered))
 }
-})
+},
-.collect::<Vec<_>>()),
+)
-}
+.collect()
 }

-fn filter<'c>(
+impl Bag {
-core_id: &str,
+fn distinct<'b>(
-parameters: &CoreQueryParameters<'c>,
+&'b self,
-predicate: &Option<Predicate>,
+core_id: &'b str,
-bag: &Option<Box<Bag>>,
+parameters: &'b CoreQueryParameters<'b>,
-) -> mercator_db::ResultSet<'c> {
+) -> mercator_db::ResultSet<'b> {
-match predicate {
+let results = self.execute(core_id, parameters)?;
-None => {
-if let Some(bag) = bag {
-bag.execute(core_id, parameters)
-} else {
-Err("Filter without predicate nor data set.".to_string())
-}
-}
-Some(predicate) => match bag {
-None => {
-let (low, high) = space::Space::universe().bounding_box();
-let low: Vec<_> = low.into();
-let high: Vec<_> = high.into();
-let shape = Shape::HyperRectangle(
-space::Space::universe().name().clone(),
-vec![
-LiteralPosition(
-low.into_iter()
-.map(LiteralNumber::Float)
-.collect::<Vec<_>>(),
-),
-LiteralPosition(
-high.into_iter()
-.map(LiteralNumber::Float)
-.collect::<Vec<_>>(),
-),
-],
-);
-filter_helper(predicate, &Bag::Inside(shape), core_id, parameters)
-}
-Some(bag) => filter_helper(predicate, bag.as_ref(), core_id, parameters),
-},
-}
-}

-fn complement<'c>(
+Ok(distinct_helper(results))
-core_id: &str,
+}
-parameters: &CoreQueryParameters<'c>,
-core: &'c Core,
+fn complement<'b>(
-bag: &Bag,
+&'b self,
-) -> mercator_db::ResultSet<'c> {
+core_id: &'b str,
-match bag.execute(core_id, parameters) {
+parameters: &'b CoreQueryParameters<'b>,
-// FIXME: The complement of a set is computed within its definition space.
+core: &'b Core,
-e @ Err(_) => e,
+) -> mercator_db::ResultSet<'b> {
-Ok(inside) => complement_helper(
+let inside = self.execute(core_id, parameters)?;

+// FIXME: The complement of a set should be computed within its
+// definition space. We don't know here so we use universe
+complement_helper(
 core,
 parameters,
 mercator_db::space::Space::universe().name(),
 inside,
-),
+)
+}

+fn intersection<'b>(
+&'b self,
+core_id: &'b str,
+parameters: &'b CoreQueryParameters<'b>,
+rh: &'b Bag,
+) -> mercator_db::ResultSet<'b> {
+let left = self.execute(core_id, parameters)?;
+let right = rh.execute(core_id, parameters)?;

+let v = if rh.predict(parameters.db) < self.predict(parameters.db) {
+intersect_helper(right, left)
+} else {
+intersect_helper(left, right)
+};

+Ok(v)
+}

+fn union<'b>(
+&'b self,
+core_id: &'b str,
+parameters: &'b CoreQueryParameters<'b>,
+rh: &'b Bag,
+) -> mercator_db::ResultSet<'b> {
+let mut left = self.execute(core_id, parameters)?;
+let mut right = rh.execute(core_id, parameters)?;

+let union = if rh.predict(parameters.db) < self.predict(parameters.db) {
+left.append(&mut right);
+left
+} else {
+right.append(&mut left);
+right
+};

+Ok(union)
+}

+fn filter<'b>(
+&'b self,
+predicate: &'b Predicate,
+core_id: &'b str,
+parameters: &'b CoreQueryParameters<'b>,
+) -> mercator_db::ResultSet<'b> {
+let results = self.execute(core_id, parameters)?;

+Ok(results
+.into_iter()
+.map(move |(space, positions)| {
+let positions = positions.collect::<Vec<_>>();
+(
+space,
+Box::new(positions.into_iter().filter(move |(position, properties)| {
+predicate.eval((space, position, properties))
+})) as IterObjects,
+)
+})
+.collect())
 }
 }

-fn intersection<'c>(
+impl Shape {
-core_id: &str,
+fn inside<'s>(
-parameters: &CoreQueryParameters<'c>,
+&'s self,
-rh: &Bag,
+parameters: &'s CoreQueryParameters<'s>,
-lh: &Bag,
+core: &'s Core,
-) -> mercator_db::ResultSet<'c> {
+) -> mercator_db::ResultSet<'s> {
-let l = lh.execute(core_id, parameters);
+let db = parameters.db;
-if let Ok(l) = l {
+let param = match self {
-let r = rh.execute(core_id, parameters);
+Shape::Point(space_id, position) => {
-if let Ok(r) = r {
+let space = db.space(space_id)?;
-let mut v = vec![];
+let position: Vec<f64> = position.into();
+let position = space.encode(&position)?;
+Ok((space_id, space::Shape::Point(position)))
+}
+Shape::HyperRectangle(space_id, bounding_box) => {
+if bounding_box.len() != 2 {
+//FIXME: Support arbitrary HyperRectangles
+Err(
+"The number of position is different from 2, which is unsupported."
+.to_string(),
+)
+} else {
+let space = db.space(space_id)?;
+let low: Vec<f64> = (&bounding_box[0]).into();
+let high: Vec<f64> = (&bounding_box[1]).into();
+let low = space.encode(&low)?;
+let high = space.encode(&high)?;

-if rh.predict(parameters.db) < lh.predict(parameters.db) {
+Ok((space_id, space::Shape::BoundingBox(low, high)))
-for o in r {
-if l.contains(&o) {
-v.push(o);
-}
-}
-} else {
-for o in l {
-if r.contains(&o) {
-v.push(o);
-}
 }
 }
-Ok(v)
+Shape::HyperSphere(space_id, position, radius) => {
-} else {
+let space = db.space(space_id)?;
-r
+let position: Vec<f64> = position.into();
+let position = space.encode(&position)?;

+// We have to provide a position with all the dimensions
+// for the encoding to work as expected.
+let mut r = vec![0f64; position.dimensions()];
+r[0] = radius.into();
+let radius = space.encode(&r)?[0];

+Ok((space_id, space::Shape::HyperSphere(position, radius)))
+}
+Shape::Label(_, id) => {
+// Not a real shape, so short circuit and return.
+return core.get_by_label(parameters, id);
+}
+Shape::Nifti(_space_id) => Err("Inside-Nifti: not yet implemented".to_string()),
+};

+match param {
+Ok((space_id, shape)) => core.get_by_shape(parameters, shape, space_id),
+Err(e) => Err(e),
 }
-} else {
+}
-l
+fn outside<'s>(
+&'s self,
+parameters: &'s CoreQueryParameters<'s>,
+core: &'s Core,
+) -> mercator_db::ResultSet<'s> {
+let (space_id, inside) = match self {
+Shape::Point(space_id, position) => {
+let position: Vec<f64> = position.into();
+let positions = vec![position.into()];
+let inside = core.get_by_positions(parameters, positions, space_id)?;

+Ok((space_id, inside))
+}
+Shape::HyperRectangle(space_id, bounding_box) => {
+// We need to adapt the bounding_box to ensure the
+// surface will not hit as part of the inside set, so we
+// compute the biggest bounding box contained within the
+// given box.

+// Smallest increment possible
+let mut increment = Vec::with_capacity(bounding_box[0].dimensions());
+for _ in 0..bounding_box[0].dimensions() {
+increment.push(f64::EPSILON);
+}

+// Add it to the lower bound
+let mut low: space::Position = (&bounding_box[0]).into();
+low += increment.clone().into();

+// Substract it from the upper bound
+let mut high: space::Position = (&bounding_box[1]).into();
+high -= increment.into();

+let inside =
+core.get_by_shape(parameters, space::Shape::BoundingBox(low, high), space_id)?;

+Ok((space_id, inside))
+}
+Shape::HyperSphere(space_id, center, radius) => {
+// Smallest decrement possible, to exclude the surface
+let mut radius: f64 = radius.into();
+radius -= f64::EPSILON;
+let center: space::Position = center.into();

+let inside = core.get_by_shape(
+parameters,
+space::Shape::HyperSphere(center, radius.into()),
+space_id,
+)?;

+Ok((space_id, inside))
+}
+Shape::Label(space_id, id) => {
+let inside = core.get_by_label(parameters, id)?;

+Ok((space_id, inside))
+}
+Shape::Nifti(_space_id) => Err("Outside-nifti: not yet implemented".to_string()),
+}?;

+complement_helper(core, parameters, space_id, inside)
 }
 }

-fn union<'c>(
+fn filter<'c>(
-core_id: &str,
+core_id: &'c str,
-parameters: &CoreQueryParameters<'c>,
+parameters: &'c CoreQueryParameters<'c>,
-rh: &Bag,
+predicate: &'c Option<Predicate>,
-lh: &Bag,
+bag: &'c Bag,
 ) -> mercator_db::ResultSet<'c> {
-let l = lh.execute(core_id, parameters);
+match predicate {
-if let Ok(mut l) = l {
+None => bag.execute(core_id, parameters),
-let r = rh.execute(core_id, parameters);
+Some(predicate) => bag.filter(predicate, core_id, parameters),
-if let Ok(mut r) = r {
-if rh.predict(parameters.db) < lh.predict(parameters.db) {
-l.append(&mut r);
-Ok(l)
-} else {
-r.append(&mut l);
-Ok(r)
-}
-} else {
-r
-}
-} else {
-l
 }
 }

 fn bag<'c>(
-core_id: &str,
+core_id: &'c str,
-parameters: &CoreQueryParameters<'c>,
+parameters: &'c CoreQueryParameters<'c>,
-bags: &[Bag],
+bags: &'c [Bag],
 ) -> mercator_db::ResultSet<'c> {
-let mut v = vec![];
+let mut results = Vec::new();
 for bag in bags {
-let b = bag.execute(core_id, parameters);
+let mut result = bag.execute(core_id, parameters)?;
-match b {
+results.append(&mut result);
-e @ Err(_) => {
-return e;
-}
-Ok(mut b) => {
-v.append(&mut b);
-}
-}
 }

-Ok(v)
+Ok(results)
-}

-fn inside<'c>(
-parameters: &CoreQueryParameters<'c>,
-core: &'c Core,
-shape: &Shape,
-) -> mercator_db::ResultSet<'c> {
-let db = parameters.db;
-let param = match shape {
-Shape::Point(space_id, position) => {
-let space = db.space(space_id)?;
-let position: Vec<f64> = position.into();
-let position = space.encode(&position)?;
-Ok((space_id, space::Shape::Point(position)))
-}
-Shape::HyperRectangle(space_id, bounding_box) => {
-if bounding_box.len() != 2 {
-Err("The number of position is different from 2, which is unsupported.".to_string())
-} else {
-let space = db.space(space_id)?;
-let low: Vec<f64> = (&bounding_box[0]).into();
-let high: Vec<f64> = (&bounding_box[1]).into();
-let low = space.encode(&low)?;
-let high = space.encode(&high)?;

-Ok((space_id, space::Shape::BoundingBox(low, high)))
-}
-}
-Shape::HyperSphere(space_id, position, radius) => {
-let space = db.space(space_id)?;
-let position: Vec<f64> = position.into();
-let position = space.encode(&position)?;
-let mut r = vec![];
-for _ in 0..position.dimensions() {
-r.push(radius.into());
-}
-let radius = space.encode(&r)?[0];

-//FIXME: RADIUS IS A LENGTH, HOW TO ENCODE IT INTO THE SPACE?
-Ok((space_id, space::Shape::HyperSphere(position, radius)))
-}
-Shape::Label(_, id) => {
-// Not a real shape, so short circuit and return.
-return core.get_by_label(parameters, id);
-}
-Shape::Nifti(_space_id) => Err("Inside-Nifti: not yet implemented".to_string()),
-};

-match param {
-Ok((space_id, shape)) => core.get_by_shape(parameters, &shape, space_id),
-Err(e) => Err(e),
-}
-}

-fn outside<'c>(
-parameters: &CoreQueryParameters<'c>,
-core: &'c Core,
-shape: &Shape,
-) -> mercator_db::ResultSet<'c> {
-match shape {
-Shape::Point(space_id, position) => {
-let position: Vec<f64> = position.into();
-match core.get_by_positions(parameters, &[position.into()], space_id) {
-e @ Err(_) => e,
-Ok(inside) => complement_helper(core, parameters, space_id, inside),
-}
-}
-Shape::HyperRectangle(space_id, bounding_box) => {
-// We need to adapt the bounding_box to ensure the
-// surface will not hit as part of the inside set, so we
-// compute the biggest bounding box contained within the
-// given box.

-// Smallest increment possible
-let mut increment = Vec::with_capacity(bounding_box[0].dimensions());
-for _ in 0..bounding_box[0].dimensions() {
-increment.push(std::f64::EPSILON);
-}

-// Add it to the lower bound
-let mut low: space::Position = (&bounding_box[0]).into();
-low += increment.clone().into();

-// Substract it from the upper bound
-let mut high: space::Position = (&bounding_box[1]).into();
-high -= increment.into();

-match core.get_by_shape(parameters, &space::Shape::BoundingBox(low, high), space_id) {
-e @ Err(_) => e,
-Ok(inside) => complement_helper(core, parameters, space_id, inside),
-}
-}
-Shape::HyperSphere(space_id, center, radius) => {
-// Smallest decrement possible, to exclude the surface
-let mut radius: f64 = radius.into();
-radius -= std::f64::EPSILON;
-let center: space::Position = center.into();

-match core.get_by_shape(
-parameters,
-&space::Shape::HyperSphere(center, radius.into()),
-space_id,
-) {
-e @ Err(_) => e,
-Ok(inside) => complement_helper(core, parameters, space_id, inside),
-}
-}
-Shape::Label(_, _) => Err("Label: not yet implemented".to_string()),
-Shape::Nifti(_space_id) => Err("Outside-nifti: not yet implemented".to_string()),
-}
 }

 impl<'e> Executor<'e> for Projection {
 type ResultSet = mercator_db::ResultSet<'e>;

-fn execute<'f: 'e>(
+fn execute(
-&self,
+&'e self,
-core_id: &str,
+core_id: &'e str,
-parameters: &CoreQueryParameters<'f>,
+parameters: &'e CoreQueryParameters<'e>,
 ) -> Self::ResultSet {
 match self {
 Projection::Nifti(_, _, _bag) => Err("Proj-Nifti: not yet implemented".to_string()),
-Projection::JSON(_, _format, bag) => {
+Projection::Json(_, _format, bag) => {
 bag.execute(core_id, parameters)
 // FIXME: Add projections here
 }
```
```diff
@@ -375,27 +363,26 @@ impl<'e> Executor<'e> for Projection {
 impl<'e> Executor<'e> for Bag {
 type ResultSet = mercator_db::ResultSet<'e>;

-fn execute<'f: 'e>(
+fn execute(
-&self,
+&'e self,
-core_id: &str,
+core_id: &'e str,
-parameters: &CoreQueryParameters<'f>,
+parameters: &'e CoreQueryParameters<'e>,
 ) -> Self::ResultSet {
 let core = parameters.db.core(core_id)?;

 match self {
-Bag::ViewPort(bag) => view_port(core_id, parameters, bag),
+Bag::Distinct(bag) => bag.distinct(core_id, parameters),
-Bag::Distinct(bag) => distinct(core_id, parameters, bag),
 Bag::Filter(predicate, bag) => filter(core_id, parameters, predicate, bag),
-Bag::Complement(bag) => complement(core_id, parameters, core, bag),
+Bag::Complement(bag) => bag.complement(core_id, parameters, core),
-Bag::Intersection(lh, rh) => intersection(core_id, parameters, rh, lh),
+Bag::Intersection(lh, rh) => lh.intersection(core_id, parameters, rh),
-Bag::Union(lh, rh) => union(core_id, parameters, rh, lh),
+Bag::Union(lh, rh) => lh.union(core_id, parameters, rh),
 Bag::Bag(list) => bag(core_id, parameters, list),
-Bag::Inside(shape) => inside(parameters, core, shape),
+Bag::Inside(shape) => shape.inside(parameters, core),
 Bag::Outside(shape) => {
 //FIXME: This is currently computed as the complement of the values within the shape, except its surface.
 // Should this be instead a list of positions within the shape?
 //FIXME: Should we use the Shape's Space to get the maximum bounds or the output Space requested?
-outside(parameters, core, shape)
+shape.outside(parameters, core)
 }
 }
 }
```
```diff
@@ -14,10 +14,10 @@ pub trait Predictor {
 pub trait Executor<'e> {
 type ResultSet;

-fn execute<'f: 'e>(
+fn execute(
-&self,
+&'e self,
-core_id: &str,
+core_id: &'e str,
-parameters: &CoreQueryParameters<'f>,
+parameters: &'e CoreQueryParameters<'e>,
 ) -> Self::ResultSet;
 }
```
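The executors rewrite replaces eagerly collected `Vec`-based result sets with the boxed `IterObjects` / `IterObjectsBySpaces` iterators, and funnels them through a small set of helpers (`group_by_space`, `distinct_helper`, `into_positions_hashset`, `intersect_helper`). Below is a self-contained sketch of the grouping pattern those helpers share, written with plain generics instead of the mercator_db aliases; the names here are illustrative, not from the crate.

```rust
use std::collections::HashMap;
use std::hash::Hash;

// Standalone sketch of the `group_by_space` pattern from the diff: pairs that share
// a key are regrouped so each key yields one chained iterator over all its objects.
fn group_by_key<K, I>(
    pairs: Vec<(K, I)>,
) -> impl Iterator<Item = (K, impl Iterator<Item = I::Item>)>
where
    K: Hash + Eq,
    I: Iterator,
{
    let mut buckets: HashMap<K, Vec<I>> = HashMap::new();
    for (key, iter) in pairs {
        buckets.entry(key).or_insert_with(Vec::new).push(iter);
    }
    // Flatten every bucket into a single iterator, as the real code does with
    // `Box::new(objects.into_iter().flatten())`.
    buckets
        .into_iter()
        .map(|(key, iters)| (key, iters.into_iter().flatten()))
}

fn main() {
    let pairs = vec![
        ("space-a", vec![1, 2].into_iter()),
        ("space-a", vec![3].into_iter()),
        ("space-b", vec![4].into_iter()),
    ];
    for (space, objects) in group_by_key(pairs) {
        println!("{space}: {:?}", objects.collect::<Vec<_>>());
    }
}
```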
src/lib.rs (16 changed lines)

```diff
@@ -38,20 +38,20 @@ lalrpop_mod!(#[allow(clippy::all,unused_parens)] pub queries); // synthesized by
 // as well.
 // Instead we enable it per modules below, except for the tests.

-#[warn(missing_docs)]
+//#[warn(missing_docs)]
 mod evaluators;
-#[warn(missing_docs)]
+//#[warn(missing_docs)]
 mod executors;
-#[warn(missing_docs)]
+//#[warn(missing_docs)]
 mod expressions;
-#[warn(missing_docs)]
+//#[warn(missing_docs)]
 mod predictors;
-#[warn(missing_docs)]
+//#[warn(missing_docs)]
 mod validators;

-#[warn(missing_docs)]
+//#[warn(missing_docs)]
 mod symbols;
-#[warn(missing_docs)]
+//#[warn(missing_docs)]
 mod types;

 pub use expressions::Executor;
@@ -59,6 +59,8 @@ pub use expressions::Predictor;
 pub use expressions::Validator;
 pub use queries::FiltersParser;
 pub use queries::QueryParser;
+pub use symbols::Bag;
+pub use symbols::Projection;
 pub use validators::ValidationResult;

 #[cfg(test)]
```
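The two added re-exports make the parsed AST types reachable from outside the crate. A usage sketch, assuming the crate is consumed under the `mercator_parser` name that main.rs now uses; only APIs visible in this change set are referenced.

```rust
// Usage sketch, not part of the diff: with the new re-exports a consumer can name
// the AST types directly instead of reaching into private modules.
use mercator_parser::{Bag, Projection};

// Both types expose `space()` (see the src/symbols.rs changes below), so a caller
// can inspect the reference space of a parsed query.
fn spaces(projection: &Projection, bag: &Bag) -> (String, String) {
    (projection.space().clone(), bag.space().clone())
}
```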
src/main.rs (27 changed lines)

```diff
@@ -7,11 +7,11 @@ use std::io;

 use mercator_db::CoreQueryParameters;
 use mercator_db::DataBase;
-use parser::Executor;
+use mercator_parser::Executor;
-use parser::FiltersParser;
+use mercator_parser::FiltersParser;
-use parser::Predictor;
+use mercator_parser::Predictor;
-use parser::QueryParser;
+use mercator_parser::QueryParser;
-use parser::Validator;
+use mercator_parser::Validator;

 fn main() {
 // If RUST_LOG is unset, set it to INFO, otherwise keep it as-is.
@@ -93,12 +93,17 @@ fn main() {
 execute = t.execute(core, &parameters);
 }

-if let Ok(r) = execute {
+match execute {
-//let r = mercator_db::json::model::to_spatial_objects(r);
+Ok(r) => {
-info!("Execution: \n{:#?}", r);
+let r = r
-info!("NB results: {:?}", r.len());
+.into_iter()
-} else {
+.map(|(space, objects)| (space, objects.collect::<Vec<_>>()))
-info!("Execution: \n{:?}", execute);
+.collect::<Vec<_>>();

+info!("Execution: \n{:#?}", r);
+info!("NB results: {:?}", r[0].1.len());
+}
+Err(e) => info!("Execution: \n{:?}", e),
 }
 }
 }
```
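Because results are now `(space, iterator)` pairs, main.rs materialises them before logging; the boxed iterators can only be consumed once. A standalone sketch of that step with placeholder types; nothing below is taken verbatim from main.rs except the collect-per-space shape.

```rust
// Self-contained sketch of the materialisation step added in main.rs:
// each (space, lazy iterator) pair is collected into a Vec before being reported.
fn materialise<K, I>(results: Vec<(K, I)>) -> Vec<(K, Vec<I::Item>)>
where
    I: Iterator,
{
    results
        .into_iter()
        .map(|(space, objects)| (space, objects.collect::<Vec<_>>()))
        .collect()
}

fn main() {
    let results = vec![("space-a", vec![1, 2, 3].into_iter())];
    let materialised = materialise(results);
    // The diff logs the size of the first space's object list.
    println!("NB results: {:?}", materialised[0].1.len());
}
```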
```diff
@@ -1,4 +1,3 @@
-use mercator_db::space;
 use mercator_db::DataBase;

 use super::expressions::Predictor;
@@ -8,7 +7,7 @@ impl Predictor for Projection {
 fn predict(&self, db: &DataBase) -> Result<f64, String> {
 match self {
 Projection::Nifti(_, _, bag) => bag.predict(db),
-Projection::JSON(_, _, bag) => bag.predict(db),
+Projection::Json(_, _, bag) => bag.predict(db),
 }
 }
 }
@@ -16,12 +15,8 @@ impl Predictor for Projection {
 impl Predictor for Bag {
 fn predict(&self, db: &DataBase) -> Result<f64, String> {
 match self {
-Bag::ViewPort(bag) => bag.predict(db),
 Bag::Distinct(bag) => bag.predict(db),
-Bag::Filter(_, bag) => match bag {
+Bag::Filter(_, bag) => bag.predict(db),
-None => Ok(db.space(space::Space::universe().name())?.volume()),
-Some(b) => b.predict(db),
-},
 Bag::Complement(bag) => Ok(db.space(bag.space())?.volume() - bag.predict(db)?),
 Bag::Intersection(lh, rh) => {
 let l = lh.predict(db)?;
```
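`predict()` estimates the volume of a bag's result set, and the executors use that estimate to decide which operand of an intersection or union to treat as the smaller side (the one hashed or appended into). A minimal sketch of that ordering heuristic, with plain `f64` costs standing in for `predict()`:

```rust
// Minimal sketch of the cost heuristic: pick whichever operand has the smaller
// predicted volume, mirroring
// `if rh.predict(parameters.db) < self.predict(parameters.db)` in executors.rs.
fn order_by_cost<T>(lh: (f64, T), rh: (f64, T)) -> (T, T) {
    // Returns (smaller, bigger).
    if rh.0 < lh.0 {
        (rh.1, lh.1)
    } else {
        (lh.1, rh.1)
    }
}

fn main() {
    let (smaller, bigger) = order_by_cost((10.0, "left"), (2.0, "right"));
    assert_eq!((smaller, bigger), ("right", "left"));
}
```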
```diff
@@ -51,7 +51,7 @@ JsonOperator: symbols::Projection = {
 None => Space::universe().name().clone(),
 };

-symbols::Projection::JSON(space_id, f, b)
+symbols::Projection::Json(space_id, f, b)
 }
 };

@@ -137,8 +137,6 @@ Aggregations: symbols::Aggregation = {
 //*********************************************************************/
 pub Filters: symbols::Bag = {
 <Bags>
-//<Bags> =>
-// symbols::Bag::ViewPort(Box::new(<>))
 };

 // All these expressions generate bags.
@@ -189,13 +187,25 @@ Union: symbols::Bag = {
 Filter: symbols::Bag = {
 // "filter" "(" <p:Predicates> "," <b:Bags> ")" =>
 "filter" "(" <b:Bags> ")" =>
-symbols::Bag::Filter(None, Some(Box::new(b))),
+symbols::Bag::Filter(None, Box::new(b)),
-"filter" "(" <p:Predicates> <b:("," <Bags> )?> ")" =>
+"filter" "(" <p:Predicates> <b:("," <Bags> )?> ")" => {
 match b {
-None => symbols::Bag::Filter(Some(p), None),
+None => {
-Some(b) => symbols::Bag::Filter(Some(p), Some(Box::new(b))),
+let (low, high) = Space::universe().bounding_box();
+let low: Vec<_> = low.into();
+let high: Vec<_> = high.into();
+let shape = symbols::Shape::HyperRectangle(
+Space::universe().name().clone(),
+vec![
+symbols::LiteralPosition(low.into_iter().map(symbols::LiteralNumber::Float).collect()),
+symbols::LiteralPosition(high.into_iter().map(symbols::LiteralNumber::Float).collect()),
+],
+);
+symbols::Bag::Filter(Some(p), Box::new(symbols::Bag::Inside(shape)))
+}
+Some(b) => symbols::Bag::Filter(Some(p), Box::new(b)),
 }
+},
 };

 Predicates: symbols::Predicate = {
```
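After this grammar change a predicate-only `filter(...)` no longer yields a bag-less `Filter`; the action substitutes an `Inside` bag built from the universe bounding box, so `Bag::Filter` always carries a bag. A usage sketch of the generated parser follows; it assumes the standard LALRPOP `new()`/`parse()` API for the re-exported `FiltersParser`, and the query string is a composition of forms from this change set rather than an existing test case.

```rust
// Usage sketch only: LALRPOP generates `FiltersParser::new()` / `.parse(...)` for
// the `pub Filters` rule re-exported by the crate.
use mercator_parser::FiltersParser;

fn main() {
    let parser = FiltersParser::new();
    // A filter with an explicit bag parses as before; a predicate-only filter now
    // desugars to Inside(universe bounding box) instead of a bag-less variant.
    let with_bag = parser.parse("filter(inside(point{[0]}))");
    println!("accepted: {:?}", with_bag.is_ok());
}
```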
src/symbols.rs (122 changed lines)

```diff
@@ -11,14 +11,14 @@ pub use super::types::*;
 #[derive(Clone, Debug)]
 pub enum Projection {
 Nifti(String, LiteralSelector, Bag),
-JSON(String, JsonValue, Bag),
+Json(String, JsonValue, Bag),
 }

 impl Projection {
 pub fn space(&self) -> &String {
 match self {
-Projection::Nifti(space, _, _) => &space,
+Projection::Nifti(space, _, _) => space,
-Projection::JSON(space, _, _) => &space,
+Projection::Json(space, _, _) => space,
 }
 }
 }
@@ -57,11 +57,9 @@ struct Transform {
 /**********************************************************************/
 #[derive(Clone, Debug)]
 pub enum Bag {
-// This is an implicit operator, inserted by the parser. Never to be used directly.
-ViewPort(Box<Bag>),
 // Bags
 Distinct(Box<Bag>),
-Filter(Option<Predicate>, Option<Box<Bag>>),
+Filter(Option<Predicate>, Box<Bag>),
 Complement(Box<Bag>),
 Intersection(Box<Bag>, Box<Bag>),
 Union(Box<Bag>, Box<Bag>),
@@ -75,12 +73,8 @@ pub enum Bag {
 impl Bag {
 pub fn space(&self) -> &String {
 match self {
-Bag::ViewPort(bag) => bag.space(),
 Bag::Distinct(bag) => bag.space(),
-Bag::Filter(_, bag) => match bag {
+Bag::Filter(_, bag) => bag.space(),
-None => space::Space::universe().name(),
-Some(b) => b.space(),
-},
 Bag::Complement(bag) => bag.space(),
 Bag::Intersection(lh, _) => {
 // We are assuming lh and rh are in the same space.
@@ -144,7 +138,7 @@ impl Shape {

 pub fn volume(&self) -> f64 {
 match self {
-Shape::Point(_, _) => std::f64::EPSILON, // The smallest non-zero volume possible
+Shape::Point(_, _) => f64::EPSILON, // The smallest non-zero volume possible
 Shape::HyperRectangle(_space, pos) => {
 //TODO: At this time, only aligned to the axes, defined by two points, hyperrectangles are supported.
 assert_eq!(pos.len(), 2);
@@ -208,7 +202,7 @@ impl Shape {
 }
 Shape::Label(_, _) => {
 // FIXME: Needs to find a way to figure out the approximate volume of this specific ID, or return MAX or MIN..
-std::f64::EPSILON
+f64::EPSILON
 }
 Shape::Nifti(_) => unimplemented!("Nifti"),
 }
@@ -243,7 +237,9 @@ impl Position {
 Ordering::Greater => 1,
 Ordering::Less => -1,
 };
-LiteralPosition(vec![LiteralNumber::Int(x)])
+let v = vec![LiteralNumber::Int(x)];
+LiteralPosition(v)
 }
 }
 }
@@ -262,14 +258,33 @@ pub enum LiteralNumber {
 Float(f64),
 }

-impl From<&LiteralNumber> for Vec<f64> {
+impl From<&LiteralNumber> for f64 {
 fn from(l: &LiteralNumber) -> Self {
-let r = match l {
+match l {
 LiteralNumber::Int(x) => (*x) as f64,
 LiteralNumber::Float(x) => *x,
-};
+}
+}
+}

-vec![r]
+impl From<LiteralNumber> for f64 {
+fn from(l: LiteralNumber) -> Self {
+(&l).into()
+}
+}

+impl From<&LiteralNumber> for space::Coordinate {
+fn from(literal: &LiteralNumber) -> Self {
+match literal {
+LiteralNumber::Float(f) => (*f).into(),
+LiteralNumber::Int(i) => (*i as u64).into(),
+}
+}
+}

+impl From<LiteralNumber> for space::Coordinate {
+fn from(literal: LiteralNumber) -> Self {
+(&literal).into()
 }
 }

@@ -294,7 +309,7 @@ pub struct LiteralPosition(pub Vec<LiteralNumber>);
 impl LiteralPosition {
 pub fn get_type(&self) -> LiteralTypes {
 let Self(v) = self;
-let mut t = Vec::new();
+let mut t = Vec::with_capacity(v.len());

 for n in v {
 t.push(match n {
@@ -327,34 +342,35 @@ impl LiteralPosition {
 }
 }

-impl From<&LiteralNumber> for f64 {
+impl From<&LiteralPosition> for Vec<f64> {
-fn from(l: &LiteralNumber) -> Self {
+fn from(l: &LiteralPosition) -> Self {
-match l {
+// Speed-wise this should be the same, the downside is the newly
-LiteralNumber::Int(x) => (*x) as f64,
+// allocated vector might be suboptimal in terms of space.
-LiteralNumber::Float(x) => *x,
+//let LiteralPosition(v) = l;
+//v.iter().map(|literal| literal.into()).collect()

+let LiteralPosition(v) = l;
+let mut lv = Vec::with_capacity(v.len());
+for value in v {
+lv.push(value.into());
 }

+lv
 }
 }

-impl From<&LiteralPosition> for Vec<f64> {
+impl From<LiteralPosition> for Vec<f64> {
-fn from(l: &LiteralPosition) -> Self {
+fn from(l: LiteralPosition) -> Self {
-let LiteralPosition(v) = l;
+(&l).into()
-let mut r = Vec::with_capacity(v.len());

-for x in v {
-let x = match x {
-LiteralNumber::Int(x) => (*x) as f64,
-LiteralNumber::Float(x) => *x,
-};
-r.push(x);
-}

-r
 }
 }

 impl From<&Vec<f64>> for LiteralPosition {
 fn from(v: &Vec<f64>) -> Self {
+// Speed-wise this should be the same, the downside is the newly
+// allocated vector might be suboptimal in terms of space.
+//LiteralPosition(v.iter().map(|value| LiteralNumber::Float(*value)).collect())

 let mut lv = Vec::with_capacity(v.len());
 for value in v {
 lv.push(LiteralNumber::Float(*value));
@@ -363,10 +379,36 @@ impl From<&Vec<f64>> for LiteralPosition {
 LiteralPosition(lv)
 }
 }

+impl From<Vec<f64>> for LiteralPosition {
+fn from(v: Vec<f64>) -> Self {
+(&v).into()
+}
+}

 impl From<&space::Position> for LiteralPosition {
 fn from(position: &space::Position) -> Self {
-let lv: Vec<f64> = position.into();
+let position: Vec<f64> = position.into();
-(&lv).into()
+position.into()
+}
+}

+impl From<space::Position> for LiteralPosition {
+fn from(position: space::Position) -> Self {
+(&position).into()
+}
+}

+impl From<&LiteralPosition> for space::Position {
+fn from(position: &LiteralPosition) -> Self {
+let position: Vec<f64> = position.into();
+position.into()
+}
+}

+impl From<LiteralPosition> for space::Position {
+fn from(position: LiteralPosition) -> Self {
+(&position).into()
 }
 }
```
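The new conversions in symbols.rs follow one pattern: implement `From` once for a reference, then forward the by-value impl to it, so both forms are available without duplicating the conversion logic. A self-contained sketch of that pattern, using a hypothetical `Meters` wrapper type rather than the crate's own:

```rust
// Hypothetical example of the by-ref / by-value From pattern used in symbols.rs.
struct Meters(f64);

impl From<&Meters> for f64 {
    fn from(m: &Meters) -> Self {
        m.0
    }
}

impl From<Meters> for f64 {
    fn from(m: Meters) -> Self {
        // Forward to the by-reference implementation.
        (&m).into()
    }
}

fn main() {
    let by_ref: f64 = (&Meters(2.0)).into();
    let by_value: f64 = Meters(3.0).into();
    assert_eq!(by_ref + by_value, 5.0);
}
```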
432
src/tests.rs
432
src/tests.rs
@@ -16,7 +16,7 @@ mod parsing {
|
|||||||
fn query() {
|
fn query() {
|
||||||
let p = query_parser();
|
let p = query_parser();
|
||||||
|
|
||||||
let nifti = "nifti(point{[0]})";
|
let nifti = "nifti(inside(point{[0]}))";
|
||||||
|
|
||||||
// Option is Empty
|
// Option is Empty
|
||||||
assert!(p.parse("").is_ok());
|
assert!(p.parse("").is_ok());
|
||||||
@@ -47,14 +47,14 @@ mod parsing {
|
|||||||
let p = query_parser();
|
let p = query_parser();
|
||||||
|
|
||||||
// Check allowed forms of the operator
|
// Check allowed forms of the operator
|
||||||
assert!(p.parse("nifti(point{[0]})").is_ok());
|
assert!(p.parse("nifti(inside(point{[0]}))").is_ok());
|
||||||
assert!(p.parse("nifti(.properties.id, point{[0]})").is_ok());
|
assert!(p.parse("nifti(.properties.id, inside(point{[0]}))").is_ok());
|
||||||
|
|
||||||
unimplemented!(); // TO REMEMBER SOME WORK IS DUE HERE.
|
unimplemented!(); // TO REMEMBER SOME WORK IS DUE HERE.
|
||||||
|
|
||||||
//FIXME: THIS SHOULD BE ALLOWED
|
//FIXME: THIS SHOULD BE ALLOWED
|
||||||
assert!(p.parse("nifti(2, point{[0]})").is_ok());
|
assert!(p.parse("nifti(2, inside(point{[0]}))").is_ok());
|
||||||
assert!(p.parse("nifti(2.23, point{[0]})").is_ok());
|
assert!(p.parse("nifti(2.23, inside(point{[0]}))").is_ok());
|
||||||
|
|
||||||
//FIXME: SYNTAX OK, TYPE NOT
|
//FIXME: SYNTAX OK, TYPE NOT
|
||||||
assert!(p.parse("nifti(point{[0], \"space\"})").is_err());
|
assert!(p.parse("nifti(point{[0], \"space\"})").is_err());
|
||||||
@@ -64,16 +64,16 @@ mod parsing {
|
|||||||
fn json_operator() {
|
fn json_operator() {
|
||||||
let p = query_parser();
|
let p = query_parser();
|
||||||
|
|
||||||
assert!(p.parse("json(true, point{[0]})").is_ok());
|
assert!(p.parse("json(true, inside(point{[0]}))").is_ok());
|
||||||
assert!(p.parse("json(23, point{[0]})").is_ok());
|
assert!(p.parse("json(23, inside(point{[0]}))").is_ok());
|
||||||
assert!(p.parse("json([23, 24], point{[0]})").is_ok());
|
assert!(p.parse("json([23, 24], inside(point{[0]}))").is_ok());
|
||||||
assert!(p.parse("json([23, count(.)], point{[0]})").is_ok());
|
assert!(p.parse("json([23, count(.)], inside(point{[0]}))").is_ok());
|
||||||
|
|
||||||
assert!(p.parse("json(true)").is_err());
|
assert!(p.parse("json(true)").is_err());
|
||||||
assert!(p.parse("json(true,)").is_err());
|
assert!(p.parse("json(true,)").is_err());
|
||||||
|
|
||||||
assert!(p.parse("json(, point{[0]})").is_err());
|
assert!(p.parse("json(, inside(point{[0]}))").is_err());
|
||||||
assert!(p.parse("json(point{[0]})").is_err());
|
assert!(p.parse("json(inside(point{[0]}))").is_err());
|
||||||
|
|
||||||
assert!(p.parse("json(true, point)").is_err());
|
assert!(p.parse("json(true, point)").is_err());
|
||||||
}
|
}
|
||||||
@@ -83,24 +83,24 @@ mod parsing {
|
|||||||
let p = query_parser();
|
let p = query_parser();
|
||||||
|
|
||||||
assert!(p
|
assert!(p
|
||||||
.parse(format!("json({}, point{{[0]}})", "true").as_str())
|
.parse(format!("json({}, inside(point{{[0]}}))", "true").as_str())
|
||||||
.is_ok());
|
.is_ok());
|
||||||
assert!(p
|
assert!(p
|
||||||
.parse(format!("json({}, point{{[0]}})", "false").as_str())
|
.parse(format!("json({}, inside(point{{[0]}}))", "false").as_str())
|
||||||
.is_ok());
|
.is_ok());
|
||||||
assert!(p
|
assert!(p
|
||||||
.parse(format!("json({}, point{{[0]}})", "null").as_str())
|
.parse(format!("json({}, inside(point{{[0]}}))", "null").as_str())
|
||||||
.is_ok());
|
.is_ok());
|
||||||
|
|
||||||
// Incorrect capitalisation
|
// Incorrect capitalisation
|
||||||
assert!(p
|
assert!(p
|
||||||
.parse(format!("json({}, point{{[0]}})", "True").as_str())
|
.parse(format!("json({}, inside(point{{[0]}}))", "True").as_str())
|
||||||
.is_err());
|
.is_err());
|
||||||
assert!(p
|
assert!(p
|
||||||
.parse(format!("json({}, point{{[0]}})", "False").as_str())
|
.parse(format!("json({}, inside(point{{[0]}}))", "False").as_str())
|
||||||
.is_err());
|
.is_err());
|
||||||
assert!(p
|
assert!(p
|
||||||
.parse(format!("json({}, point{{[0]}})", "Null").as_str())
|
.parse(format!("json({}, inside(point{{[0]}}))", "Null").as_str())
|
||||||
.is_err());
|
.is_err());
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -109,24 +109,24 @@ mod parsing {
|
|||||||
let p = query_parser();
|
let p = query_parser();
|
||||||
|
|
||||||
assert!(p
|
assert!(p
|
||||||
.parse(format!("json({}, point{{[0]}})", "{}").as_str())
|
.parse(format!("json({}, inside(point{{[0]}}))", "{}").as_str())
|
||||||
.is_ok());
|
.is_ok());
|
||||||
assert!(p
|
assert!(p
|
||||||
.parse(format!("json({}, point{{[0]}})", "{\"field\": 0}").as_str())
|
.parse(format!("json({}, inside(point{{[0]}}))", "{\"field\": 0}").as_str())
|
||||||
.is_ok());
|
.is_ok());
|
||||||
assert!(p
|
assert!(p
|
||||||
.parse(format!("json({}, point{{[0]}})", "{\"field\": 0, \"field1\": 1}").as_str())
|
.parse(format!("json({}, inside(point{{[0]}}))", "{\"field\": 0, \"field1\": 1}").as_str())
|
||||||
.is_ok());
|
.is_ok());
|
||||||
assert!(p
|
assert!(p
|
||||||
.parse(format!("json({}, point{{[0]}})", "{\"field\": [0, 1]}").as_str())
|
.parse(format!("json({}, inside(point{{[0]}}))", "{\"field\": [0, 1]}").as_str())
|
||||||
.is_ok());
|
.is_ok());
|
||||||
assert!(p
|
assert!(p
|
||||||
.parse(format!("json({}, point{{[0]}})", "{\"field\": {\"field1\": 0}}").as_str())
|
.parse(format!("json({}, inside(point{{[0]}}))", "{\"field\": {\"field1\": 0}}").as_str())
|
||||||
.is_ok());
|
.is_ok());
|
||||||
assert!(p
|
assert!(p
|
||||||
.parse(
|
.parse(
|
||||||
format!(
|
format!(
|
||||||
"json({}, point{{[0]}})",
|
"json({}, inside(point{{[0]}}))",
|
||||||
"{\"field\": [{\"field1\": 0}, {\"field1\": 1}]}"
|
"{\"field\": [{\"field1\": 0}, {\"field1\": 1}]}"
|
||||||
)
|
)
|
||||||
.as_str()
|
.as_str()
|
||||||
@@ -139,25 +139,25 @@ mod parsing {
|
|||||||
let p = query_parser();
|
let p = query_parser();
|
||||||
|
|
||||||
assert!(p
|
assert!(p
|
||||||
.parse(format!("json({}, point{{[0]}})", "{:}").as_str())
|
.parse(format!("json({}, inside(point{{[0]}}))", "{:}").as_str())
|
||||||
.is_err());
|
.is_err());
|
||||||
assert!(p
|
assert!(p
|
||||||
.parse(format!("json({}, point{{[0]}})", "{field: 0}").as_str())
|
.parse(format!("json({}, inside(point{{[0]}}))", "{field: 0}").as_str())
|
||||||
.is_err());
|
.is_err());
|
||||||
assert!(p
|
assert!(p
|
||||||
.parse(format!("json({}, point{{[0]}})", "{0: 0}").as_str())
|
.parse(format!("json({}, inside(point{{[0]}}))", "{0: 0}").as_str())
|
||||||
.is_err());
|
.is_err());
|
||||||
assert!(p
|
assert!(p
|
||||||
.parse(format!("json({}, point{{[0]}})", "{\"0\": }").as_str())
|
.parse(format!("json({}, inside(point{{[0]}}))", "{\"0\": }").as_str())
|
||||||
.is_err());
|
.is_err());
|
||||||
assert!(p
|
assert!(p
|
||||||
.parse(format!("json({}, point{{[0]}})", "{\"0\": 0 }").as_str())
|
.parse(format!("json({}, inside(point{{[0]}}))", "{\"0\": 0 }").as_str())
|
||||||
.is_ok());
|
.is_ok());
|
||||||
assert!(p
|
assert!(p
|
||||||
.parse(format!("json({}, point{{[0]}})", "{\"field\": 0 }").as_str())
|
.parse(format!("json({}, inside(point{{[0]}}))", "{\"field\": 0 }").as_str())
|
||||||
.is_ok());
|
.is_ok());
|
||||||
assert!(p
|
assert!(p
|
||||||
.parse(format!("json({}, point{{[0]}})", "{\"field\": \"0\" }").as_str())
|
.parse(format!("json({}, inside(point{{[0]}}))", "{\"field\": \"0\" }").as_str())
|
||||||
.is_ok());
|
.is_ok());
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -166,20 +166,20 @@ mod parsing {
|
|||||||
let p = query_parser();
|
let p = query_parser();
|
||||||
|
|
||||||
assert!(p
|
assert!(p
|
||||||
.parse(format!("json({}, point{{[0]}})", "[, 0]").as_str())
|
.parse(format!("json({}, inside(point{{[0]}}))", "[, 0]").as_str())
|
||||||
.is_err());
|
.is_err());
|
||||||
assert!(p
|
assert!(p
|
||||||
.parse(format!("json({}, point{{[0]}})", "[]").as_str())
|
.parse(format!("json({}, inside(point{{[0]}}))", "[]").as_str())
|
||||||
.is_ok());
|
.is_ok());
|
||||||
assert!(p
|
assert!(p
|
||||||
.parse(format!("json({}, point{{[0]}})", "[0]").as_str())
|
.parse(format!("json({}, inside(point{{[0]}}))", "[0]").as_str())
|
||||||
.is_ok());
|
.is_ok());
|
||||||
assert!(p
|
assert!(p
|
||||||
.parse(format!("json({}, point{{[0]}})", "[0, 1]").as_str())
|
.parse(format!("json({}, inside(point{{[0]}}))", "[0, 1]").as_str())
|
||||||
.is_ok());
|
.is_ok());
|
||||||
assert!(p
|
assert!(p
|
||||||
.parse(
|
.parse(
|
||||||
format!("json({}, point{{[0]}})", "[{\"field\": 0}, {\"field\": 1}]").as_str()
|
format!("json({}, inside(point{{[0]}}))", "[{\"field\": 0}, {\"field\": 1}]").as_str()
|
||||||
)
|
)
|
||||||
.is_ok());
|
.is_ok());
|
||||||
}
|
}
|
||||||
@@ -190,40 +190,40 @@ mod parsing {
|
|||||||
|
|
||||||
// count ()
|
// count ()
|
||||||
assert!(p
|
assert!(p
|
||||||
.parse(format!("json({}, point{{[0]}})", "count()").as_str())
|
.parse(format!("json({}, inside(point{{[0]}}))", "count()").as_str())
|
||||||
.is_err());
|
.is_err());
|
||||||
assert!(p
|
assert!(p
|
||||||
.parse(format!("json({}, point{{[0]}})", "count(distinct)").as_str())
|
.parse(format!("json({}, inside(point{{[0]}}))", "count(distinct)").as_str())
|
||||||
.is_err());
|
.is_err());
|
||||||
assert!(p
|
assert!(p
|
||||||
.parse(format!("json({}, point{{[0]}})", "count(.)").as_str())
|
.parse(format!("json({}, inside(point{{[0]}}))", "count(.)").as_str())
|
||||||
.is_ok());
|
.is_ok());
|
||||||
assert!(p
|
assert!(p
|
||||||
.parse(format!("json({}, point{{[0]}})", "count(distinct .)").as_str())
|
.parse(format!("json({}, inside(point{{[0]}}))", "count(distinct .)").as_str())
|
||||||
.is_ok());
|
.is_ok());
|
||||||
|
|
||||||
// sum ()
|
// sum ()
|
||||||
assert!(p
|
assert!(p
|
||||||
.parse(format!("json({}, point{{[0]}})", "sum()").as_str())
|
.parse(format!("json({}, inside(point{{[0]}}))", "sum()").as_str())
|
||||||
.is_err());
|
.is_err());
|
||||||
assert!(p
|
assert!(p
|
||||||
.parse(format!("json({}, point{{[0]}})", "sum(.)").as_str())
|
.parse(format!("json({}, inside(point{{[0]}}))", "sum(.)").as_str())
|
||||||
.is_ok());
|
.is_ok());
|
||||||
|
|
||||||
// min ()
|
// min ()
|
||||||
assert!(p
|
assert!(p
|
||||||
.parse(format!("json({}, point{{[0]}})", "min()").as_str())
|
.parse(format!("json({}, inside(point{{[0]}}))", "min()").as_str())
|
||||||
.is_err());
|
.is_err());
|
||||||
assert!(p
|
assert!(p
|
||||||
.parse(format!("json({}, point{{[0]}})", "min(.)").as_str())
|
.parse(format!("json({}, inside(point{{[0]}}))", "min(.)").as_str())
|
||||||
.is_ok());
|
.is_ok());
|
||||||
|
|
||||||
// max ()
|
// max ()
|
||||||
assert!(p
|
assert!(p
|
||||||
.parse(format!("json({}, point{{[0]}})", "max()").as_str())
|
.parse(format!("json({}, inside(point{{[0]}}))", "max()").as_str())
|
||||||
.is_err());
|
.is_err());
|
||||||
assert!(p
|
assert!(p
|
||||||
.parse(format!("json({}, point{{[0]}})", "max(.)").as_str())
|
.parse(format!("json({}, inside(point{{[0]}}))", "max(.)").as_str())
|
||||||
.is_ok());
|
.is_ok());
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -233,42 +233,42 @@ mod parsing {

// Integers
assert!(p
- .parse(format!("json({}, point{{[0]}})", "0").as_str())
+ .parse(format!("json({}, inside(point{{[0]}}))", "0").as_str())
.is_ok());
assert!(p
- .parse(format!("json({}, point{{[0]}})", "+0").as_str())
+ .parse(format!("json({}, inside(point{{[0]}}))", "+0").as_str())
.is_err());
assert!(p
- .parse(format!("json({}, point{{[0]}})", "-0").as_str())
+ .parse(format!("json({}, inside(point{{[0]}}))", "-0").as_str())
.is_ok());
assert!(p
- .parse(format!("json({}, point{{[0]}})", "1").as_str())
+ .parse(format!("json({}, inside(point{{[0]}}))", "1").as_str())
.is_ok());
assert!(p
- .parse(format!("json({}, point{{[0]}})", "+1").as_str())
+ .parse(format!("json({}, inside(point{{[0]}}))", "+1").as_str())
.is_err());
assert!(p
- .parse(format!("json({}, point{{[0]}})", "-1").as_str())
+ .parse(format!("json({}, inside(point{{[0]}}))", "-1").as_str())
.is_ok());

// Floating point values
assert!(p
- .parse(format!("json({}, point{{[0]}})", "0.0").as_str())
+ .parse(format!("json({}, inside(point{{[0]}}))", "0.0").as_str())
.is_ok());
assert!(p
- .parse(format!("json({}, point{{[0]}})", "+0.0").as_str())
+ .parse(format!("json({}, inside(point{{[0]}}))", "+0.0").as_str())
.is_err());
assert!(p
- .parse(format!("json({}, point{{[0]}})", "-0.0").as_str())
+ .parse(format!("json({}, inside(point{{[0]}}))", "-0.0").as_str())
.is_ok());
assert!(p
- .parse(format!("json({}, point{{[0]}})", "0.1").as_str())
+ .parse(format!("json({}, inside(point{{[0]}}))", "0.1").as_str())
.is_ok());
assert!(p
- .parse(format!("json({}, point{{[0]}})", "+0.01").as_str())
+ .parse(format!("json({}, inside(point{{[0]}}))", "+0.01").as_str())
.is_err());
assert!(p
- .parse(format!("json({}, point{{[0]}})", "-0.01").as_str())
+ .parse(format!("json({}, inside(point{{[0]}}))", "-0.01").as_str())
.is_ok());
}
}
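Read together, these hunks document a single grammar change: a bare shape literal such as `point{[0]}` is no longer handed to the parser directly as a bag expression; every test now wraps it in the `inside(...)` spatial operator. A rough sketch of the query strings that are now asserted to parse, reusing the `filters_parser()` helper the surrounding tests rely on (so this is an illustration, not standalone code or part of the commit):

```rust
// Sketch only: mirrors the assertions in the hunks above.
#[test]
fn inside_wrapping_sketch() {
    let p = filters_parser(); // the test module's parser helper

    // An aggregation over a bag: the point literal is wrapped in inside(...).
    assert!(p.parse("json(count(.), inside(point{[0]}))").is_ok());

    // Set operators combine wrapped shapes, as in the intersection/union tests.
    assert!(p
        .parse("intersection(inside(point{[0]}), inside(point{[0]}))")
        .is_ok());
}
```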
@@ -290,7 +290,7 @@ mod parsing {

assert!(p.parse("").is_err());

- assert!(p.parse("point{[0]}").is_ok());
+ assert!(p.parse("inside(point{[0]})").is_ok());
}

/* Not useful to test this rule
@@ -305,7 +305,7 @@ mod parsing {

assert!(p.parse("distinct()").is_err());

- assert!(p.parse("distinct(point{[0]})").is_ok());
+ assert!(p.parse("distinct(inside(point{[0]}))").is_ok());
}

#[test]
@@ -314,7 +314,7 @@ mod parsing {

assert!(p.parse("complement()").is_err());

- assert!(p.parse("complement(point{[0]})").is_ok());
+ assert!(p.parse("complement(inside(point{[0]}))").is_ok());
}

#[test]
@@ -322,12 +322,12 @@ mod parsing {
let p = filters_parser();

assert!(p.parse("intersection()").is_err());
- assert!(p.parse("intersection(point{[0]})").is_err());
+ assert!(p.parse("intersection(inside(point{[0]}))").is_err());
assert!(p
- .parse("intersection(point{[0]}, point{[0]}, point{[0]})")
+ .parse("intersection(inside(point{[0]}), inside(point{[0]}), inside(point{[0]}))")
.is_err());

- assert!(p.parse("intersection(point{[0]}, point{[0]})").is_ok());
+ assert!(p.parse("intersection(inside(point{[0]}), inside(point{[0]}))").is_ok());
}

#[test]
@@ -335,12 +335,12 @@ mod parsing {
let p = filters_parser();

assert!(p.parse("union()").is_err());
- assert!(p.parse("union(point{[0]})").is_err());
+ assert!(p.parse("union(inside(point{[0]}))").is_err());
assert!(p
- .parse("union(point{[0]}, point{[0]}, point{[0]})")
+ .parse("union(inside(point{[0]}), inside(point{[0]}), inside(point{[0]}))")
.is_err());

- assert!(p.parse("union(point{[0]}, point{[0]})").is_ok());
+ assert!(p.parse("union(inside(point{[0]}), inside(point{[0]}))").is_ok());
}

#[test]
@@ -348,10 +348,10 @@ mod parsing {
let p = filters_parser();

assert!(p.parse("filter()").is_err());
- assert!(p.parse("filter(point{[0]})").is_ok());
+ assert!(p.parse("filter(inside(point{[0]}))").is_ok());
assert!(p.parse("filter(=(., [0]))").is_ok());

- assert!(p.parse("filter(=(., [0]), point{[0]})").is_ok());
+ assert!(p.parse("filter(=(., [0]), inside(point{[0]}))").is_ok());
}

/* Not useful to test this rule
@@ -365,17 +365,17 @@ mod parsing {
let p = filters_parser();

assert!(p
- .parse(format!("filter({}, point{{[0]}})", "<(., [0])").as_str())
+ .parse(format!("filter({}, inside(point{{[0]}}))", "<(., [0])").as_str())
.is_ok());

assert!(p
- .parse(format!("filter({}, point{{[0]}})", "<(, [0])").as_str())
+ .parse(format!("filter({}, inside(point{{[0]}}))", "<(, [0])").as_str())
.is_err());
assert!(p
- .parse(format!("filter({}, point{{[0]}})", "<(.)").as_str())
+ .parse(format!("filter({}, inside(point{{[0]}}))", "<(.)").as_str())
.is_err());
assert!(p
- .parse(format!("filter({}, point{{[0]}})", "<()").as_str())
+ .parse(format!("filter({}, inside(point{{[0]}}))", "<()").as_str())
.is_err());
}

@@ -384,17 +384,17 @@ mod parsing {
let p = filters_parser();

assert!(p
- .parse(format!("filter({}, point{{[0]}})", ">(., [0])").as_str())
+ .parse(format!("filter({}, inside(point{{[0]}}))", ">(., [0])").as_str())
.is_ok());

assert!(p
- .parse(format!("filter({}, point{{[0]}})", ">(, [0])").as_str())
+ .parse(format!("filter({}, inside(point{{[0]}}))", ">(, [0])").as_str())
.is_err());
assert!(p
- .parse(format!("filter({}, point{{[0]}})", ">(.)").as_str())
+ .parse(format!("filter({}, inside(point{{[0]}}))", ">(.)").as_str())
.is_err());
assert!(p
- .parse(format!("filter({}, point{{[0]}})", ">()").as_str())
+ .parse(format!("filter({}, inside(point{{[0]}}))", ">()").as_str())
.is_err());
}

@@ -403,17 +403,17 @@ mod parsing {
let p = filters_parser();

assert!(p
- .parse(format!("filter({}, point{{[0]}})", "=(., [0])").as_str())
+ .parse(format!("filter({}, inside(point{{[0]}}))", "=(., [0])").as_str())
.is_ok());

assert!(p
- .parse(format!("filter({}, point{{[0]}})", "=(, [0])").as_str())
+ .parse(format!("filter({}, inside(point{{[0]}}))", "=(, [0])").as_str())
.is_err());
assert!(p
- .parse(format!("filter({}, point{{[0]}})", "=(.)").as_str())
+ .parse(format!("filter({}, inside(point{{[0]}}))", "=(.)").as_str())
.is_err());
assert!(p
- .parse(format!("filter({}, point{{[0]}})", "=()").as_str())
+ .parse(format!("filter({}, inside(point{{[0]}}))", "=()").as_str())
.is_err());
}

@@ -422,11 +422,11 @@ mod parsing {
let p = filters_parser();

assert!(p
- .parse(format!("filter({}, point{{[0]}})", "!(=(., [0]))").as_str())
+ .parse(format!("filter({}, inside(point{{[0]}}))", "!(=(., [0]))").as_str())
.is_ok());

assert!(p
- .parse(format!("filter({}, point{{[0]}})", "!()").as_str())
+ .parse(format!("filter({}, inside(point{{[0]}}))", "!()").as_str())
.is_err());
}

@@ -435,17 +435,17 @@ mod parsing {
let p = filters_parser();

assert!(p
- .parse(format!("filter({}, point{{[0]}})", "&(=(., [0]), =(., [0]))").as_str())
+ .parse(format!("filter({}, inside(point{{[0]}}))", "&(=(., [0]), =(., [0]))").as_str())
.is_ok());

assert!(p
- .parse(format!("filter({}, point{{[0]}})", "&(, =(., [0]))").as_str())
+ .parse(format!("filter({}, inside(point{{[0]}}))", "&(, =(., [0]))").as_str())
.is_err());
assert!(p
- .parse(format!("filter({}, point{{[0]}})", "&(|(=(., [0])))").as_str())
+ .parse(format!("filter({}, inside(point{{[0]}}))", "&(|(=(., [0])))").as_str())
.is_err());
assert!(p
- .parse(format!("filter({}, point{{[0]}})", "&()").as_str())
+ .parse(format!("filter({}, inside(point{{[0]}}))", "&()").as_str())
.is_err());
}

@@ -454,17 +454,17 @@ mod parsing {
let p = filters_parser();

assert!(p
- .parse(format!("filter({}, point{{[0]}})", "|(=(., [0]), =(., [0]))").as_str())
+ .parse(format!("filter({}, inside(point{{[0]}}))", "|(=(., [0]), =(., [0]))").as_str())
.is_ok());

assert!(p
- .parse(format!("filter({}, point{{[0]}})", "|(, =(., [0]))").as_str())
+ .parse(format!("filter({}, inside(point{{[0]}}))", "|(, =(., [0]))").as_str())
.is_err());
assert!(p
- .parse(format!("filter({}, point{{[0]}})", "|(|(=(., [0])))").as_str())
+ .parse(format!("filter({}, inside(point{{[0]}}))", "|(|(=(., [0])))").as_str())
.is_err());
assert!(p
- .parse(format!("filter({}, point{{[0]}})", "|()").as_str())
+ .parse(format!("filter({}, inside(point{{[0]}}))", "|()").as_str())
.is_err());
}

@@ -474,11 +474,11 @@ mod parsing {

assert!(p.parse("bag{}").is_err());

- assert!(p.parse("bag{point{[0]}}").is_ok());
+ assert!(p.parse("bag{inside(point{[0]})}").is_ok());
- assert!(p.parse("bag{point{[0]}, point{[0]}}").is_ok());
+ assert!(p.parse("bag{inside(point{[0]}), inside(point{[0]})}").is_ok());
- assert!(p.parse("bag{point{[0]}, point{[0]}, point{[0]}}").is_ok());
+ assert!(p.parse("bag{inside(point{[0]}), inside(point{[0]}), inside(point{[0]})}").is_ok());
assert!(p
- .parse("bag{point{[0]}, hypersphere{[0], 1}, hyperrectangle{[0], [1]}}")
+ .parse("bag{inside(point{[0]}), inside(hypersphere{[0], 1}), inside(hyperrectangle{[0], [1]})}")
.is_ok());
}

@@ -518,21 +518,21 @@ mod parsing {
// At least two positions when it is aligned with the axis, otherwise an even number
// of positions, as the number of vertices follows the rule 2**k, where k is the number
// of dimensions of the space containing the hyperrectangle.
- assert!(p.parse("hyperrectangle{}").is_err());
+ assert!(p.parse("inside(hyperrectangle{})").is_err());
- assert!(p.parse("hyperrectangle{[]}").is_err());
+ assert!(p.parse("inside(hyperrectangle{[]})").is_err());
- assert!(p.parse("hyperrectangle{[0]}").is_err());
+ assert!(p.parse("inside(hyperrectangle{[0]})").is_err());
- assert!(p.parse("hyperrectangle{[0], [1], [2]}").is_err());
+ assert!(p.parse("inside(hyperrectangle{[0], [1], [2]})").is_err());
- assert!(p.parse("hyperrectangle{[0], [1], [2], [3], [4]}").is_err());
+ assert!(p.parse("inside(hyperrectangle{[0], [1], [2], [3], [4]})").is_err());

- assert!(p.parse("hyperrectangle{[0], [1]}").is_ok());
+ assert!(p.parse("inside(hyperrectangle{[0], [1]})").is_ok());
- assert!(p.parse("hyperrectangle{[0], [1], \"space\"}").is_ok());
+ assert!(p.parse("inside(hyperrectangle{[0], [1], \"space\"})").is_ok());
- assert!(p.parse("hyperrectangle{[0], [1], [2], [3]}").is_ok());
+ assert!(p.parse("inside(hyperrectangle{[0], [1], [2], [3]})").is_ok());
- assert!(p.parse("hyperrectangle{[0], [1], [2], [3]}").is_ok());
+ assert!(p.parse("inside(hyperrectangle{[0], [1], [2], [3]})").is_ok());
assert!(p
- .parse("hyperrectangle{[0], [1], [2], [3], [4], [5]}")
+ .parse("inside(hyperrectangle{[0], [1], [2], [3], [4], [5]})")
.is_ok());
assert!(p
- .parse("hyperrectangle{[0], [1], [2], [3], [4], [5], \"space\"}")
+ .parse("inside(hyperrectangle{[0], [1], [2], [3], [4], [5], \"space\"})")
.is_ok());
}

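The comment in this hunk carries the one piece of geometric reasoning in the file: an axis-aligned hyperrectangle is fixed by two opposite corners, a general one by its 2**k vertices (k being the dimension of the containing space), and the literal therefore accepts an even number of positions, at least two. A throwaway sketch of that arity rule, purely as an illustration and not the grammar's actual implementation:

```rust
/// Illustrative only: the even-count, at-least-two rule the tests above exercise.
fn hyperrectangle_arity_ok(positions: usize) -> bool {
    positions >= 2 && positions % 2 == 0
}

fn main() {
    assert!(hyperrectangle_arity_ok(2)); // axis-aligned: two opposite corners
    assert!(hyperrectangle_arity_ok(4)); // e.g. all 2**2 vertices of a 2-D rectangle
    assert!(!hyperrectangle_arity_ok(5)); // odd counts are rejected, as in the tests
}
```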
@@ -540,23 +540,23 @@ mod parsing {
fn hyperrsphere() {
let p = filters_parser();

- assert!(p.parse("hypersphere{}").is_err());
+ assert!(p.parse("inside(hypersphere{}").is_err());
- assert!(p.parse("hypersphere{[]}").is_err());
+ assert!(p.parse("inside(hypersphere{[]})").is_err());
- assert!(p.parse("hypersphere{[0]}").is_err());
+ assert!(p.parse("inside(hypersphere{[0]})").is_err());

- assert!(p.parse("hypersphere{[0], 23}").is_ok());
+ assert!(p.parse("inside(hypersphere{[0], 23})").is_ok());
- assert!(p.parse("hypersphere{[0], 23, \"space\"}").is_ok());
+ assert!(p.parse("inside(hypersphere{[0], 23, \"space\"})").is_ok());
}

#[test]
fn point() {
let p = filters_parser();

- assert!(p.parse("point{}").is_err());
+ assert!(p.parse("inside(point{})").is_err());
- assert!(p.parse("point{[]}").is_err());
+ assert!(p.parse("inside(point{[]})").is_err());

- assert!(p.parse("point{[0]}").is_ok());
+ assert!(p.parse("inside(point{[0]})").is_ok());
- assert!(p.parse("point{[0], \"space\"}").is_ok());
+ assert!(p.parse("inside(point{[0], \"space\"})").is_ok());
}

#[test]
@@ -579,30 +579,30 @@ mod parsing {
assert!(p
.parse(
format!(
- "filter(=({}, [1]), point{{[0]}})",
+ "filter(=({}, [1]), inside(point{{[0]}}))",
"str_cmp_ignore_case(.field, \"\")"
)
.as_str()
)
.is_ok());
assert!(p
- .parse(format!("filter(=({}, [1]), point{{[0]}})", "str_cmp(.field, \"\")").as_str())
+ .parse(format!("filter(=({}, [1]), inside(point{{[0]}}))", "str_cmp(.field, \"\")").as_str())
.is_ok());
assert!(p
- .parse(format!("filter(=({}, [1]), point{{[0]}})", ".field").as_str())
+ .parse(format!("filter(=({}, [1]), inside(point{{[0]}}))", ".field").as_str())
.is_ok());
assert!(p
- .parse(format!("filter(=({}, [1]), point{{[0]}})", "[0]").as_str())
+ .parse(format!("filter(=({}, [1]), inside(point{{[0]}}))", "[0]").as_str())
.is_ok());

assert!(p
- .parse(format!("filter(=({}, [1]), point{{[0]}})", "point{[0]}").as_str())
+ .parse(format!("filter(=({}, [1]), inside(point{{[0]}}))", "inside(point{[0]})").as_str())
.is_err());
assert!(p
- .parse(format!("filter(=({}, [1]), point{{[0]}})", "{0}").as_str())
+ .parse(format!("filter(=({}, [1]), inside(point{{[0]}}))", "{0}").as_str())
.is_err());
assert!(p
- .parse(format!("filter(=({}, [1]), point{{[0]}})", "").as_str())
+ .parse(format!("filter(=({}, [1]), inside(point{{[0]}}))", "").as_str())
.is_err());
}*/

@@ -612,15 +612,15 @@ mod parsing {

assert!(p
.parse(
- format!("filter(=({}, [1]), point{{[0]}})", "str_cmp(.field, \"\")").as_str()
+ format!("filter(=({}, [1]), inside(point{{[0]}}))", "str_cmp(.field, \"\")").as_str()
)
.is_ok());

assert!(p
- .parse(format!("filter(=({}, [1]), point{{[0]}})", "str_cmp(.field)").as_str())
+ .parse(format!("filter(=({}, [1]), inside(point{{[0]}}))", "str_cmp(.field)").as_str())
.is_err());
assert!(p
- .parse(format!("filter(=({}, [1]), point{{[0]}})", "str_cmp(\"\")").as_str())
+ .parse(format!("filter(=({}, [1]), inside(point{{[0]}}))", "str_cmp(\"\")").as_str())
.is_err());
}

@@ -631,7 +631,7 @@ mod parsing {
assert!(p
.parse(
format!(
- "filter(=({}, [1]), point{{[0]}})",
+ "filter(=({}, [1]), inside(point{{[0]}}))",
"str_cmp_ignore_case(.field, \"\")"
)
.as_str()
@@ -641,7 +641,7 @@ mod parsing {
assert!(p
.parse(
format!(
- "filter(=({}, [1]), point{{[0]}})",
+ "filter(=({}, [1]), inside(point{{[0]}}))",
"str_cmp_ignore_case(.field)"
)
.as_str()
@@ -650,7 +650,7 @@ mod parsing {
assert!(p
.parse(
format!(
- "filter(=({}, [1]), point{{[0]}})",
+ "filter(=({}, [1]), inside(point{{[0]}}))",
"str_cmp_ignore_case(\"\")"
)
.as_str()
@@ -663,19 +663,19 @@ mod parsing {
let p = filters_parser();

assert!(p
- .parse(format!("filter(=({}, [1]), point{{[0]}})", ".").as_str())
+ .parse(format!("filter(=({}, [1]), inside(point{{[0]}}))", ".").as_str())
.is_ok());
assert!(p
- .parse(format!("filter(=({}, [1]), point{{[0]}})", ".field").as_str())
+ .parse(format!("filter(=({}, [1]), inside(point{{[0]}}))", ".field").as_str())
.is_ok());
assert!(p
- .parse(format!("filter(=({}, [1]), point{{[0]}})", ".field.field").as_str())
+ .parse(format!("filter(=({}, [1]), inside(point{{[0]}}))", ".field.field").as_str())
.is_ok());
assert!(p
- .parse(format!("filter(=({}, [1]), point{{[0]}})", ".field[1].field").as_str())
+ .parse(format!("filter(=({}, [1]), inside(point{{[0]}}))", ".field[1].field").as_str())
.is_ok());
assert!(p
- .parse(format!("filter(=({}, [1]), point{{[0]}})", ".field.field[1]").as_str())
+ .parse(format!("filter(=({}, [1]), inside(point{{[0]}}))", ".field.field[1]").as_str())
.is_ok());
}

@@ -684,26 +684,26 @@ mod parsing {
let p = filters_parser();

// Empty
- assert!(p.parse(format!("point{{{}}}", "[]").as_str()).is_err());
+ assert!(p.parse(format!("inside(point{{{}}})", "[]").as_str()).is_err());

// Non-numerical coordinate:
- assert!(p.parse(format!("point{{{}}}", "[aa]").as_str()).is_err());
+ assert!(p.parse(format!("inside(point{{{}}})", "[aa]").as_str()).is_err());

assert!(p
- .parse(format!("point{{{}}}", "[\"aa\"]").as_str())
+ .parse(format!("inside(point{{{}}})", "[\"aa\"]").as_str())
.is_err());

// One or more coordinates
- assert!(p.parse(format!("point{{{}}}", "[0]").as_str()).is_ok());
+ assert!(p.parse(format!("inside(point{{{}}})", "[0]").as_str()).is_ok());
- assert!(p.parse(format!("point{{{}}}", "[0, 0]").as_str()).is_ok());
+ assert!(p.parse(format!("inside(point{{{}}})", "[0, 0]").as_str()).is_ok());
assert!(p
- .parse(format!("point{{{}}}", "[0, 0, 0]").as_str())
+ .parse(format!("inside(point{{{}}})", "[0, 0, 0]").as_str())
.is_ok());
assert!(p
- .parse(format!("point{{{}}}", "[0, 0, 0, 0]").as_str())
+ .parse(format!("inside(point{{{}}})", "[0, 0, 0, 0]").as_str())
.is_ok());
assert!(p
- .parse(format!("point{{{}}}", "[0,0,0,0]").as_str())
+ .parse(format!("inside(point{{{}}})", "[0,0,0,0]").as_str())
.is_ok());
}

@@ -713,66 +713,66 @@ mod parsing {

// Single dot
assert!(p
- .parse(format!("filter(<({}, [1]), point{{[0]}})", ".").as_str())
+ .parse(format!("filter(<({}, [1]), inside(point{{[0]}}))", ".").as_str())
.is_ok());

// Check first character is within allowed characters
assert!(p
- .parse(format!("filter(<({}, [1]), point{{[0]}})", ".a").as_str())
+ .parse(format!("filter(<({}, [1]), inside(point{{[0]}}))", ".a").as_str())
.is_ok());
assert!(p
- .parse(format!("filter(<({}, [1]), point{{[0]}})", "._").as_str())
+ .parse(format!("filter(<({}, [1]), inside(point{{[0]}}))", "._").as_str())
.is_ok());
assert!(p
- .parse(format!("filter(<({}, [1]), point{{[0]}})", ".2").as_str())
+ .parse(format!("filter(<({}, [1]), inside(point{{[0]}}))", ".2").as_str())
.is_err());

// Check second character is within allowed characters
assert!(p
- .parse(format!("filter(<({}, [1]), point{{[0]}})", ".fa").as_str())
+ .parse(format!("filter(<({}, [1]), inside(point{{[0]}}))", ".fa").as_str())
.is_ok());
assert!(p
- .parse(format!("filter(<({}, [1]), point{{[0]}})", ".f2").as_str())
+ .parse(format!("filter(<({}, [1]), inside(point{{[0]}}))", ".f2").as_str())
.is_ok());
assert!(p
- .parse(format!("filter(<({}, [1]), point{{[0]}})", ".f_").as_str())
+ .parse(format!("filter(<({}, [1]), inside(point{{[0]}}))", ".f_").as_str())
.is_ok());
assert!(p
- .parse(format!("filter(<({}, [1]), point{{[0]}})", ".f2").as_str())
+ .parse(format!("filter(<({}, [1]), inside(point{{[0]}}))", ".f2").as_str())
.is_ok());

// Check we can add subscript
assert!(p
- .parse(format!("filter(<({}, [1]), point{{[0]}})", ".[23]").as_str())
+ .parse(format!("filter(<({}, [1]), inside(point{{[0]}}))", ".[23]").as_str())
.is_ok());
assert!(p
- .parse(format!("filter(<({}, [1]), point{{[0]}})", ".f[0]").as_str())
+ .parse(format!("filter(<({}, [1]), inside(point{{[0]}}))", ".f[0]").as_str())
.is_ok());
assert!(p
- .parse(format!("filter(<({}, [1]), point{{[0]}})", ".f[2]").as_str())
+ .parse(format!("filter(<({}, [1]), inside(point{{[0]}}))", ".f[2]").as_str())
.is_ok());
assert!(p
- .parse(format!("filter(<({}, [1]), point{{[0]}})", ".f[23]").as_str())
+ .parse(format!("filter(<({}, [1]), inside(point{{[0]}}))", ".f[23]").as_str())
.is_ok());

// Invalid index values
assert!(p
- .parse(format!("filter(<({}, [1]), point{{[0]}})", ".f[2.3]").as_str())
+ .parse(format!("filter(<({}, [1]), inside(point{{[0]}}))", ".f[2.3]").as_str())
.is_err());
assert!(p
- .parse(format!("filter(<({}, [1]), point{{[0]}})", ".f[02]").as_str())
+ .parse(format!("filter(<({}, [1]), inside(point{{[0]}}))", ".f[02]").as_str())
.is_err());
assert!(p
- .parse(format!("filter(<({}, [1]), point{{[0]}})", ".f[-2]").as_str())
+ .parse(format!("filter(<({}, [1]), inside(point{{[0]}}))", ".f[-2]").as_str())
.is_err());
assert!(p
- .parse(format!("filter(<({}, [1]), point{{[0]}})", ".f[2e2]").as_str())
+ .parse(format!("filter(<({}, [1]), inside(point{{[0]}}))", ".f[2e2]").as_str())
.is_err());
assert!(p
- .parse(format!("filter(<({}, [1]), point{{[0]}})", ".f[2E2]").as_str())
+ .parse(format!("filter(<({}, [1]), inside(point{{[0]}}))", ".f[2E2]").as_str())
.is_err());
assert!(p
- .parse(format!("filter(<({}, [1]), point{{[0]}})", ".f[+2]").as_str())
+ .parse(format!("filter(<({}, [1]), inside(point{{[0]}}))", ".f[+2]").as_str())
.is_err());
}

@@ -836,42 +836,42 @@ mod parsing {

// Integers
assert!(p
- .parse(format!("hypersphere{{[0],{}}}", "0").as_str())
+ .parse(format!("inside(hypersphere{{[0],{}}})", "0").as_str())
.is_ok());
assert!(p
- .parse(format!("hypersphere{{[0],{}}}", "+0").as_str())
+ .parse(format!("inside(hypersphere{{[0],{}}})", "+0").as_str())
.is_ok());
assert!(p
- .parse(format!("hypersphere{{[0],{}}}", "-0").as_str())
+ .parse(format!("inside(hypersphere{{[0],{}}})", "-0").as_str())
.is_err());
assert!(p
- .parse(format!("hypersphere{{[0],{}}}", "1").as_str())
+ .parse(format!("inside(hypersphere{{[0],{}}})", "1").as_str())
.is_ok());
assert!(p
- .parse(format!("hypersphere{{[0],{}}}", "+1").as_str())
+ .parse(format!("inside(hypersphere{{[0],{}}})", "+1").as_str())
.is_ok());
assert!(p
- .parse(format!("hypersphere{{[0],{}}}", "-1").as_str())
+ .parse(format!("inside(hypersphere{{[0],{}}})", "-1").as_str())
.is_err());

// Floating point values
assert!(p
- .parse(format!("hypersphere{{[0],{}}}", "0.0").as_str())
+ .parse(format!("inside(hypersphere{{[0],{}}})", "0.0").as_str())
.is_ok());
assert!(p
- .parse(format!("hypersphere{{[0],{}}}", "+0.0").as_str())
+ .parse(format!("inside(hypersphere{{[0],{}}})", "+0.0").as_str())
.is_ok());
assert!(p
- .parse(format!("hypersphere{{[0],{}}}", "-0.0").as_str())
+ .parse(format!("inside(hypersphere{{[0],{}}})", "-0.0").as_str())
.is_err());
assert!(p
- .parse(format!("hypersphere{{[0],{}}}", "0.1").as_str())
+ .parse(format!("inside(hypersphere{{[0],{}}})", "0.1").as_str())
.is_ok());
assert!(p
- .parse(format!("hypersphere{{[0],{}}}", "+0.01").as_str())
+ .parse(format!("inside(hypersphere{{[0],{}}})", "+0.01").as_str())
.is_ok());
assert!(p
- .parse(format!("hypersphere{{[0],{}}}", "-0.01").as_str())
+ .parse(format!("inside(hypersphere{{[0],{}}})", "-0.01").as_str())
.is_err());
}

@@ -880,20 +880,20 @@ mod parsing {
let p = filters_parser();

// Integers
- assert!(p.parse(format!("point{{[{}]}}", "0").as_str()).is_ok());
+ assert!(p.parse(format!("inside(point{{[{}]}})", "0").as_str()).is_ok());
- assert!(p.parse(format!("point{{[{}]}}", "+0").as_str()).is_ok());
+ assert!(p.parse(format!("inside(point{{[{}]}})", "+0").as_str()).is_ok());
- assert!(p.parse(format!("point{{[{}]}}", "-0").as_str()).is_ok());
+ assert!(p.parse(format!("inside(point{{[{}]}})", "-0").as_str()).is_ok());
- assert!(p.parse(format!("point{{[{}]}}", "1").as_str()).is_ok());
+ assert!(p.parse(format!("inside(point{{[{}]}})", "1").as_str()).is_ok());
- assert!(p.parse(format!("point{{[{}]}}", "+1").as_str()).is_ok());
+ assert!(p.parse(format!("inside(point{{[{}]}})", "+1").as_str()).is_ok());
- assert!(p.parse(format!("point{{[{}]}}", "-1").as_str()).is_ok());
+ assert!(p.parse(format!("inside(point{{[{}]}})", "-1").as_str()).is_ok());

// Floating point values
- assert!(p.parse(format!("point{{[{}]}}", "0.0").as_str()).is_ok());
+ assert!(p.parse(format!("inside(point{{[{}]}})", "0.0").as_str()).is_ok());
- assert!(p.parse(format!("point{{[{}]}}", "+0.0").as_str()).is_ok());
+ assert!(p.parse(format!("inside(point{{[{}]}})", "+0.0").as_str()).is_ok());
- assert!(p.parse(format!("point{{[{}]}}", "-0.0").as_str()).is_ok());
+ assert!(p.parse(format!("inside(point{{[{}]}})", "-0.0").as_str()).is_ok());
- assert!(p.parse(format!("point{{[{}]}}", "0.1").as_str()).is_ok());
+ assert!(p.parse(format!("inside(point{{[{}]}})", "0.1").as_str()).is_ok());
- assert!(p.parse(format!("point{{[{}]}}", "+0.01").as_str()).is_ok());
+ assert!(p.parse(format!("inside(point{{[{}]}})", "+0.01").as_str()).is_ok());
- assert!(p.parse(format!("point{{[{}]}}", "-0.01").as_str()).is_ok());
+ assert!(p.parse(format!("inside(point{{[{}]}})", "-0.01").as_str()).is_ok());
}

#[test]
@@ -901,54 +901,54 @@ mod parsing {
let p = filters_parser();

// Integers
- assert!(p.parse(format!("point{{[{}]}}", "0").as_str()).is_ok());
+ assert!(p.parse(format!("inside(point{{[{}]}})", "0").as_str()).is_ok());
- assert!(p.parse(format!("point{{[{}]}}", "1").as_str()).is_ok());
+ assert!(p.parse(format!("inside(point{{[{}]}})", "1").as_str()).is_ok());
- assert!(p.parse(format!("point{{[{}]}}", "1e2").as_str()).is_ok());
+ assert!(p.parse(format!("inside(point{{[{}]}})", "1e2").as_str()).is_ok());
- assert!(p.parse(format!("point{{[{}]}}", "1e+2").as_str()).is_ok());
+ assert!(p.parse(format!("inside(point{{[{}]}})", "1e+2").as_str()).is_ok());
- assert!(p.parse(format!("point{{[{}]}}", "1e-2").as_str()).is_ok());
+ assert!(p.parse(format!("inside(point{{[{}]}})", "1e-2").as_str()).is_ok());
- assert!(p.parse(format!("point{{[{}]}}", "1E2").as_str()).is_ok());
+ assert!(p.parse(format!("inside(point{{[{}]}})", "1E2").as_str()).is_ok());
- assert!(p.parse(format!("point{{[{}]}}", "100").as_str()).is_ok());
+ assert!(p.parse(format!("inside(point{{[{}]}})", "100").as_str()).is_ok());

- assert!(p.parse(format!("point{{[{}]}}", "010").as_str()).is_err());
+ assert!(p.parse(format!("inside(point{{[{}]}})", "010").as_str()).is_err());

// Floating point values (normalized)
- assert!(p.parse(format!("point{{[{}]}}", "0.0").as_str()).is_ok());
+ assert!(p.parse(format!("inside(point{{[{}]}})", "0.0").as_str()).is_ok());
- assert!(p.parse(format!("point{{[{}]}}", "0.1").as_str()).is_ok());
+ assert!(p.parse(format!("inside(point{{[{}]}})", "0.1").as_str()).is_ok());
- assert!(p.parse(format!("point{{[{}]}}", "0.1e0").as_str()).is_ok());
+ assert!(p.parse(format!("inside(point{{[{}]}})", "0.1e0").as_str()).is_ok());
- assert!(p.parse(format!("point{{[{}]}}", "0.1e2").as_str()).is_ok());
+ assert!(p.parse(format!("inside(point{{[{}]}})", "0.1e2").as_str()).is_ok());
- assert!(p.parse(format!("point{{[{}]}}", "0.1e+2").as_str()).is_ok());
+ assert!(p.parse(format!("inside(point{{[{}]}})", "0.1e+2").as_str()).is_ok());
- assert!(p.parse(format!("point{{[{}]}}", "0.1e-2").as_str()).is_ok());
+ assert!(p.parse(format!("inside(point{{[{}]}})", "0.1e-2").as_str()).is_ok());
- assert!(p.parse(format!("point{{[{}]}}", "0.1E2").as_str()).is_ok());
+ assert!(p.parse(format!("inside(point{{[{}]}})", "0.1E2").as_str()).is_ok());
- assert!(p.parse(format!("point{{[{}]}}", "0.1E23").as_str()).is_ok());
+ assert!(p.parse(format!("inside(point{{[{}]}})", "0.1E23").as_str()).is_ok());
- assert!(p.parse(format!("point{{[{}]}}", "0.01").as_str()).is_ok());
+ assert!(p.parse(format!("inside(point{{[{}]}})", "0.01").as_str()).is_ok());

- assert!(p.parse(format!("point{{[{}]}}", "0.").as_str()).is_err());
+ assert!(p.parse(format!("inside(point{{[{}]}})", "0.").as_str()).is_err());
assert!(p
- .parse(format!("point{{[{}]}}", "0.1E03").as_str())
+ .parse(format!("inside(point{{[{}]}})", "0.1E03").as_str())
.is_err());
assert!(p
- .parse(format!("point{{[{}]}}", "0.1E0.3").as_str())
+ .parse(format!("inside(point{{[{}]}})", "0.1E0.3").as_str())
.is_err());

// Floating point values (denormalized)
- assert!(p.parse(format!("point{{[{}]}}", "1.0").as_str()).is_ok());
+ assert!(p.parse(format!("inside(point{{[{}]}})", "1.0").as_str()).is_ok());
- assert!(p.parse(format!("point{{[{}]}}", "1.1").as_str()).is_ok());
+ assert!(p.parse(format!("inside(point{{[{}]}})", "1.1").as_str()).is_ok());
- assert!(p.parse(format!("point{{[{}]}}", "1.1e0").as_str()).is_ok());
+ assert!(p.parse(format!("inside(point{{[{}]}})", "1.1e0").as_str()).is_ok());
- assert!(p.parse(format!("point{{[{}]}}", "1.1e2").as_str()).is_ok());
+ assert!(p.parse(format!("inside(point{{[{}]}})", "1.1e2").as_str()).is_ok());
- assert!(p.parse(format!("point{{[{}]}}", "1.1e+2").as_str()).is_ok());
+ assert!(p.parse(format!("inside(point{{[{}]}})", "1.1e+2").as_str()).is_ok());
- assert!(p.parse(format!("point{{[{}]}}", "1.1e-2").as_str()).is_ok());
+ assert!(p.parse(format!("inside(point{{[{}]}})", "1.1e-2").as_str()).is_ok());
- assert!(p.parse(format!("point{{[{}]}}", "1.1E2").as_str()).is_ok());
+ assert!(p.parse(format!("inside(point{{[{}]}})", "1.1E2").as_str()).is_ok());
- assert!(p.parse(format!("point{{[{}]}}", "1.1E23").as_str()).is_ok());
+ assert!(p.parse(format!("inside(point{{[{}]}})", "1.1E23").as_str()).is_ok());
- assert!(p.parse(format!("point{{[{}]}}", "1.01").as_str()).is_ok());
+ assert!(p.parse(format!("inside(point{{[{}]}})", "1.01").as_str()).is_ok());
- assert!(p.parse(format!("point{{[{}]}}", "10.1").as_str()).is_ok());
+ assert!(p.parse(format!("inside(point{{[{}]}})", "10.1").as_str()).is_ok());

- assert!(p.parse(format!("point{{[{}]}}", "1.").as_str()).is_err());
+ assert!(p.parse(format!("inside(point{{[{}]}})", "1.").as_str()).is_err());
- assert!(p.parse(format!("point{{[{}]}}", "01.1").as_str()).is_err());
+ assert!(p.parse(format!("inside(point{{[{}]}})", "01.1").as_str()).is_err());
assert!(p
- .parse(format!("point{{[{}]}}", "1.1E03").as_str())
+ .parse(format!("inside(point{{[{}]}})", "1.1E03").as_str())
.is_err());
assert!(p
- .parse(format!("point{{[{}]}}", "1.1E0.3").as_str())
+ .parse(format!("inside(point{{[{}]}})", "1.1E0.3").as_str())
.is_err());
}
}
21 src/types.rs
@@ -11,23 +11,10 @@ pub enum LiteralTypes {
impl PartialEq for LiteralTypes {
fn eq(&self, other: &Self) -> bool {
match self {
- LiteralTypes::String => match other {
- LiteralTypes::String => true,
- _ => false,
- },
- LiteralTypes::Int => match other {
- LiteralTypes::Int => true,
- _ => false,
- },
- LiteralTypes::Float => match other {
- LiteralTypes::Float => true,
- LiteralTypes::Int => true,
- _ => false,
- },
- LiteralTypes::Bag(_) => match other {
- LiteralTypes::Bag(_) => true,
- _ => false,
- },
+ LiteralTypes::String => matches!(other, LiteralTypes::String),
+ LiteralTypes::Int => matches!(other, LiteralTypes::Int),
+ LiteralTypes::Float => matches!(other, LiteralTypes::Float | LiteralTypes::Int),
+ LiteralTypes::Bag(_) => matches!(other, LiteralTypes::Bag(_)),
LiteralTypes::Vector(v) => match other {
LiteralTypes::Vector(ov) => {
let n = v.len();
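The hunk above collapses each nested two-arm `match` into a single `matches!` call, the usual shorthand when an arm only needs to report whether the other value fits a pattern; the `Float` arm also shows the or-pattern form. A self-contained sketch of the equivalence, using a throwaway enum rather than the crate's `LiteralTypes`:

```rust
// Illustrative enum, not the crate's own types.
enum Kind {
    Int,
    Float,
    Text,
}

// Before: a nested match whose arms only answer "same shape or not".
fn is_numeric_verbose(k: &Kind) -> bool {
    match k {
        Kind::Float => true,
        Kind::Int => true,
        _ => false,
    }
}

// After: the same check via matches! with an or-pattern, mirroring the Float arm above.
fn is_numeric_matches(k: &Kind) -> bool {
    matches!(k, Kind::Float | Kind::Int)
}

fn main() {
    for k in [Kind::Int, Kind::Float, Kind::Text] {
        assert_eq!(is_numeric_verbose(&k), is_numeric_matches(&k));
    }
}
```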
@@ -9,7 +9,7 @@ impl Validator for Projection {
fn validate(&self) -> ValidationResult {
match self {
Projection::Nifti(_, _, _) => Err("not yet implemented".to_string()),
- Projection::JSON(_, _format, bag) => bag.validate(),
+ Projection::Json(_, _format, bag) => bag.validate(),
//FIXME: Add support for projections
/* match format.validate() {
Ok(_) => bag.validate(),
@@ -54,21 +54,14 @@ impl Validator for Bag {
}

match self {
- Bag::ViewPort(bag) => bag.validate(),
Bag::Distinct(bag) => bag.validate(),
- Bag::Filter(_, bag) => match bag {
- None => Ok(LiteralPosition(vec![]).get_type()),
- Some(b) => b.validate(),
- },
+ Bag::Filter(_, bag) => bag.validate(),
Bag::Complement(bag) => bag.validate(),
Bag::Intersection(lh, rh) => compare_bag_types(lh, rh),
Bag::Union(lh, rh) => compare_bag_types(lh, rh),
Bag::Bag(bags) => {
for b in bags {
- let t = b.validate();
- if t.is_err() {
- return t;
- }
+ b.validate()?;
}

Ok(get_type())
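The final hunk swaps the explicit `let t = b.validate(); if t.is_err() { return t; }` dance for `b.validate()?;`, which propagates the first error and discards the success value, so the loop keeps its short-circuit behaviour. A small self-contained sketch of that pattern with stand-in types (the crate's own `ValidationResult` is not reproduced here):

```rust
// Illustration of the `?`-based short-circuit used in Bag::Bag's validation.
fn validate_one(x: i32) -> Result<(), String> {
    if x >= 0 {
        Ok(())
    } else {
        Err(format!("negative value: {x}"))
    }
}

fn validate_all(xs: &[i32]) -> Result<(), String> {
    for x in xs {
        // Equivalent to: let t = validate_one(*x); if t.is_err() { return t; }
        validate_one(*x)?;
    }
    Ok(())
}

fn main() {
    assert!(validate_all(&[1, 2, 3]).is_ok());
    assert!(validate_all(&[1, -2, 3]).is_err());
}
```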