2 Commits
master ... lts

Author SHA1 Message Date
40786112d0 Updated compiler and dependencies
* Upgraded to rustc 1.80
* Updated dependencies
* Fixed most linter warnings
2024-08-09 19:15:37 +02:00
f68ec7af14 Fix compilation with rustc 1.4.2
More adaptations are required to be able to use the most recent version
of Rust, therefore fix the dependencies and the compiler version.
2024-08-08 12:14:21 +02:00
10 changed files with 621 additions and 658 deletions


@@ -29,9 +29,6 @@ name = "parser-driver"
 path = "src/main.rs"
 required-features = ["bin"]
 
-[profile.release]
-lto = true
-
 [features]
 bin = ["measure_time", "pretty_env_logger"]
@@ -39,14 +36,16 @@ bin = ["measure_time", "pretty_env_logger"]
 mercator_db = "0.1"
 lalrpop-util = "0.20"
+regex = "1.10"
 
 # Logging macros API
 #log = { version = "0.4", features = ["max_level_trace", "release_max_level_info"] }
 log = { version = "0.4", features = ["max_level_trace", "release_max_level_trace"] }
 
 # Used for main.rs
 pretty_env_logger = { version = "0.5", optional = true } # Logger implementation
 measure_time = { version = "0.8", optional = true } # To mesure parsing time, only required by binary
 
 [build-dependencies]
 lalrpop = "0.20"


@@ -1,354 +1,364 @@
use std::collections::{HashMap, HashSet}; use std::collections::{HashMap, HashSet};
use std::rc::Rc;
use mercator_db::space; use mercator_db::space;
use mercator_db::Core; use mercator_db::Core;
use mercator_db::CoreQueryParameters; use mercator_db::CoreQueryParameters;
use mercator_db::IterObjects; use mercator_db::Properties;
use mercator_db::IterObjectsBySpaces;
use super::expressions::*; use super::expressions::*;
use super::symbols::*; use super::symbols::*;
fn group_by_space<'s>( impl From<&LiteralPosition> for space::Position {
list: IterObjectsBySpaces<'s>, fn from(literal: &LiteralPosition) -> Self {
) -> Box<dyn Iterator<Item = (&'s String, IterObjects<'s>)> + 's> { let v: Vec<f64> = literal.into();
// Filter per Properties, in order to regroup by it, then build v.into()
// a single SpatialObject per Properties.
let mut hashmap = HashMap::new();
for (space, objects) in list {
hashmap.entry(space).or_insert_with(Vec::new).push(objects);
}
Box::new(hashmap.into_iter().map(|(space, objects)| {
let objects: IterObjects = Box::new(objects.into_iter().flatten());
(space, objects)
}))
}
fn distinct_helper(list: IterObjectsBySpaces) -> IterObjectsBySpaces {
// Make sure to collect all objects iterators per space, so that
// each space appears only once.
group_by_space(list)
// We would lose some objects otherwise when creating the
// HashMaps. Also this makes sure to keep the values are unique.
.map(|(space, iter)| {
let uniques: HashSet<_> = iter.collect();
let uniques: IterObjects = Box::new(uniques.into_iter());
(space, uniques)
})
.collect()
}
fn into_positions_hashset(
objects_by_spaces: IterObjectsBySpaces,
) -> HashMap<&String, Rc<HashSet<space::Position>>> {
// Make sure to collect all objects iterators per space, so that
// each space appears only once.
group_by_space(objects_by_spaces)
// We would lose some objects otherwise when creating the HashSets.
.map(|(space, iter)| {
let hash_set: HashSet<_> = iter.map(|(position, _)| position).collect();
(space, Rc::new(hash_set))
})
.collect::<HashMap<_, _>>()
}
// Strictly not inside nor on the surface.
// TODO: inside must contains the valid positions in all expected spaces
fn complement_helper<'h>(
core: &'h Core,
parameters: &'h CoreQueryParameters<'h>,
space_id: &'h str,
inside: IterObjectsBySpaces<'h>,
) -> mercator_db::ResultSet<'h> {
let (low, high) = parameters.db.space(space_id)?.bounding_box();
let inside = into_positions_hashset(inside);
let points = core.get_by_shape(parameters, space::Shape::BoundingBox(low, high), space_id)?;
let results = points
.into_iter()
.filter_map(move |(space, v)| match inside.get(space) {
None => None, // Space not found, so no point might exist!
Some(volume) => {
let volume = volume.clone();
let iter: IterObjects = Box::new(v.filter(move |a| !volume.contains(&a.0)));
Some((space, iter))
}
})
.collect();
Ok(results)
}
// Intersection based only on spatial positions!
fn intersect_helper<'h>(
smaller: IterObjectsBySpaces<'h>,
bigger: IterObjectsBySpaces<'h>,
) -> IterObjectsBySpaces<'h> {
let smaller = into_positions_hashset(smaller);
bigger
.into_iter()
.filter_map(
move |(space, bigger_object_iter)| match smaller.get(space) {
None => None,
Some(volume) => {
let volume = volume.clone();
let filtered: IterObjects =
Box::new(bigger_object_iter.filter(move |a| volume.contains(&a.0)));
Some((space, filtered))
}
},
)
.collect()
}
impl Bag {
fn distinct<'b>(
&'b self,
core_id: &'b str,
parameters: &'b CoreQueryParameters<'b>,
) -> mercator_db::ResultSet<'b> {
let results = self.execute(core_id, parameters)?;
Ok(distinct_helper(results))
}
fn complement<'b>(
&'b self,
core_id: &'b str,
parameters: &'b CoreQueryParameters<'b>,
core: &'b Core,
) -> mercator_db::ResultSet<'b> {
let inside = self.execute(core_id, parameters)?;
// FIXME: The complement of a set should be computed within its
// definition space. We don't know here so we use universe
complement_helper(
core,
parameters,
mercator_db::space::Space::universe().name(),
inside,
)
}
fn intersection<'b>(
&'b self,
core_id: &'b str,
parameters: &'b CoreQueryParameters<'b>,
rh: &'b Bag,
) -> mercator_db::ResultSet<'b> {
let left = self.execute(core_id, parameters)?;
let right = rh.execute(core_id, parameters)?;
let v = if rh.predict(parameters.db) < self.predict(parameters.db) {
intersect_helper(right, left)
} else {
intersect_helper(left, right)
};
Ok(v)
}
fn union<'b>(
&'b self,
core_id: &'b str,
parameters: &'b CoreQueryParameters<'b>,
rh: &'b Bag,
) -> mercator_db::ResultSet<'b> {
let mut left = self.execute(core_id, parameters)?;
let mut right = rh.execute(core_id, parameters)?;
let union = if rh.predict(parameters.db) < self.predict(parameters.db) {
left.append(&mut right);
left
} else {
right.append(&mut left);
right
};
Ok(union)
}
fn filter<'b>(
&'b self,
predicate: &'b Predicate,
core_id: &'b str,
parameters: &'b CoreQueryParameters<'b>,
) -> mercator_db::ResultSet<'b> {
let results = self.execute(core_id, parameters)?;
Ok(results
.into_iter()
.map(move |(space, positions)| {
let positions = positions.collect::<Vec<_>>();
(
space,
Box::new(positions.into_iter().filter(move |(position, properties)| {
predicate.eval((space, position, properties))
})) as IterObjects,
)
})
.collect())
} }
} }
impl Shape { impl From<&LiteralNumber> for space::Coordinate {
fn inside<'s>( fn from(literal: &LiteralNumber) -> Self {
&'s self, match literal {
parameters: &'s CoreQueryParameters<'s>, LiteralNumber::Float(f) => (*f).into(),
core: &'s Core, LiteralNumber::Int(i) => (*i as u64).into(),
) -> mercator_db::ResultSet<'s> {
let db = parameters.db;
let param = match self {
Shape::Point(space_id, position) => {
let space = db.space(space_id)?;
let position: Vec<f64> = position.into();
let position = space.encode(&position)?;
Ok((space_id, space::Shape::Point(position)))
}
Shape::HyperRectangle(space_id, bounding_box) => {
if bounding_box.len() != 2 {
//FIXME: Support arbitrary HyperRectangles
Err(
"The number of position is different from 2, which is unsupported."
.to_string(),
)
} else {
let space = db.space(space_id)?;
let low: Vec<f64> = (&bounding_box[0]).into();
let high: Vec<f64> = (&bounding_box[1]).into();
let low = space.encode(&low)?;
let high = space.encode(&high)?;
Ok((space_id, space::Shape::BoundingBox(low, high)))
}
}
Shape::HyperSphere(space_id, position, radius) => {
let space = db.space(space_id)?;
let position: Vec<f64> = position.into();
let position = space.encode(&position)?;
// We have to provide a position with all the dimensions
// for the encoding to work as expected.
let mut r = vec![0f64; position.dimensions()];
r[0] = radius.into();
let radius = space.encode(&r)?[0];
Ok((space_id, space::Shape::HyperSphere(position, radius)))
}
Shape::Label(_, id) => {
// Not a real shape, so short circuit and return.
return core.get_by_label(parameters, id);
}
Shape::Nifti(_space_id) => Err("Inside-Nifti: not yet implemented".to_string()),
};
match param {
Ok((space_id, shape)) => core.get_by_shape(parameters, shape, space_id),
Err(e) => Err(e),
} }
} }
}
fn outside<'s>( fn complement_helper<'c>(
&'s self, core: &'c Core,
parameters: &'s CoreQueryParameters<'s>, parameters: &CoreQueryParameters<'c>,
core: &'s Core, space_id: &str,
) -> mercator_db::ResultSet<'s> { inside: Vec<(&'c String, Vec<(space::Position, &'c Properties)>)>,
let (space_id, inside) = match self { ) -> mercator_db::ResultSet<'c> {
Shape::Point(space_id, position) => { let (low, high) = parameters.db.space(space_id)?.bounding_box();
let position: Vec<f64> = position.into(); match core.get_by_shape(parameters, &space::Shape::BoundingBox(low, high), space_id) {
let positions = vec![position.into()]; e @ Err(_) => e,
let inside = core.get_by_positions(parameters, positions, space_id)?; Ok(points) => {
let hashmap = inside.into_iter().collect::<HashMap<_, _>>();
Ok((space_id, inside)) Ok(points
} .into_iter()
Shape::HyperRectangle(space_id, bounding_box) => { .filter_map(|(space, v)|
// We need to adapt the bounding_box to ensure the hashmap.get(space).map(|list|
// surface will not hit as part of the inside set, so we (space, v.into_iter().filter(|t|
// compute the biggest bounding box contained within the !list.contains(t)).collect())))
// given box. .collect::<Vec<_>>())
}
}
}
// Smallest increment possible fn view_port<'c>(
let mut increment = Vec::with_capacity(bounding_box[0].dimensions()); core_id: &str,
for _ in 0..bounding_box[0].dimensions() { parameters: &CoreQueryParameters<'c>,
increment.push(f64::EPSILON); bag: &Bag,
) -> mercator_db::ResultSet<'c> {
if let Some((low, high)) = parameters.view_port {
let vp = Bag::Inside(Shape::HyperRectangle(
bag.space().clone(),
vec![low.into(), high.into()],
));
intersection(core_id, parameters, &vp, bag)
} else {
bag.execute(core_id, parameters)
}
}
fn distinct<'c>(
core_id: &str,
parameters: &CoreQueryParameters<'c>,
bag: &Bag,
) -> mercator_db::ResultSet<'c> {
match bag.execute(core_id, parameters) {
e @ Err(_) => e,
Ok(mut v) => {
let set: HashSet<_> = v.drain(..).collect(); // dedup
v.extend(set);
Ok(v)
}
}
}
fn filter_helper<'c>(
predicate: &Predicate,
bag: &Bag,
core_id: &str,
parameters: &CoreQueryParameters<'c>,
) -> mercator_db::ResultSet<'c> {
match bag.execute(core_id, parameters) {
e @ Err(_) => e,
Ok(results) => Ok(results
.into_iter()
.filter_map(|(space, positions)| {
let filtered = positions
.into_iter()
.filter(|(position, properties)| predicate.eval((space, position, properties)))
.collect::<Vec<_>>();
if filtered.is_empty() {
None
} else {
Some((space, filtered))
} }
})
// Add it to the lower bound .collect::<Vec<_>>()),
let mut low: space::Position = (&bounding_box[0]).into();
low += increment.clone().into();
// Substract it from the upper bound
let mut high: space::Position = (&bounding_box[1]).into();
high -= increment.into();
let inside =
core.get_by_shape(parameters, space::Shape::BoundingBox(low, high), space_id)?;
Ok((space_id, inside))
}
Shape::HyperSphere(space_id, center, radius) => {
// Smallest decrement possible, to exclude the surface
let mut radius: f64 = radius.into();
radius -= f64::EPSILON;
let center: space::Position = center.into();
let inside = core.get_by_shape(
parameters,
space::Shape::HyperSphere(center, radius.into()),
space_id,
)?;
Ok((space_id, inside))
}
Shape::Label(space_id, id) => {
let inside = core.get_by_label(parameters, id)?;
Ok((space_id, inside))
}
Shape::Nifti(_space_id) => Err("Outside-nifti: not yet implemented".to_string()),
}?;
complement_helper(core, parameters, space_id, inside)
} }
} }
fn filter<'c>( fn filter<'c>(
core_id: &'c str, core_id: &str,
parameters: &'c CoreQueryParameters<'c>, parameters: &CoreQueryParameters<'c>,
predicate: &'c Option<Predicate>, predicate: &Option<Predicate>,
bag: &'c Bag, bag: &Option<Box<Bag>>,
) -> mercator_db::ResultSet<'c> { ) -> mercator_db::ResultSet<'c> {
match predicate { match predicate {
None => bag.execute(core_id, parameters), None => {
Some(predicate) => bag.filter(predicate, core_id, parameters), if let Some(bag) = bag {
bag.execute(core_id, parameters)
} else {
Err("Filter without predicate nor data set.".to_string())
}
}
Some(predicate) => match bag {
None => {
let (low, high) = space::Space::universe().bounding_box();
let low: Vec<_> = low.into();
let high: Vec<_> = high.into();
let shape = Shape::HyperRectangle(
space::Space::universe().name().clone(),
vec![
LiteralPosition(
low.into_iter()
.map(LiteralNumber::Float)
.collect::<Vec<_>>(),
),
LiteralPosition(
high.into_iter()
.map(LiteralNumber::Float)
.collect::<Vec<_>>(),
),
],
);
filter_helper(predicate, &Bag::Inside(shape), core_id, parameters)
}
Some(bag) => filter_helper(predicate, bag.as_ref(), core_id, parameters),
},
}
}
fn complement<'c>(
core_id: &str,
parameters: &CoreQueryParameters<'c>,
core: &'c Core,
bag: &Bag,
) -> mercator_db::ResultSet<'c> {
match bag.execute(core_id, parameters) {
// FIXME: The complement of a set is computed within its definition space.
e @ Err(_) => e,
Ok(inside) => complement_helper(
core,
parameters,
mercator_db::space::Space::universe().name(),
inside,
),
}
}
fn intersection<'c>(
core_id: &str,
parameters: &CoreQueryParameters<'c>,
rh: &Bag,
lh: &Bag,
) -> mercator_db::ResultSet<'c> {
let l = lh.execute(core_id, parameters);
if let Ok(l) = l {
let r = rh.execute(core_id, parameters);
if let Ok(r) = r {
let mut v = vec![];
if rh.predict(parameters.db) < lh.predict(parameters.db) {
for o in r {
if l.contains(&o) {
v.push(o);
}
}
} else {
for o in l {
if r.contains(&o) {
v.push(o);
}
}
}
Ok(v)
} else {
r
}
} else {
l
}
}
fn union<'c>(
core_id: &str,
parameters: &CoreQueryParameters<'c>,
rh: &Bag,
lh: &Bag,
) -> mercator_db::ResultSet<'c> {
let l = lh.execute(core_id, parameters);
if let Ok(mut l) = l {
let r = rh.execute(core_id, parameters);
if let Ok(mut r) = r {
if rh.predict(parameters.db) < lh.predict(parameters.db) {
l.append(&mut r);
Ok(l)
} else {
r.append(&mut l);
Ok(r)
}
} else {
r
}
} else {
l
} }
} }
fn bag<'c>( fn bag<'c>(
core_id: &'c str, core_id: &str,
parameters: &'c CoreQueryParameters<'c>, parameters: &CoreQueryParameters<'c>,
bags: &'c [Bag], bags: &[Bag],
) -> mercator_db::ResultSet<'c> { ) -> mercator_db::ResultSet<'c> {
let mut results = Vec::new(); let mut v = vec![];
for bag in bags { for bag in bags {
let mut result = bag.execute(core_id, parameters)?; let b = bag.execute(core_id, parameters);
results.append(&mut result); match b {
e @ Err(_) => {
return e;
}
Ok(mut b) => {
v.append(&mut b);
}
}
} }
Ok(results) Ok(v)
}
fn inside<'c>(
parameters: &CoreQueryParameters<'c>,
core: &'c Core,
shape: &Shape,
) -> mercator_db::ResultSet<'c> {
let db = parameters.db;
let param = match shape {
Shape::Point(space_id, position) => {
let space = db.space(space_id)?;
let position: Vec<f64> = position.into();
let position = space.encode(&position)?;
Ok((space_id, space::Shape::Point(position)))
}
Shape::HyperRectangle(space_id, bounding_box) => {
if bounding_box.len() != 2 {
Err("The number of position is different from 2, which is unsupported.".to_string())
} else {
let space = db.space(space_id)?;
let low: Vec<f64> = (&bounding_box[0]).into();
let high: Vec<f64> = (&bounding_box[1]).into();
let low = space.encode(&low)?;
let high = space.encode(&high)?;
Ok((space_id, space::Shape::BoundingBox(low, high)))
}
}
Shape::HyperSphere(space_id, position, radius) => {
let space = db.space(space_id)?;
let position: Vec<f64> = position.into();
let position = space.encode(&position)?;
let mut r = vec![];
for _ in 0..position.dimensions() {
r.push(radius.into());
}
let radius = space.encode(&r)?[0];
//FIXME: RADIUS IS A LENGTH, HOW TO ENCODE IT INTO THE SPACE?
Ok((space_id, space::Shape::HyperSphere(position, radius)))
}
Shape::Label(_, id) => {
// Not a real shape, so short circuit and return.
return core.get_by_label(parameters, id);
}
Shape::Nifti(_space_id) => Err("Inside-Nifti: not yet implemented".to_string()),
};
match param {
Ok((space_id, shape)) => core.get_by_shape(parameters, &shape, space_id),
Err(e) => Err(e),
}
}
fn outside<'c>(
parameters: &CoreQueryParameters<'c>,
core: &'c Core,
shape: &Shape,
) -> mercator_db::ResultSet<'c> {
match shape {
Shape::Point(space_id, position) => {
let position: Vec<f64> = position.into();
match core.get_by_positions(parameters, &[position.into()], space_id) {
e @ Err(_) => e,
Ok(inside) => complement_helper(core, parameters, space_id, inside),
}
}
Shape::HyperRectangle(space_id, bounding_box) => {
// We need to adapt the bounding_box to ensure the
// surface will not hit as part of the inside set, so we
// compute the biggest bounding box contained within the
// given box.
// Smallest increment possible
let mut increment = Vec::with_capacity(bounding_box[0].dimensions());
for _ in 0..bounding_box[0].dimensions() {
increment.push(f64::EPSILON);
}
// Add it to the lower bound
let mut low: space::Position = (&bounding_box[0]).into();
low += increment.clone().into();
// Substract it from the upper bound
let mut high: space::Position = (&bounding_box[1]).into();
high -= increment.into();
match core.get_by_shape(parameters, &space::Shape::BoundingBox(low, high), space_id) {
e @ Err(_) => e,
Ok(inside) => complement_helper(core, parameters, space_id, inside),
}
}
Shape::HyperSphere(space_id, center, radius) => {
// Smallest decrement possible, to exclude the surface
let mut radius: f64 = radius.into();
radius -= f64::EPSILON;
let center: space::Position = center.into();
match core.get_by_shape(
parameters,
&space::Shape::HyperSphere(center, radius.into()),
space_id,
) {
e @ Err(_) => e,
Ok(inside) => complement_helper(core, parameters, space_id, inside),
}
}
Shape::Label(_, _) => Err("Label: not yet implemented".to_string()),
Shape::Nifti(_space_id) => Err("Outside-nifti: not yet implemented".to_string()),
}
} }
impl<'e> Executor<'e> for Projection { impl<'e> Executor<'e> for Projection {
type ResultSet = mercator_db::ResultSet<'e>; type ResultSet = mercator_db::ResultSet<'e>;
fn execute( fn execute<'f: 'e>(
&'e self, &self,
core_id: &'e str, core_id: &str,
parameters: &'e CoreQueryParameters<'e>, parameters: &CoreQueryParameters<'f>,
) -> Self::ResultSet { ) -> Self::ResultSet {
match self { match self {
Projection::Nifti(_, _, _bag) => Err("Proj-Nifti: not yet implemented".to_string()), Projection::Nifti(_, _, _bag) => Err("Proj-Nifti: not yet implemented".to_string()),
@@ -363,26 +373,27 @@ impl<'e> Executor<'e> for Projection {
impl<'e> Executor<'e> for Bag { impl<'e> Executor<'e> for Bag {
type ResultSet = mercator_db::ResultSet<'e>; type ResultSet = mercator_db::ResultSet<'e>;
fn execute( fn execute<'f: 'e>(
&'e self, &self,
core_id: &'e str, core_id: &str,
parameters: &'e CoreQueryParameters<'e>, parameters: &CoreQueryParameters<'f>,
) -> Self::ResultSet { ) -> Self::ResultSet {
let core = parameters.db.core(core_id)?; let core = parameters.db.core(core_id)?;
match self { match self {
Bag::Distinct(bag) => bag.distinct(core_id, parameters), Bag::ViewPort(bag) => view_port(core_id, parameters, bag),
Bag::Distinct(bag) => distinct(core_id, parameters, bag),
Bag::Filter(predicate, bag) => filter(core_id, parameters, predicate, bag), Bag::Filter(predicate, bag) => filter(core_id, parameters, predicate, bag),
Bag::Complement(bag) => bag.complement(core_id, parameters, core), Bag::Complement(bag) => complement(core_id, parameters, core, bag),
Bag::Intersection(lh, rh) => lh.intersection(core_id, parameters, rh), Bag::Intersection(lh, rh) => intersection(core_id, parameters, rh, lh),
Bag::Union(lh, rh) => lh.union(core_id, parameters, rh), Bag::Union(lh, rh) => union(core_id, parameters, rh, lh),
Bag::Bag(list) => bag(core_id, parameters, list), Bag::Bag(list) => bag(core_id, parameters, list),
Bag::Inside(shape) => shape.inside(parameters, core), Bag::Inside(shape) => inside(parameters, core, shape),
Bag::Outside(shape) => { Bag::Outside(shape) => {
//FIXME: This is currently computed as the complement of the values within the shape, except its surface. //FIXME: This is currently computed as the complement of the values within the shape, except its surface.
// Should this be instead a list of positions within the shape? // Should this be instead a list of positions within the shape?
//FIXME: Should we use the Shape's Space to get the maximum bounds or the output Space requested? //FIXME: Should we use the Shape's Space to get the maximum bounds or the output Space requested?
shape.outside(parameters, core) outside(parameters, core, shape)
} }
} }
} }


@@ -14,10 +14,10 @@ pub trait Predictor {
 pub trait Executor<'e> {
     type ResultSet;
 
-    fn execute(
-        &'e self,
-        core_id: &'e str,
-        parameters: &'e CoreQueryParameters<'e>,
+    fn execute<'f: 'e>(
+        &self,
+        core_id: &str,
+        parameters: &CoreQueryParameters<'f>,
     ) -> Self::ResultSet;
 }
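
For orientation, a minimal sketch of an implementation of the revised trait; the `Noop` type and its body are assumptions for illustration, only the signature comes from the hunk above.

use mercator_db::CoreQueryParameters;

// Hypothetical implementor, not part of the crate.
struct Noop;

impl<'e> Executor<'e> for Noop {
    type ResultSet = Result<Vec<String>, String>;

    // The parameters only need to outlive 'e ('f: 'e); `self` is no longer
    // borrowed for the whole 'e lifetime.
    fn execute<'f: 'e>(
        &self,
        core_id: &str,
        parameters: &CoreQueryParameters<'f>,
    ) -> Self::ResultSet {
        let _ = (core_id, parameters);
        Ok(Vec::new())
    }
}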


@@ -59,8 +59,6 @@ pub use expressions::Predictor;
 pub use expressions::Validator;
 pub use queries::FiltersParser;
 pub use queries::QueryParser;
-pub use symbols::Bag;
-pub use symbols::Projection;
 pub use validators::ValidationResult;
 
 #[cfg(test)]


@@ -93,17 +93,12 @@ fn main() {
             execute = t.execute(core, &parameters);
         }
 
-        match execute {
-            Ok(r) => {
-                let r = r
-                    .into_iter()
-                    .map(|(space, objects)| (space, objects.collect::<Vec<_>>()))
-                    .collect::<Vec<_>>();
-                info!("Execution: \n{:#?}", r);
-                info!("NB results: {:?}", r[0].1.len());
-            }
-            Err(e) => info!("Execution: \n{:?}", e),
+        if let Ok(r) = execute {
+            //let r = mercator_db::json::model::to_spatial_objects(r);
+            info!("Execution: \n{:#?}", r);
+            info!("NB results: {:?}", r.len());
+        } else {
+            info!("Execution: \n{:?}", execute);
         }
     }
 }


@@ -1,3 +1,4 @@
+use mercator_db::space;
 use mercator_db::DataBase;
 
 use super::expressions::Predictor;
@@ -15,8 +16,12 @@ impl Predictor for Projection {
 impl Predictor for Bag {
     fn predict(&self, db: &DataBase) -> Result<f64, String> {
         match self {
+            Bag::ViewPort(bag) => bag.predict(db),
             Bag::Distinct(bag) => bag.predict(db),
-            Bag::Filter(_, bag) => bag.predict(db),
+            Bag::Filter(_, bag) => match bag {
+                None => Ok(db.space(space::Space::universe().name())?.volume()),
+                Some(b) => b.predict(db),
+            },
             Bag::Complement(bag) => Ok(db.space(bag.space())?.volume() - bag.predict(db)?),
             Bag::Intersection(lh, rh) => {
                 let l = lh.predict(db)?;


@@ -137,6 +137,8 @@ Aggregations: symbols::Aggregation = {
 //*********************************************************************/
 pub Filters: symbols::Bag = {
     <Bags>
+    //<Bags> =>
+    //    symbols::Bag::ViewPort(Box::new(<>))
 };
 
 // All these expressions generate bags.
@@ -187,25 +189,13 @@ Union: symbols::Bag = {
 Filter: symbols::Bag = {
     // "filter" "(" <p:Predicates> "," <b:Bags> ")" =>
     "filter" "(" <b:Bags> ")" =>
-        symbols::Bag::Filter(None, Box::new(b)),
-    "filter" "(" <p:Predicates> <b:("," <Bags> )?> ")" => {
+        symbols::Bag::Filter(None, Some(Box::new(b))),
+    "filter" "(" <p:Predicates> <b:("," <Bags> )?> ")" =>
         match b {
-            None => {
-                let (low, high) = Space::universe().bounding_box();
-                let low: Vec<_> = low.into();
-                let high: Vec<_> = high.into();
-                let shape = symbols::Shape::HyperRectangle(
-                    Space::universe().name().clone(),
-                    vec![
-                        symbols::LiteralPosition(low.into_iter().map(symbols::LiteralNumber::Float).collect()),
-                        symbols::LiteralPosition(high.into_iter().map(symbols::LiteralNumber::Float).collect()),
-                    ],
-                );
-                symbols::Bag::Filter(Some(p), Box::new(symbols::Bag::Inside(shape)))
-            }
-            Some(b) => symbols::Bag::Filter(Some(p), Box::new(b)),
-        }
-    },
+            None => symbols::Bag::Filter(Some(p), None),
+            Some(b) => symbols::Bag::Filter(Some(p), Some(Box::new(b))),
+        },
 };
 
 Predicates: symbols::Predicate = {


@@ -57,9 +57,11 @@ struct Transform {
 /**********************************************************************/
 #[derive(Clone, Debug)]
 pub enum Bag {
+    // This is an implicit operator, inserted by the parser. Never to be used directly.
+    ViewPort(Box<Bag>),
     // Bags
     Distinct(Box<Bag>),
-    Filter(Option<Predicate>, Box<Bag>),
+    Filter(Option<Predicate>, Option<Box<Bag>>),
     Complement(Box<Bag>),
     Intersection(Box<Bag>, Box<Bag>),
     Union(Box<Bag>, Box<Bag>),
@@ -73,8 +75,12 @@ pub enum Bag {
 impl Bag {
     pub fn space(&self) -> &String {
         match self {
+            Bag::ViewPort(bag) => bag.space(),
             Bag::Distinct(bag) => bag.space(),
-            Bag::Filter(_, bag) => bag.space(),
+            Bag::Filter(_, bag) => match bag {
+                None => space::Space::universe().name(),
+                Some(b) => b.space(),
+            },
             Bag::Complement(bag) => bag.space(),
             Bag::Intersection(lh, _) => {
                 // We are assuming lh and rh are in the same space.
@@ -237,9 +243,7 @@ impl Position {
             Ordering::Greater => 1,
             Ordering::Less => -1,
         };
-
-        let v = vec![LiteralNumber::Int(x)];
-        LiteralPosition(v)
+        LiteralPosition(vec![LiteralNumber::Int(x)])
         }
     }
 }
@@ -258,33 +262,14 @@ pub enum LiteralNumber {
     Float(f64),
 }
 
-impl From<&LiteralNumber> for f64 {
-    fn from(l: &LiteralNumber) -> Self {
-        match l {
-            LiteralNumber::Int(x) => (*x) as f64,
-            LiteralNumber::Float(x) => *x,
-        }
-    }
-}
-
-impl From<LiteralNumber> for f64 {
-    fn from(l: LiteralNumber) -> Self {
-        (&l).into()
-    }
-}
-
-impl From<&LiteralNumber> for space::Coordinate {
-    fn from(literal: &LiteralNumber) -> Self {
-        match literal {
-            LiteralNumber::Float(f) => (*f).into(),
-            LiteralNumber::Int(i) => (*i as u64).into(),
-        }
-    }
-}
-
-impl From<LiteralNumber> for space::Coordinate {
-    fn from(literal: LiteralNumber) -> Self {
-        (&literal).into()
-    }
-}
+impl From<&LiteralNumber> for Vec<f64> {
+    fn from(l: &LiteralNumber) -> Self {
+        let r = match l {
+            LiteralNumber::Int(x) => (*x) as f64,
+            LiteralNumber::Float(x) => *x,
+        };
+
+        vec![r]
+    }
+}
@@ -309,7 +294,7 @@ pub struct LiteralPosition(pub Vec<LiteralNumber>);
 impl LiteralPosition {
     pub fn get_type(&self) -> LiteralTypes {
         let Self(v) = self;
-        let mut t = Vec::with_capacity(v.len());
+        let mut t = Vec::new();
         for n in v {
             t.push(match n {
@@ -342,35 +327,34 @@ impl LiteralPosition {
     }
 }
 
-impl From<&LiteralPosition> for Vec<f64> {
-    fn from(l: &LiteralPosition) -> Self {
-        // Speed-wise this should be the same, the downside is the newly
-        // allocated vector might be suboptimal in terms of space.
-        //let LiteralPosition(v) = l;
-        //v.iter().map(|literal| literal.into()).collect()
-        let LiteralPosition(v) = l;
-        let mut lv = Vec::with_capacity(v.len());
-        for value in v {
-            lv.push(value.into());
-        }
-
-        lv
-    }
-}
-
-impl From<LiteralPosition> for Vec<f64> {
-    fn from(l: LiteralPosition) -> Self {
-        (&l).into()
-    }
-}
+impl From<&LiteralNumber> for f64 {
+    fn from(l: &LiteralNumber) -> Self {
+        match l {
+            LiteralNumber::Int(x) => (*x) as f64,
+            LiteralNumber::Float(x) => *x,
+        }
+    }
+}
+
+impl From<&LiteralPosition> for Vec<f64> {
+    fn from(l: &LiteralPosition) -> Self {
+        let LiteralPosition(v) = l;
+        let mut r = Vec::with_capacity(v.len());
+        for x in v {
+            let x = match x {
+                LiteralNumber::Int(x) => (*x) as f64,
+                LiteralNumber::Float(x) => *x,
+            };
+            r.push(x);
+        }
+
+        r
+    }
+}
 
 impl From<&Vec<f64>> for LiteralPosition {
     fn from(v: &Vec<f64>) -> Self {
-        // Speed-wise this should be the same, the downside is the newly
-        // allocated vector might be suboptimal in terms of space.
-        //LiteralPosition(v.iter().map(|value| LiteralNumber::Float(*value)).collect())
         let mut lv = Vec::with_capacity(v.len());
         for value in v {
             lv.push(LiteralNumber::Float(*value));
@@ -379,36 +363,10 @@ impl From<&Vec<f64>> for LiteralPosition {
         LiteralPosition(lv)
     }
 }
 
-impl From<Vec<f64>> for LiteralPosition {
-    fn from(v: Vec<f64>) -> Self {
-        (&v).into()
-    }
-}
-
 impl From<&space::Position> for LiteralPosition {
     fn from(position: &space::Position) -> Self {
-        let position: Vec<f64> = position.into();
-        position.into()
+        let lv: Vec<f64> = position.into();
+        (&lv).into()
     }
 }
-
-impl From<space::Position> for LiteralPosition {
-    fn from(position: space::Position) -> Self {
-        (&position).into()
-    }
-}
-
-impl From<&LiteralPosition> for space::Position {
-    fn from(position: &LiteralPosition) -> Self {
-        let position: Vec<f64> = position.into();
-        position.into()
-    }
-}
-
-impl From<LiteralPosition> for space::Position {
-    fn from(position: LiteralPosition) -> Self {
-        (&position).into()
-    }
-}
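
As a usage note, a small sketch exercising the conversions kept by this change (assuming the `symbols` items are in scope); the values are arbitrary examples.

fn literal_roundtrip_example() {
    // From<&LiteralPosition> for Vec<f64>: every component becomes an f64.
    let literal = LiteralPosition(vec![LiteralNumber::Float(1.5), LiteralNumber::Int(2)]);
    let coords: Vec<f64> = (&literal).into();

    // From<&Vec<f64>> for LiteralPosition: back to literals, now all Float.
    let _back: LiteralPosition = (&coords).into();
}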


@@ -16,7 +16,7 @@ mod parsing {
fn query() { fn query() {
let p = query_parser(); let p = query_parser();
let nifti = "nifti(inside(point{[0]}))"; let nifti = "nifti(point{[0]})";
// Option is Empty // Option is Empty
assert!(p.parse("").is_ok()); assert!(p.parse("").is_ok());
@@ -47,14 +47,14 @@ mod parsing {
let p = query_parser(); let p = query_parser();
// Check allowed forms of the operator // Check allowed forms of the operator
assert!(p.parse("nifti(inside(point{[0]}))").is_ok()); assert!(p.parse("nifti(point{[0]})").is_ok());
assert!(p.parse("nifti(.properties.id, inside(point{[0]}))").is_ok()); assert!(p.parse("nifti(.properties.id, point{[0]})").is_ok());
unimplemented!(); // TO REMEMBER SOME WORK IS DUE HERE. unimplemented!(); // TO REMEMBER SOME WORK IS DUE HERE.
//FIXME: THIS SHOULD BE ALLOWED //FIXME: THIS SHOULD BE ALLOWED
assert!(p.parse("nifti(2, inside(point{[0]}))").is_ok()); assert!(p.parse("nifti(2, point{[0]})").is_ok());
assert!(p.parse("nifti(2.23, inside(point{[0]}))").is_ok()); assert!(p.parse("nifti(2.23, point{[0]})").is_ok());
//FIXME: SYNTAX OK, TYPE NOT //FIXME: SYNTAX OK, TYPE NOT
assert!(p.parse("nifti(point{[0], \"space\"})").is_err()); assert!(p.parse("nifti(point{[0], \"space\"})").is_err());
@@ -64,16 +64,16 @@ mod parsing {
fn json_operator() { fn json_operator() {
let p = query_parser(); let p = query_parser();
assert!(p.parse("json(true, inside(point{[0]}))").is_ok()); assert!(p.parse("json(true, point{[0]})").is_ok());
assert!(p.parse("json(23, inside(point{[0]}))").is_ok()); assert!(p.parse("json(23, point{[0]})").is_ok());
assert!(p.parse("json([23, 24], inside(point{[0]}))").is_ok()); assert!(p.parse("json([23, 24], point{[0]})").is_ok());
assert!(p.parse("json([23, count(.)], inside(point{[0]}))").is_ok()); assert!(p.parse("json([23, count(.)], point{[0]})").is_ok());
assert!(p.parse("json(true)").is_err()); assert!(p.parse("json(true)").is_err());
assert!(p.parse("json(true,)").is_err()); assert!(p.parse("json(true,)").is_err());
assert!(p.parse("json(, inside(point{[0]}))").is_err()); assert!(p.parse("json(, point{[0]})").is_err());
assert!(p.parse("json(inside(point{[0]}))").is_err()); assert!(p.parse("json(point{[0]})").is_err());
assert!(p.parse("json(true, point)").is_err()); assert!(p.parse("json(true, point)").is_err());
} }
@@ -83,24 +83,24 @@ mod parsing {
let p = query_parser(); let p = query_parser();
assert!(p assert!(p
.parse(format!("json({}, inside(point{{[0]}}))", "true").as_str()) .parse(format!("json({}, point{{[0]}})", "true").as_str())
.is_ok()); .is_ok());
assert!(p assert!(p
.parse(format!("json({}, inside(point{{[0]}}))", "false").as_str()) .parse(format!("json({}, point{{[0]}})", "false").as_str())
.is_ok()); .is_ok());
assert!(p assert!(p
.parse(format!("json({}, inside(point{{[0]}}))", "null").as_str()) .parse(format!("json({}, point{{[0]}})", "null").as_str())
.is_ok()); .is_ok());
// Incorrect capitalisation // Incorrect capitalisation
assert!(p assert!(p
.parse(format!("json({}, inside(point{{[0]}}))", "True").as_str()) .parse(format!("json({}, point{{[0]}})", "True").as_str())
.is_err()); .is_err());
assert!(p assert!(p
.parse(format!("json({}, inside(point{{[0]}}))", "False").as_str()) .parse(format!("json({}, point{{[0]}})", "False").as_str())
.is_err()); .is_err());
assert!(p assert!(p
.parse(format!("json({}, inside(point{{[0]}}))", "Null").as_str()) .parse(format!("json({}, point{{[0]}})", "Null").as_str())
.is_err()); .is_err());
} }
@@ -109,24 +109,24 @@ mod parsing {
let p = query_parser(); let p = query_parser();
assert!(p assert!(p
.parse(format!("json({}, inside(point{{[0]}}))", "{}").as_str()) .parse(format!("json({}, point{{[0]}})", "{}").as_str())
.is_ok()); .is_ok());
assert!(p assert!(p
.parse(format!("json({}, inside(point{{[0]}}))", "{\"field\": 0}").as_str()) .parse(format!("json({}, point{{[0]}})", "{\"field\": 0}").as_str())
.is_ok()); .is_ok());
assert!(p assert!(p
.parse(format!("json({}, inside(point{{[0]}}))", "{\"field\": 0, \"field1\": 1}").as_str()) .parse(format!("json({}, point{{[0]}})", "{\"field\": 0, \"field1\": 1}").as_str())
.is_ok()); .is_ok());
assert!(p assert!(p
.parse(format!("json({}, inside(point{{[0]}}))", "{\"field\": [0, 1]}").as_str()) .parse(format!("json({}, point{{[0]}})", "{\"field\": [0, 1]}").as_str())
.is_ok()); .is_ok());
assert!(p assert!(p
.parse(format!("json({}, inside(point{{[0]}}))", "{\"field\": {\"field1\": 0}}").as_str()) .parse(format!("json({}, point{{[0]}})", "{\"field\": {\"field1\": 0}}").as_str())
.is_ok()); .is_ok());
assert!(p assert!(p
.parse( .parse(
format!( format!(
"json({}, inside(point{{[0]}}))", "json({}, point{{[0]}})",
"{\"field\": [{\"field1\": 0}, {\"field1\": 1}]}" "{\"field\": [{\"field1\": 0}, {\"field1\": 1}]}"
) )
.as_str() .as_str()
@@ -139,25 +139,25 @@ mod parsing {
let p = query_parser(); let p = query_parser();
assert!(p assert!(p
.parse(format!("json({}, inside(point{{[0]}}))", "{:}").as_str()) .parse(format!("json({}, point{{[0]}})", "{:}").as_str())
.is_err()); .is_err());
assert!(p assert!(p
.parse(format!("json({}, inside(point{{[0]}}))", "{field: 0}").as_str()) .parse(format!("json({}, point{{[0]}})", "{field: 0}").as_str())
.is_err()); .is_err());
assert!(p assert!(p
.parse(format!("json({}, inside(point{{[0]}}))", "{0: 0}").as_str()) .parse(format!("json({}, point{{[0]}})", "{0: 0}").as_str())
.is_err()); .is_err());
assert!(p assert!(p
.parse(format!("json({}, inside(point{{[0]}}))", "{\"0\": }").as_str()) .parse(format!("json({}, point{{[0]}})", "{\"0\": }").as_str())
.is_err()); .is_err());
assert!(p assert!(p
.parse(format!("json({}, inside(point{{[0]}}))", "{\"0\": 0 }").as_str()) .parse(format!("json({}, point{{[0]}})", "{\"0\": 0 }").as_str())
.is_ok()); .is_ok());
assert!(p assert!(p
.parse(format!("json({}, inside(point{{[0]}}))", "{\"field\": 0 }").as_str()) .parse(format!("json({}, point{{[0]}})", "{\"field\": 0 }").as_str())
.is_ok()); .is_ok());
assert!(p assert!(p
.parse(format!("json({}, inside(point{{[0]}}))", "{\"field\": \"0\" }").as_str()) .parse(format!("json({}, point{{[0]}})", "{\"field\": \"0\" }").as_str())
.is_ok()); .is_ok());
} }
@@ -166,20 +166,20 @@ mod parsing {
let p = query_parser(); let p = query_parser();
assert!(p assert!(p
.parse(format!("json({}, inside(point{{[0]}}))", "[, 0]").as_str()) .parse(format!("json({}, point{{[0]}})", "[, 0]").as_str())
.is_err()); .is_err());
assert!(p assert!(p
.parse(format!("json({}, inside(point{{[0]}}))", "[]").as_str()) .parse(format!("json({}, point{{[0]}})", "[]").as_str())
.is_ok()); .is_ok());
assert!(p assert!(p
.parse(format!("json({}, inside(point{{[0]}}))", "[0]").as_str()) .parse(format!("json({}, point{{[0]}})", "[0]").as_str())
.is_ok()); .is_ok());
assert!(p assert!(p
.parse(format!("json({}, inside(point{{[0]}}))", "[0, 1]").as_str()) .parse(format!("json({}, point{{[0]}})", "[0, 1]").as_str())
.is_ok()); .is_ok());
assert!(p assert!(p
.parse( .parse(
format!("json({}, inside(point{{[0]}}))", "[{\"field\": 0}, {\"field\": 1}]").as_str() format!("json({}, point{{[0]}})", "[{\"field\": 0}, {\"field\": 1}]").as_str()
) )
.is_ok()); .is_ok());
} }
@@ -190,40 +190,40 @@ mod parsing {
// count () // count ()
assert!(p assert!(p
.parse(format!("json({}, inside(point{{[0]}}))", "count()").as_str()) .parse(format!("json({}, point{{[0]}})", "count()").as_str())
.is_err()); .is_err());
assert!(p assert!(p
.parse(format!("json({}, inside(point{{[0]}}))", "count(distinct)").as_str()) .parse(format!("json({}, point{{[0]}})", "count(distinct)").as_str())
.is_err()); .is_err());
assert!(p assert!(p
.parse(format!("json({}, inside(point{{[0]}}))", "count(.)").as_str()) .parse(format!("json({}, point{{[0]}})", "count(.)").as_str())
.is_ok()); .is_ok());
assert!(p assert!(p
.parse(format!("json({}, inside(point{{[0]}}))", "count(distinct .)").as_str()) .parse(format!("json({}, point{{[0]}})", "count(distinct .)").as_str())
.is_ok()); .is_ok());
// sum () // sum ()
assert!(p assert!(p
.parse(format!("json({}, inside(point{{[0]}}))", "sum()").as_str()) .parse(format!("json({}, point{{[0]}})", "sum()").as_str())
.is_err()); .is_err());
assert!(p assert!(p
.parse(format!("json({}, inside(point{{[0]}}))", "sum(.)").as_str()) .parse(format!("json({}, point{{[0]}})", "sum(.)").as_str())
.is_ok()); .is_ok());
// min () // min ()
assert!(p assert!(p
.parse(format!("json({}, inside(point{{[0]}}))", "min()").as_str()) .parse(format!("json({}, point{{[0]}})", "min()").as_str())
.is_err()); .is_err());
assert!(p assert!(p
.parse(format!("json({}, inside(point{{[0]}}))", "min(.)").as_str()) .parse(format!("json({}, point{{[0]}})", "min(.)").as_str())
.is_ok()); .is_ok());
// max () // max ()
assert!(p assert!(p
.parse(format!("json({}, inside(point{{[0]}}))", "max()").as_str()) .parse(format!("json({}, point{{[0]}})", "max()").as_str())
.is_err()); .is_err());
assert!(p assert!(p
.parse(format!("json({}, inside(point{{[0]}}))", "max(.)").as_str()) .parse(format!("json({}, point{{[0]}})", "max(.)").as_str())
.is_ok()); .is_ok());
} }
@@ -233,42 +233,42 @@ mod parsing {
// Integers // Integers
assert!(p assert!(p
.parse(format!("json({}, inside(point{{[0]}}))", "0").as_str()) .parse(format!("json({}, point{{[0]}})", "0").as_str())
.is_ok()); .is_ok());
assert!(p assert!(p
.parse(format!("json({}, inside(point{{[0]}}))", "+0").as_str()) .parse(format!("json({}, point{{[0]}})", "+0").as_str())
.is_err()); .is_err());
assert!(p assert!(p
.parse(format!("json({}, inside(point{{[0]}}))", "-0").as_str()) .parse(format!("json({}, point{{[0]}})", "-0").as_str())
.is_ok()); .is_ok());
assert!(p assert!(p
.parse(format!("json({}, inside(point{{[0]}}))", "1").as_str()) .parse(format!("json({}, point{{[0]}})", "1").as_str())
.is_ok()); .is_ok());
assert!(p assert!(p
.parse(format!("json({}, inside(point{{[0]}}))", "+1").as_str()) .parse(format!("json({}, point{{[0]}})", "+1").as_str())
.is_err()); .is_err());
assert!(p assert!(p
.parse(format!("json({}, inside(point{{[0]}}))", "-1").as_str()) .parse(format!("json({}, point{{[0]}})", "-1").as_str())
.is_ok()); .is_ok());
// Floating point values // Floating point values
assert!(p assert!(p
.parse(format!("json({}, inside(point{{[0]}}))", "0.0").as_str()) .parse(format!("json({}, point{{[0]}})", "0.0").as_str())
.is_ok()); .is_ok());
assert!(p assert!(p
.parse(format!("json({}, inside(point{{[0]}}))", "+0.0").as_str()) .parse(format!("json({}, point{{[0]}})", "+0.0").as_str())
.is_err()); .is_err());
assert!(p assert!(p
.parse(format!("json({}, inside(point{{[0]}}))", "-0.0").as_str()) .parse(format!("json({}, point{{[0]}})", "-0.0").as_str())
.is_ok()); .is_ok());
assert!(p assert!(p
.parse(format!("json({}, inside(point{{[0]}}))", "0.1").as_str()) .parse(format!("json({}, point{{[0]}})", "0.1").as_str())
.is_ok()); .is_ok());
assert!(p assert!(p
.parse(format!("json({}, inside(point{{[0]}}))", "+0.01").as_str()) .parse(format!("json({}, point{{[0]}})", "+0.01").as_str())
.is_err()); .is_err());
assert!(p assert!(p
.parse(format!("json({}, inside(point{{[0]}}))", "-0.01").as_str()) .parse(format!("json({}, point{{[0]}})", "-0.01").as_str())
.is_ok()); .is_ok());
} }
} }
@@ -290,7 +290,7 @@ mod parsing {
assert!(p.parse("").is_err()); assert!(p.parse("").is_err());
assert!(p.parse("inside(point{[0]})").is_ok()); assert!(p.parse("point{[0]}").is_ok());
} }
/* Not useful to test this rule /* Not useful to test this rule
@@ -305,7 +305,7 @@ mod parsing {
assert!(p.parse("distinct()").is_err()); assert!(p.parse("distinct()").is_err());
assert!(p.parse("distinct(inside(point{[0]}))").is_ok()); assert!(p.parse("distinct(point{[0]})").is_ok());
} }
#[test] #[test]
@@ -314,7 +314,7 @@ mod parsing {
assert!(p.parse("complement()").is_err()); assert!(p.parse("complement()").is_err());
assert!(p.parse("complement(inside(point{[0]}))").is_ok()); assert!(p.parse("complement(point{[0]})").is_ok());
} }
#[test] #[test]
@@ -322,12 +322,12 @@ mod parsing {
let p = filters_parser(); let p = filters_parser();
assert!(p.parse("intersection()").is_err()); assert!(p.parse("intersection()").is_err());
assert!(p.parse("intersection(inside(point{[0]}))").is_err()); assert!(p.parse("intersection(point{[0]})").is_err());
assert!(p assert!(p
.parse("intersection(inside(point{[0]}), inside(point{[0]}), inside(point{[0]}))") .parse("intersection(point{[0]}, point{[0]}, point{[0]})")
.is_err()); .is_err());
assert!(p.parse("intersection(inside(point{[0]}), inside(point{[0]}))").is_ok()); assert!(p.parse("intersection(point{[0]}, point{[0]})").is_ok());
} }
#[test] #[test]
@@ -335,12 +335,12 @@ mod parsing {
let p = filters_parser(); let p = filters_parser();
assert!(p.parse("union()").is_err()); assert!(p.parse("union()").is_err());
assert!(p.parse("union(inside(point{[0]}))").is_err()); assert!(p.parse("union(point{[0]})").is_err());
assert!(p assert!(p
.parse("union(inside(point{[0]}), inside(point{[0]}), inside(point{[0]}))") .parse("union(point{[0]}, point{[0]}, point{[0]})")
.is_err()); .is_err());
assert!(p.parse("union(inside(point{[0]}), inside(point{[0]}))").is_ok()); assert!(p.parse("union(point{[0]}, point{[0]})").is_ok());
} }
#[test] #[test]
@@ -348,10 +348,10 @@ mod parsing {
let p = filters_parser(); let p = filters_parser();
assert!(p.parse("filter()").is_err()); assert!(p.parse("filter()").is_err());
assert!(p.parse("filter(inside(point{[0]}))").is_ok()); assert!(p.parse("filter(point{[0]})").is_ok());
assert!(p.parse("filter(=(., [0]))").is_ok()); assert!(p.parse("filter(=(., [0]))").is_ok());
assert!(p.parse("filter(=(., [0]), inside(point{[0]}))").is_ok()); assert!(p.parse("filter(=(., [0]), point{[0]})").is_ok());
} }
/* Not useful to test this rule /* Not useful to test this rule
@@ -365,17 +365,17 @@ mod parsing {
let p = filters_parser(); let p = filters_parser();
assert!(p assert!(p
.parse(format!("filter({}, inside(point{{[0]}}))", "<(., [0])").as_str()) .parse(format!("filter({}, point{{[0]}})", "<(., [0])").as_str())
.is_ok()); .is_ok());
assert!(p assert!(p
.parse(format!("filter({}, inside(point{{[0]}}))", "<(, [0])").as_str()) .parse(format!("filter({}, point{{[0]}})", "<(, [0])").as_str())
.is_err()); .is_err());
assert!(p assert!(p
.parse(format!("filter({}, inside(point{{[0]}}))", "<(.)").as_str()) .parse(format!("filter({}, point{{[0]}})", "<(.)").as_str())
.is_err()); .is_err());
assert!(p assert!(p
.parse(format!("filter({}, inside(point{{[0]}}))", "<()").as_str()) .parse(format!("filter({}, point{{[0]}})", "<()").as_str())
.is_err()); .is_err());
} }
@@ -384,17 +384,17 @@ mod parsing {
let p = filters_parser(); let p = filters_parser();
assert!(p assert!(p
.parse(format!("filter({}, inside(point{{[0]}}))", ">(., [0])").as_str()) .parse(format!("filter({}, point{{[0]}})", ">(., [0])").as_str())
.is_ok()); .is_ok());
assert!(p assert!(p
.parse(format!("filter({}, inside(point{{[0]}}))", ">(, [0])").as_str()) .parse(format!("filter({}, point{{[0]}})", ">(, [0])").as_str())
.is_err()); .is_err());
assert!(p assert!(p
.parse(format!("filter({}, inside(point{{[0]}}))", ">(.)").as_str()) .parse(format!("filter({}, point{{[0]}})", ">(.)").as_str())
.is_err()); .is_err());
assert!(p assert!(p
.parse(format!("filter({}, inside(point{{[0]}}))", ">()").as_str()) .parse(format!("filter({}, point{{[0]}})", ">()").as_str())
.is_err()); .is_err());
} }
@@ -403,17 +403,17 @@ mod parsing {
let p = filters_parser(); let p = filters_parser();
assert!(p assert!(p
.parse(format!("filter({}, inside(point{{[0]}}))", "=(., [0])").as_str()) .parse(format!("filter({}, point{{[0]}})", "=(., [0])").as_str())
.is_ok()); .is_ok());
assert!(p assert!(p
.parse(format!("filter({}, inside(point{{[0]}}))", "=(, [0])").as_str()) .parse(format!("filter({}, point{{[0]}})", "=(, [0])").as_str())
.is_err()); .is_err());
assert!(p assert!(p
.parse(format!("filter({}, inside(point{{[0]}}))", "=(.)").as_str()) .parse(format!("filter({}, point{{[0]}})", "=(.)").as_str())
.is_err()); .is_err());
assert!(p assert!(p
.parse(format!("filter({}, inside(point{{[0]}}))", "=()").as_str()) .parse(format!("filter({}, point{{[0]}})", "=()").as_str())
.is_err()); .is_err());
} }
@@ -422,11 +422,11 @@ mod parsing {
let p = filters_parser(); let p = filters_parser();
assert!(p assert!(p
.parse(format!("filter({}, inside(point{{[0]}}))", "!(=(., [0]))").as_str()) .parse(format!("filter({}, point{{[0]}})", "!(=(., [0]))").as_str())
.is_ok()); .is_ok());
assert!(p assert!(p
.parse(format!("filter({}, inside(point{{[0]}}))", "!()").as_str()) .parse(format!("filter({}, point{{[0]}})", "!()").as_str())
.is_err()); .is_err());
} }
@@ -435,17 +435,17 @@ mod parsing {
let p = filters_parser(); let p = filters_parser();
assert!(p assert!(p
.parse(format!("filter({}, inside(point{{[0]}}))", "&(=(., [0]), =(., [0]))").as_str()) .parse(format!("filter({}, point{{[0]}})", "&(=(., [0]), =(., [0]))").as_str())
.is_ok()); .is_ok());
assert!(p assert!(p
.parse(format!("filter({}, inside(point{{[0]}}))", "&(, =(., [0]))").as_str()) .parse(format!("filter({}, point{{[0]}})", "&(, =(., [0]))").as_str())
.is_err()); .is_err());
assert!(p assert!(p
.parse(format!("filter({}, inside(point{{[0]}}))", "&(|(=(., [0])))").as_str()) .parse(format!("filter({}, point{{[0]}})", "&(|(=(., [0])))").as_str())
.is_err()); .is_err());
assert!(p assert!(p
.parse(format!("filter({}, inside(point{{[0]}}))", "&()").as_str()) .parse(format!("filter({}, point{{[0]}})", "&()").as_str())
.is_err()); .is_err());
} }
@@ -454,17 +454,17 @@ mod parsing {
let p = filters_parser(); let p = filters_parser();
assert!(p assert!(p
.parse(format!("filter({}, inside(point{{[0]}}))", "|(=(., [0]), =(., [0]))").as_str()) .parse(format!("filter({}, point{{[0]}})", "|(=(., [0]), =(., [0]))").as_str())
.is_ok()); .is_ok());
assert!(p assert!(p
.parse(format!("filter({}, inside(point{{[0]}}))", "|(, =(., [0]))").as_str()) .parse(format!("filter({}, point{{[0]}})", "|(, =(., [0]))").as_str())
.is_err()); .is_err());
assert!(p assert!(p
.parse(format!("filter({}, inside(point{{[0]}}))", "|(|(=(., [0])))").as_str()) .parse(format!("filter({}, point{{[0]}})", "|(|(=(., [0])))").as_str())
.is_err()); .is_err());
assert!(p assert!(p
.parse(format!("filter({}, inside(point{{[0]}}))", "|()").as_str()) .parse(format!("filter({}, point{{[0]}})", "|()").as_str())
.is_err()); .is_err());
} }
@@ -474,11 +474,11 @@ mod parsing {
assert!(p.parse("bag{}").is_err()); assert!(p.parse("bag{}").is_err());
assert!(p.parse("bag{inside(point{[0]})}").is_ok()); assert!(p.parse("bag{point{[0]}}").is_ok());
assert!(p.parse("bag{inside(point{[0]}), inside(point{[0]})}").is_ok()); assert!(p.parse("bag{point{[0]}, point{[0]}}").is_ok());
assert!(p.parse("bag{inside(point{[0]}), inside(point{[0]}), inside(point{[0]})}").is_ok()); assert!(p.parse("bag{point{[0]}, point{[0]}, point{[0]}}").is_ok());
assert!(p assert!(p
.parse("bag{inside(point{[0]}), inside(hypersphere{[0], 1}), inside(hyperrectangle{[0], [1]})}") .parse("bag{point{[0]}, hypersphere{[0], 1}, hyperrectangle{[0], [1]}}")
.is_ok()); .is_ok());
} }
@@ -518,21 +518,21 @@ mod parsing {
// At least two positions when it is aligned with the axis, otherwise an even number // At least two positions when it is aligned with the axis, otherwise an even number
// of positions, as the number of vertices follows the rule 2**k, where k is the number // of positions, as the number of vertices follows the rule 2**k, where k is the number
// of dimensions of the space containing the hyperrectangle. // of dimensions of the space containing the hyperrectangle.
assert!(p.parse("inside(hyperrectangle{})").is_err()); assert!(p.parse("hyperrectangle{}").is_err());
assert!(p.parse("inside(hyperrectangle{[]})").is_err()); assert!(p.parse("hyperrectangle{[]}").is_err());
assert!(p.parse("inside(hyperrectangle{[0]})").is_err()); assert!(p.parse("hyperrectangle{[0]}").is_err());
assert!(p.parse("inside(hyperrectangle{[0], [1], [2]})").is_err()); assert!(p.parse("hyperrectangle{[0], [1], [2]}").is_err());
assert!(p.parse("inside(hyperrectangle{[0], [1], [2], [3], [4]})").is_err()); assert!(p.parse("hyperrectangle{[0], [1], [2], [3], [4]}").is_err());
assert!(p.parse("inside(hyperrectangle{[0], [1]})").is_ok()); assert!(p.parse("hyperrectangle{[0], [1]}").is_ok());
assert!(p.parse("inside(hyperrectangle{[0], [1], \"space\"})").is_ok()); assert!(p.parse("hyperrectangle{[0], [1], \"space\"}").is_ok());
assert!(p.parse("inside(hyperrectangle{[0], [1], [2], [3]})").is_ok()); assert!(p.parse("hyperrectangle{[0], [1], [2], [3]}").is_ok());
assert!(p.parse("inside(hyperrectangle{[0], [1], [2], [3]})").is_ok()); assert!(p.parse("hyperrectangle{[0], [1], [2], [3]}").is_ok());
assert!(p assert!(p
.parse("inside(hyperrectangle{[0], [1], [2], [3], [4], [5]})") .parse("hyperrectangle{[0], [1], [2], [3], [4], [5]}")
.is_ok()); .is_ok());
assert!(p assert!(p
.parse("inside(hyperrectangle{[0], [1], [2], [3], [4], [5], \"space\"})") .parse("hyperrectangle{[0], [1], [2], [3], [4], [5], \"space\"}")
.is_ok()); .is_ok());
} }
@@ -540,23 +540,23 @@ mod parsing {
fn hyperrsphere() { fn hyperrsphere() {
let p = filters_parser(); let p = filters_parser();
assert!(p.parse("inside(hypersphere{}").is_err()); assert!(p.parse("hypersphere{}").is_err());
assert!(p.parse("inside(hypersphere{[]})").is_err()); assert!(p.parse("hypersphere{[]}").is_err());
assert!(p.parse("inside(hypersphere{[0]})").is_err()); assert!(p.parse("hypersphere{[0]}").is_err());
assert!(p.parse("inside(hypersphere{[0], 23})").is_ok()); assert!(p.parse("hypersphere{[0], 23}").is_ok());
assert!(p.parse("inside(hypersphere{[0], 23, \"space\"})").is_ok()); assert!(p.parse("hypersphere{[0], 23, \"space\"}").is_ok());
} }
#[test] #[test]
fn point() { fn point() {
let p = filters_parser(); let p = filters_parser();
assert!(p.parse("inside(point{})").is_err()); assert!(p.parse("point{}").is_err());
assert!(p.parse("inside(point{[]})").is_err()); assert!(p.parse("point{[]}").is_err());
assert!(p.parse("inside(point{[0]})").is_ok()); assert!(p.parse("point{[0]}").is_ok());
assert!(p.parse("inside(point{[0], \"space\"})").is_ok()); assert!(p.parse("point{[0], \"space\"}").is_ok());
} }
#[test] #[test]
@@ -579,30 +579,30 @@ mod parsing {
assert!(p assert!(p
.parse( .parse(
format!( format!(
"filter(=({}, [1]), inside(point{{[0]}}))", "filter(=({}, [1]), point{{[0]}})",
"str_cmp_ignore_case(.field, \"\")" "str_cmp_ignore_case(.field, \"\")"
) )
.as_str() .as_str()
) )
.is_ok()); .is_ok());
assert!(p assert!(p
.parse(format!("filter(=({}, [1]), inside(point{{[0]}}))", "str_cmp(.field, \"\")").as_str()) .parse(format!("filter(=({}, [1]), point{{[0]}})", "str_cmp(.field, \"\")").as_str())
.is_ok()); .is_ok());
assert!(p assert!(p
.parse(format!("filter(=({}, [1]), inside(point{{[0]}}))", ".field").as_str()) .parse(format!("filter(=({}, [1]), point{{[0]}})", ".field").as_str())
.is_ok()); .is_ok());
assert!(p assert!(p
.parse(format!("filter(=({}, [1]), inside(point{{[0]}}))", "[0]").as_str()) .parse(format!("filter(=({}, [1]), point{{[0]}})", "[0]").as_str())
.is_ok()); .is_ok());
assert!(p assert!(p
.parse(format!("filter(=({}, [1]), inside(point{{[0]}}))", "inside(point{[0]})").as_str()) .parse(format!("filter(=({}, [1]), point{{[0]}})", "point{[0]}").as_str())
.is_err()); .is_err());
assert!(p assert!(p
.parse(format!("filter(=({}, [1]), inside(point{{[0]}}))", "{0}").as_str()) .parse(format!("filter(=({}, [1]), point{{[0]}})", "{0}").as_str())
.is_err()); .is_err());
assert!(p assert!(p
.parse(format!("filter(=({}, [1]), inside(point{{[0]}}))", "").as_str()) .parse(format!("filter(=({}, [1]), point{{[0]}})", "").as_str())
.is_err()); .is_err());
}*/ }*/
@@ -612,15 +612,15 @@ mod parsing {
assert!(p assert!(p
.parse( .parse(
format!("filter(=({}, [1]), inside(point{{[0]}}))", "str_cmp(.field, \"\")").as_str() format!("filter(=({}, [1]), point{{[0]}})", "str_cmp(.field, \"\")").as_str()
) )
.is_ok()); .is_ok());
assert!(p assert!(p
.parse(format!("filter(=({}, [1]), inside(point{{[0]}}))", "str_cmp(.field)").as_str()) .parse(format!("filter(=({}, [1]), point{{[0]}})", "str_cmp(.field)").as_str())
.is_err()); .is_err());
assert!(p assert!(p
.parse(format!("filter(=({}, [1]), inside(point{{[0]}}))", "str_cmp(\"\")").as_str()) .parse(format!("filter(=({}, [1]), point{{[0]}})", "str_cmp(\"\")").as_str())
.is_err()); .is_err());
} }
@@ -631,7 +631,7 @@ mod parsing {
assert!(p assert!(p
.parse( .parse(
format!( format!(
"filter(=({}, [1]), inside(point{{[0]}}))", "filter(=({}, [1]), point{{[0]}})",
"str_cmp_ignore_case(.field, \"\")" "str_cmp_ignore_case(.field, \"\")"
) )
.as_str() .as_str()
@@ -641,7 +641,7 @@ mod parsing {
assert!(p assert!(p
.parse( .parse(
format!( format!(
"filter(=({}, [1]), inside(point{{[0]}}))", "filter(=({}, [1]), point{{[0]}})",
"str_cmp_ignore_case(.field)" "str_cmp_ignore_case(.field)"
) )
.as_str() .as_str()
@@ -650,7 +650,7 @@ mod parsing {
assert!(p assert!(p
.parse( .parse(
format!( format!(
"filter(=({}, [1]), inside(point{{[0]}}))", "filter(=({}, [1]), point{{[0]}})",
"str_cmp_ignore_case(\"\")" "str_cmp_ignore_case(\"\")"
) )
.as_str() .as_str()
@@ -663,19 +663,19 @@ mod parsing {
let p = filters_parser(); let p = filters_parser();
assert!(p assert!(p
.parse(format!("filter(=({}, [1]), inside(point{{[0]}}))", ".").as_str()) .parse(format!("filter(=({}, [1]), point{{[0]}})", ".").as_str())
.is_ok()); .is_ok());
assert!(p assert!(p
.parse(format!("filter(=({}, [1]), inside(point{{[0]}}))", ".field").as_str()) .parse(format!("filter(=({}, [1]), point{{[0]}})", ".field").as_str())
.is_ok()); .is_ok());
assert!(p assert!(p
.parse(format!("filter(=({}, [1]), inside(point{{[0]}}))", ".field.field").as_str()) .parse(format!("filter(=({}, [1]), point{{[0]}})", ".field.field").as_str())
.is_ok()); .is_ok());
assert!(p assert!(p
.parse(format!("filter(=({}, [1]), inside(point{{[0]}}))", ".field[1].field").as_str()) .parse(format!("filter(=({}, [1]), point{{[0]}})", ".field[1].field").as_str())
.is_ok()); .is_ok());
assert!(p assert!(p
.parse(format!("filter(=({}, [1]), inside(point{{[0]}}))", ".field.field[1]").as_str()) .parse(format!("filter(=({}, [1]), point{{[0]}})", ".field.field[1]").as_str())
.is_ok()); .is_ok());
} }
@@ -684,26 +684,26 @@ mod parsing {
let p = filters_parser(); let p = filters_parser();
// Empty // Empty
assert!(p.parse(format!("inside(point{{{}}})", "[]").as_str()).is_err()); assert!(p.parse(format!("point{{{}}}", "[]").as_str()).is_err());
// Non-numerical coordinate: // Non-numerical coordinate:
assert!(p.parse(format!("inside(point{{{}}})", "[aa]").as_str()).is_err()); assert!(p.parse(format!("point{{{}}}", "[aa]").as_str()).is_err());
assert!(p assert!(p
.parse(format!("inside(point{{{}}})", "[\"aa\"]").as_str()) .parse(format!("point{{{}}}", "[\"aa\"]").as_str())
.is_err()); .is_err());
// One or more coordinates // One or more coordinates
assert!(p.parse(format!("inside(point{{{}}})", "[0]").as_str()).is_ok()); assert!(p.parse(format!("point{{{}}}", "[0]").as_str()).is_ok());
assert!(p.parse(format!("inside(point{{{}}})", "[0, 0]").as_str()).is_ok()); assert!(p.parse(format!("point{{{}}}", "[0, 0]").as_str()).is_ok());
assert!(p assert!(p
.parse(format!("inside(point{{{}}})", "[0, 0, 0]").as_str()) .parse(format!("point{{{}}}", "[0, 0, 0]").as_str())
.is_ok()); .is_ok());
assert!(p assert!(p
.parse(format!("inside(point{{{}}})", "[0, 0, 0, 0]").as_str()) .parse(format!("point{{{}}}", "[0, 0, 0, 0]").as_str())
.is_ok()); .is_ok());
assert!(p assert!(p
.parse(format!("inside(point{{{}}})", "[0,0,0,0]").as_str()) .parse(format!("point{{{}}}", "[0,0,0,0]").as_str())
.is_ok()); .is_ok());
} }
@@ -713,66 +713,66 @@ mod parsing {
// Single dot // Single dot
assert!(p assert!(p
.parse(format!("filter(<({}, [1]), inside(point{{[0]}}))", ".").as_str()) .parse(format!("filter(<({}, [1]), point{{[0]}})", ".").as_str())
.is_ok()); .is_ok());
// Check first character is within allowed characters // Check first character is within allowed characters
assert!(p assert!(p
.parse(format!("filter(<({}, [1]), inside(point{{[0]}}))", ".a").as_str()) .parse(format!("filter(<({}, [1]), point{{[0]}})", ".a").as_str())
.is_ok()); .is_ok());
assert!(p assert!(p
.parse(format!("filter(<({}, [1]), inside(point{{[0]}}))", "._").as_str()) .parse(format!("filter(<({}, [1]), point{{[0]}})", "._").as_str())
.is_ok()); .is_ok());
assert!(p assert!(p
.parse(format!("filter(<({}, [1]), inside(point{{[0]}}))", ".2").as_str()) .parse(format!("filter(<({}, [1]), point{{[0]}})", ".2").as_str())
.is_err()); .is_err());
// Check second character is within allowed characters // Check second character is within allowed characters
assert!(p assert!(p
.parse(format!("filter(<({}, [1]), inside(point{{[0]}}))", ".fa").as_str()) .parse(format!("filter(<({}, [1]), point{{[0]}})", ".fa").as_str())
.is_ok()); .is_ok());
assert!(p assert!(p
.parse(format!("filter(<({}, [1]), inside(point{{[0]}}))", ".f2").as_str()) .parse(format!("filter(<({}, [1]), point{{[0]}})", ".f2").as_str())
.is_ok()); .is_ok());
assert!(p assert!(p
.parse(format!("filter(<({}, [1]), inside(point{{[0]}}))", ".f_").as_str()) .parse(format!("filter(<({}, [1]), point{{[0]}})", ".f_").as_str())
.is_ok()); .is_ok());
assert!(p assert!(p
.parse(format!("filter(<({}, [1]), inside(point{{[0]}}))", ".f2").as_str()) .parse(format!("filter(<({}, [1]), point{{[0]}})", ".f2").as_str())
.is_ok()); .is_ok());
        // Check we can add a subscript // Check we can add a subscript
assert!(p assert!(p
.parse(format!("filter(<({}, [1]), inside(point{{[0]}}))", ".[23]").as_str()) .parse(format!("filter(<({}, [1]), point{{[0]}})", ".[23]").as_str())
.is_ok()); .is_ok());
assert!(p assert!(p
.parse(format!("filter(<({}, [1]), inside(point{{[0]}}))", ".f[0]").as_str()) .parse(format!("filter(<({}, [1]), point{{[0]}})", ".f[0]").as_str())
.is_ok()); .is_ok());
assert!(p assert!(p
.parse(format!("filter(<({}, [1]), inside(point{{[0]}}))", ".f[2]").as_str()) .parse(format!("filter(<({}, [1]), point{{[0]}})", ".f[2]").as_str())
.is_ok()); .is_ok());
assert!(p assert!(p
.parse(format!("filter(<({}, [1]), inside(point{{[0]}}))", ".f[23]").as_str()) .parse(format!("filter(<({}, [1]), point{{[0]}})", ".f[23]").as_str())
.is_ok()); .is_ok());
// Invalid index values // Invalid index values
assert!(p assert!(p
.parse(format!("filter(<({}, [1]), inside(point{{[0]}}))", ".f[2.3]").as_str()) .parse(format!("filter(<({}, [1]), point{{[0]}})", ".f[2.3]").as_str())
.is_err()); .is_err());
assert!(p assert!(p
.parse(format!("filter(<({}, [1]), inside(point{{[0]}}))", ".f[02]").as_str()) .parse(format!("filter(<({}, [1]), point{{[0]}})", ".f[02]").as_str())
.is_err()); .is_err());
assert!(p assert!(p
.parse(format!("filter(<({}, [1]), inside(point{{[0]}}))", ".f[-2]").as_str()) .parse(format!("filter(<({}, [1]), point{{[0]}})", ".f[-2]").as_str())
.is_err()); .is_err());
assert!(p assert!(p
.parse(format!("filter(<({}, [1]), inside(point{{[0]}}))", ".f[2e2]").as_str()) .parse(format!("filter(<({}, [1]), point{{[0]}})", ".f[2e2]").as_str())
.is_err()); .is_err());
assert!(p assert!(p
.parse(format!("filter(<({}, [1]), inside(point{{[0]}}))", ".f[2E2]").as_str()) .parse(format!("filter(<({}, [1]), point{{[0]}})", ".f[2E2]").as_str())
.is_err()); .is_err());
assert!(p assert!(p
.parse(format!("filter(<({}, [1]), inside(point{{[0]}}))", ".f[+2]").as_str()) .parse(format!("filter(<({}, [1]), point{{[0]}})", ".f[+2]").as_str())
.is_err()); .is_err());
} }
@@ -836,42 +836,42 @@ mod parsing {
// Integers // Integers
assert!(p assert!(p
.parse(format!("inside(hypersphere{{[0],{}}})", "0").as_str()) .parse(format!("hypersphere{{[0],{}}}", "0").as_str())
.is_ok()); .is_ok());
assert!(p assert!(p
.parse(format!("inside(hypersphere{{[0],{}}})", "+0").as_str()) .parse(format!("hypersphere{{[0],{}}}", "+0").as_str())
.is_ok()); .is_ok());
assert!(p assert!(p
.parse(format!("inside(hypersphere{{[0],{}}})", "-0").as_str()) .parse(format!("hypersphere{{[0],{}}}", "-0").as_str())
.is_err()); .is_err());
assert!(p assert!(p
.parse(format!("inside(hypersphere{{[0],{}}})", "1").as_str()) .parse(format!("hypersphere{{[0],{}}}", "1").as_str())
.is_ok()); .is_ok());
assert!(p assert!(p
.parse(format!("inside(hypersphere{{[0],{}}})", "+1").as_str()) .parse(format!("hypersphere{{[0],{}}}", "+1").as_str())
.is_ok()); .is_ok());
assert!(p assert!(p
.parse(format!("inside(hypersphere{{[0],{}}})", "-1").as_str()) .parse(format!("hypersphere{{[0],{}}}", "-1").as_str())
.is_err()); .is_err());
// Floating point values // Floating point values
assert!(p assert!(p
.parse(format!("inside(hypersphere{{[0],{}}})", "0.0").as_str()) .parse(format!("hypersphere{{[0],{}}}", "0.0").as_str())
.is_ok()); .is_ok());
assert!(p assert!(p
.parse(format!("inside(hypersphere{{[0],{}}})", "+0.0").as_str()) .parse(format!("hypersphere{{[0],{}}}", "+0.0").as_str())
.is_ok()); .is_ok());
assert!(p assert!(p
.parse(format!("inside(hypersphere{{[0],{}}})", "-0.0").as_str()) .parse(format!("hypersphere{{[0],{}}}", "-0.0").as_str())
.is_err()); .is_err());
assert!(p assert!(p
.parse(format!("inside(hypersphere{{[0],{}}})", "0.1").as_str()) .parse(format!("hypersphere{{[0],{}}}", "0.1").as_str())
.is_ok()); .is_ok());
assert!(p assert!(p
.parse(format!("inside(hypersphere{{[0],{}}})", "+0.01").as_str()) .parse(format!("hypersphere{{[0],{}}}", "+0.01").as_str())
.is_ok()); .is_ok());
assert!(p assert!(p
.parse(format!("inside(hypersphere{{[0],{}}})", "-0.01").as_str()) .parse(format!("hypersphere{{[0],{}}}", "-0.01").as_str())
.is_err()); .is_err());
} }
@@ -880,20 +880,20 @@ mod parsing {
let p = filters_parser(); let p = filters_parser();
// Integers // Integers
assert!(p.parse(format!("inside(point{{[{}]}})", "0").as_str()).is_ok()); assert!(p.parse(format!("point{{[{}]}}", "0").as_str()).is_ok());
assert!(p.parse(format!("inside(point{{[{}]}})", "+0").as_str()).is_ok()); assert!(p.parse(format!("point{{[{}]}}", "+0").as_str()).is_ok());
assert!(p.parse(format!("inside(point{{[{}]}})", "-0").as_str()).is_ok()); assert!(p.parse(format!("point{{[{}]}}", "-0").as_str()).is_ok());
assert!(p.parse(format!("inside(point{{[{}]}})", "1").as_str()).is_ok()); assert!(p.parse(format!("point{{[{}]}}", "1").as_str()).is_ok());
assert!(p.parse(format!("inside(point{{[{}]}})", "+1").as_str()).is_ok()); assert!(p.parse(format!("point{{[{}]}}", "+1").as_str()).is_ok());
assert!(p.parse(format!("inside(point{{[{}]}})", "-1").as_str()).is_ok()); assert!(p.parse(format!("point{{[{}]}}", "-1").as_str()).is_ok());
// Floating point values // Floating point values
assert!(p.parse(format!("inside(point{{[{}]}})", "0.0").as_str()).is_ok()); assert!(p.parse(format!("point{{[{}]}}", "0.0").as_str()).is_ok());
assert!(p.parse(format!("inside(point{{[{}]}})", "+0.0").as_str()).is_ok()); assert!(p.parse(format!("point{{[{}]}}", "+0.0").as_str()).is_ok());
assert!(p.parse(format!("inside(point{{[{}]}})", "-0.0").as_str()).is_ok()); assert!(p.parse(format!("point{{[{}]}}", "-0.0").as_str()).is_ok());
assert!(p.parse(format!("inside(point{{[{}]}})", "0.1").as_str()).is_ok()); assert!(p.parse(format!("point{{[{}]}}", "0.1").as_str()).is_ok());
assert!(p.parse(format!("inside(point{{[{}]}})", "+0.01").as_str()).is_ok()); assert!(p.parse(format!("point{{[{}]}}", "+0.01").as_str()).is_ok());
assert!(p.parse(format!("inside(point{{[{}]}})", "-0.01").as_str()).is_ok()); assert!(p.parse(format!("point{{[{}]}}", "-0.01").as_str()).is_ok());
} }
#[test] #[test]
@@ -901,54 +901,54 @@ mod parsing {
let p = filters_parser(); let p = filters_parser();
// Integers // Integers
assert!(p.parse(format!("inside(point{{[{}]}})", "0").as_str()).is_ok()); assert!(p.parse(format!("point{{[{}]}}", "0").as_str()).is_ok());
assert!(p.parse(format!("inside(point{{[{}]}})", "1").as_str()).is_ok()); assert!(p.parse(format!("point{{[{}]}}", "1").as_str()).is_ok());
assert!(p.parse(format!("inside(point{{[{}]}})", "1e2").as_str()).is_ok()); assert!(p.parse(format!("point{{[{}]}}", "1e2").as_str()).is_ok());
assert!(p.parse(format!("inside(point{{[{}]}})", "1e+2").as_str()).is_ok()); assert!(p.parse(format!("point{{[{}]}}", "1e+2").as_str()).is_ok());
assert!(p.parse(format!("inside(point{{[{}]}})", "1e-2").as_str()).is_ok()); assert!(p.parse(format!("point{{[{}]}}", "1e-2").as_str()).is_ok());
assert!(p.parse(format!("inside(point{{[{}]}})", "1E2").as_str()).is_ok()); assert!(p.parse(format!("point{{[{}]}}", "1E2").as_str()).is_ok());
assert!(p.parse(format!("inside(point{{[{}]}})", "100").as_str()).is_ok()); assert!(p.parse(format!("point{{[{}]}}", "100").as_str()).is_ok());
assert!(p.parse(format!("inside(point{{[{}]}})", "010").as_str()).is_err()); assert!(p.parse(format!("point{{[{}]}}", "010").as_str()).is_err());
// Floating point values (normalized) // Floating point values (normalized)
assert!(p.parse(format!("inside(point{{[{}]}})", "0.0").as_str()).is_ok()); assert!(p.parse(format!("point{{[{}]}}", "0.0").as_str()).is_ok());
assert!(p.parse(format!("inside(point{{[{}]}})", "0.1").as_str()).is_ok()); assert!(p.parse(format!("point{{[{}]}}", "0.1").as_str()).is_ok());
assert!(p.parse(format!("inside(point{{[{}]}})", "0.1e0").as_str()).is_ok()); assert!(p.parse(format!("point{{[{}]}}", "0.1e0").as_str()).is_ok());
assert!(p.parse(format!("inside(point{{[{}]}})", "0.1e2").as_str()).is_ok()); assert!(p.parse(format!("point{{[{}]}}", "0.1e2").as_str()).is_ok());
assert!(p.parse(format!("inside(point{{[{}]}})", "0.1e+2").as_str()).is_ok()); assert!(p.parse(format!("point{{[{}]}}", "0.1e+2").as_str()).is_ok());
assert!(p.parse(format!("inside(point{{[{}]}})", "0.1e-2").as_str()).is_ok()); assert!(p.parse(format!("point{{[{}]}}", "0.1e-2").as_str()).is_ok());
assert!(p.parse(format!("inside(point{{[{}]}})", "0.1E2").as_str()).is_ok()); assert!(p.parse(format!("point{{[{}]}}", "0.1E2").as_str()).is_ok());
assert!(p.parse(format!("inside(point{{[{}]}})", "0.1E23").as_str()).is_ok()); assert!(p.parse(format!("point{{[{}]}}", "0.1E23").as_str()).is_ok());
assert!(p.parse(format!("inside(point{{[{}]}})", "0.01").as_str()).is_ok()); assert!(p.parse(format!("point{{[{}]}}", "0.01").as_str()).is_ok());
assert!(p.parse(format!("inside(point{{[{}]}})", "0.").as_str()).is_err()); assert!(p.parse(format!("point{{[{}]}}", "0.").as_str()).is_err());
assert!(p assert!(p
.parse(format!("inside(point{{[{}]}})", "0.1E03").as_str()) .parse(format!("point{{[{}]}}", "0.1E03").as_str())
.is_err()); .is_err());
assert!(p assert!(p
.parse(format!("inside(point{{[{}]}})", "0.1E0.3").as_str()) .parse(format!("point{{[{}]}}", "0.1E0.3").as_str())
.is_err()); .is_err());
// Floating point values (denormalized) // Floating point values (denormalized)
assert!(p.parse(format!("inside(point{{[{}]}})", "1.0").as_str()).is_ok()); assert!(p.parse(format!("point{{[{}]}}", "1.0").as_str()).is_ok());
assert!(p.parse(format!("inside(point{{[{}]}})", "1.1").as_str()).is_ok()); assert!(p.parse(format!("point{{[{}]}}", "1.1").as_str()).is_ok());
assert!(p.parse(format!("inside(point{{[{}]}})", "1.1e0").as_str()).is_ok()); assert!(p.parse(format!("point{{[{}]}}", "1.1e0").as_str()).is_ok());
assert!(p.parse(format!("inside(point{{[{}]}})", "1.1e2").as_str()).is_ok()); assert!(p.parse(format!("point{{[{}]}}", "1.1e2").as_str()).is_ok());
assert!(p.parse(format!("inside(point{{[{}]}})", "1.1e+2").as_str()).is_ok()); assert!(p.parse(format!("point{{[{}]}}", "1.1e+2").as_str()).is_ok());
assert!(p.parse(format!("inside(point{{[{}]}})", "1.1e-2").as_str()).is_ok()); assert!(p.parse(format!("point{{[{}]}}", "1.1e-2").as_str()).is_ok());
assert!(p.parse(format!("inside(point{{[{}]}})", "1.1E2").as_str()).is_ok()); assert!(p.parse(format!("point{{[{}]}}", "1.1E2").as_str()).is_ok());
assert!(p.parse(format!("inside(point{{[{}]}})", "1.1E23").as_str()).is_ok()); assert!(p.parse(format!("point{{[{}]}}", "1.1E23").as_str()).is_ok());
assert!(p.parse(format!("inside(point{{[{}]}})", "1.01").as_str()).is_ok()); assert!(p.parse(format!("point{{[{}]}}", "1.01").as_str()).is_ok());
assert!(p.parse(format!("inside(point{{[{}]}})", "10.1").as_str()).is_ok()); assert!(p.parse(format!("point{{[{}]}}", "10.1").as_str()).is_ok());
assert!(p.parse(format!("inside(point{{[{}]}})", "1.").as_str()).is_err()); assert!(p.parse(format!("point{{[{}]}}", "1.").as_str()).is_err());
assert!(p.parse(format!("inside(point{{[{}]}})", "01.1").as_str()).is_err()); assert!(p.parse(format!("point{{[{}]}}", "01.1").as_str()).is_err());
assert!(p assert!(p
.parse(format!("inside(point{{[{}]}})", "1.1E03").as_str()) .parse(format!("point{{[{}]}}", "1.1E03").as_str())
.is_err()); .is_err());
assert!(p assert!(p
.parse(format!("inside(point{{[{}]}})", "1.1E0.3").as_str()) .parse(format!("point{{[{}]}}", "1.1E0.3").as_str())
.is_err()); .is_err());
} }
} }
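
Across the hunks above, the test queries drop the `inside(...)` wrapper: shape literals such as `point{...}` and `hypersphere{...}` are now passed to `filter(...)` directly, or used on their own as a bag expression. The following is a small test-style sketch of the new forms, reusing the `filters_parser()` helper from this module; the test name is illustrative and the query strings mirror cases the assertions above already accept, so treat it as an illustration rather than an additional test from the commit:

    // Illustrative sketch only; mirrors forms shown as accepted above.
    #[test]
    fn bare_shape_operands_parse() {
        let p = filters_parser();
        // Shape literals are passed to `filter` directly...
        assert!(p.parse("filter(<(.field, [1]), point{[0]})").is_ok());
        // ...or stand alone as a bag expression.
        assert!(p.parse("point{[0, 0, 0]}").is_ok());
        assert!(p.parse("hypersphere{[0],1}").is_ok());
    }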

View File

@@ -54,14 +54,21 @@ impl Validator for Bag {
} }
match self { match self {
Bag::ViewPort(bag) => bag.validate(),
Bag::Distinct(bag) => bag.validate(), Bag::Distinct(bag) => bag.validate(),
Bag::Filter(_, bag) => bag.validate(), Bag::Filter(_, bag) => match bag {
None => Ok(LiteralPosition(vec![]).get_type()),
Some(b) => b.validate(),
},
Bag::Complement(bag) => bag.validate(), Bag::Complement(bag) => bag.validate(),
Bag::Intersection(lh, rh) => compare_bag_types(lh, rh), Bag::Intersection(lh, rh) => compare_bag_types(lh, rh),
Bag::Union(lh, rh) => compare_bag_types(lh, rh), Bag::Union(lh, rh) => compare_bag_types(lh, rh),
Bag::Bag(bags) => { Bag::Bag(bags) => {
for b in bags { for b in bags {
b.validate()?; let t = b.validate();
if t.is_err() {
return t;
}
} }
Ok(get_type()) Ok(get_type())
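
In the hunk above, the second field of `Bag::Filter` becomes optional: when no inner bag is given, validation falls back to the type of an empty `LiteralPosition`, and `Bag::Bag` now returns the first failing validation explicitly instead of propagating it with `?`. The following is a self-contained sketch of that fallback pattern using simplified stand-in types (`Node`, `Type`, `validate`), not the crate's real `Bag`/`Validator` definitions:

    // Simplified stand-ins for illustration only.
    #[derive(Debug, PartialEq)]
    enum Type {
        Positions(usize), // dimensionality of a position literal
    }

    enum Node {
        Literal(Vec<f64>),
        Filter(Option<Box<Node>>),
        Group(Vec<Node>),
    }

    fn validate(node: &Node) -> Result<Type, String> {
        match node {
            Node::Literal(coords) => Ok(Type::Positions(coords.len())),
            // No inner bag: fall back to the type of an empty position literal.
            Node::Filter(None) => Ok(Type::Positions(0)),
            Node::Filter(Some(inner)) => validate(inner),
            Node::Group(children) => {
                for child in children {
                    // Return the first error explicitly, mirroring the hunk above.
                    let t = validate(child);
                    if t.is_err() {
                        return t;
                    }
                }
                Ok(Type::Positions(0))
            }
        }
    }

    fn main() {
        assert_eq!(validate(&Node::Filter(None)), Ok(Type::Positions(0)));
        assert_eq!(
            validate(&Node::Filter(Some(Box::new(Node::Literal(vec![0.0, 0.0]))))),
            Ok(Type::Positions(2))
        );
    }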