Compare commits


1 Commit

Author SHA1 Message Date
f68ec7af14 Fix compilation with rustc 1.42.0
More adaptations are required to be able to use the most recent version
of Rust; therefore, fix the dependencies and the compiler version.
2024-08-08 12:14:21 +02:00
12 changed files with 663 additions and 685 deletions

View File

@@ -29,24 +29,23 @@ name = "parser-driver"
path = "src/main.rs"
required-features = ["bin"]
[profile.release]
lto = true
[features]
bin = ["measure_time", "pretty_env_logger"]
[dependencies]
mercator_db = "0.1"
mercator_db = "^0.1"
lalrpop-util = "0.20"
lalrpop-util = "^0.17"
regex = "^1.2"
# Logging macros API
#log = { version = "0.4", features = ["max_level_trace", "release_max_level_info"] }
log = { version = "0.4", features = ["max_level_trace", "release_max_level_trace"] }
#log = { version = "^0.4", features = ["max_level_trace", "release_max_level_info"] }
log = { version = "^0.4", features = ["max_level_trace", "release_max_level_trace"] }
# Used for main.rs
pretty_env_logger = { version = "0.5", optional = true } # Logger implementation
measure_time = { version = "0.8", optional = true } # To mesure parsing time, only required by binary
pretty_env_logger = { version = "^0.3", optional = true } # Logger implementation
measure_time = { version = "^0.6", optional = true } # To mesure parsing time, only required by binary
[build-dependencies]
lalrpop = "0.20"
lalrpop = "^0.17.1"

View File

@@ -1,2 +1,2 @@
[toolchain]
channel = "1.80.0"
channel = "1.42.0"

View File

@@ -1,358 +1,370 @@
use std::collections::{HashMap, HashSet};
use std::rc::Rc;
use mercator_db::space;
use mercator_db::Core;
use mercator_db::CoreQueryParameters;
use mercator_db::IterObjects;
use mercator_db::IterObjectsBySpaces;
use mercator_db::Properties;
use super::expressions::*;
use super::symbols::*;
fn group_by_space<'s>(
list: IterObjectsBySpaces<'s>,
) -> Box<dyn Iterator<Item = (&'s String, IterObjects<'s>)> + 's> {
// Filter per Properties, in order to regroup by it, then build
// a single SpatialObject per Properties.
let mut hashmap = HashMap::new();
for (space, objects) in list {
hashmap.entry(space).or_insert_with(Vec::new).push(objects);
}
Box::new(hashmap.into_iter().map(|(space, objects)| {
let objects: IterObjects = Box::new(objects.into_iter().flatten());
(space, objects)
}))
}
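
The grouping above relies on the standard HashMap entry API. As a point of reference, here is a minimal, self-contained sketch of the same pattern with illustrative types (not the crate's own iterator aliases):

use std::collections::HashMap;

// Collect (key, value) pairs so that each key appears exactly once,
// with every value recorded under it.
fn group_pairs<K, V>(pairs: Vec<(K, V)>) -> HashMap<K, Vec<V>>
where
    K: std::hash::Hash + Eq,
{
    let mut map: HashMap<K, Vec<V>> = HashMap::new();
    for (k, v) in pairs {
        map.entry(k).or_insert_with(Vec::new).push(v);
    }
    map
}
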
fn distinct_helper(list: IterObjectsBySpaces) -> IterObjectsBySpaces {
// Make sure to collect all object iterators per space, so that
// each space appears only once.
group_by_space(list)
// We would lose some objects otherwise when creating the
// HashMaps. Also this makes sure the values are unique.
.map(|(space, iter)| {
let uniques: HashSet<_> = iter.collect();
let uniques: IterObjects = Box::new(uniques.into_iter());
(space, uniques)
})
.collect()
}
fn into_positions_hashset(
objects_by_spaces: IterObjectsBySpaces,
) -> HashMap<&String, Rc<HashSet<space::Position>>> {
// Make sure to collect all object iterators per space, so that
// each space appears only once.
group_by_space(objects_by_spaces)
// We would lose some objects otherwise when creating the HashSets.
.map(|(space, iter)| {
let hash_set: HashSet<_> = iter.map(|(position, _)| position).collect();
(space, Rc::new(hash_set))
})
.collect::<HashMap<_, _>>()
}
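
The sets are wrapped in Rc because each per-space filter closure built later needs its own handle on the positions, and cloning an Rc is a cheap pointer copy rather than a copy of the whole HashSet. A small illustrative sketch of that pattern, detached from the crate's types:

use std::collections::HashSet;
use std::rc::Rc;

// Hand a shared, cheaply clonable set to a move closure.
fn make_exclusion_filter(inside: Rc<HashSet<u32>>) -> impl Fn(u32) -> bool {
    move |x| !inside.contains(&x)
}

Each call site clones the Rc before moving it into the closure, exactly as volume.clone() does in complement_helper and intersect_helper below.
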
// Strictly not inside nor on the surface.
// TODO: inside must contain the valid positions in all expected spaces
fn complement_helper<'h>(
core: &'h Core,
parameters: &'h CoreQueryParameters<'h>,
space_id: &'h str,
inside: IterObjectsBySpaces<'h>,
) -> mercator_db::ResultSet<'h> {
let (low, high) = parameters.db.space(space_id)?.bounding_box();
let inside = into_positions_hashset(inside);
let points = core.get_by_shape(parameters, space::Shape::BoundingBox(low, high), space_id)?;
let results = points
.into_iter()
.filter_map(move |(space, v)| match inside.get(space) {
None => None, // Space not found, so no point might exist!
Some(volume) => {
let volume = volume.clone();
let iter: IterObjects = Box::new(v.filter(move |a| !volume.contains(&a.0)));
Some((space, iter))
}
})
.collect();
Ok(results)
}
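
In other words, the complement is computed as "everything returned by a bounding-box query over the whole space, minus the positions already known to be inside". A minimal sketch of that final filtering step, with plain values standing in for the crate's position/property pairs:

use std::collections::HashSet;

// Keep only the elements that are not in the `inside` set.
fn complement_of<T: std::hash::Hash + Eq>(all: Vec<T>, inside: &HashSet<T>) -> Vec<T> {
    all.into_iter().filter(|x| !inside.contains(x)).collect()
}
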
// Intersection based only on spatial positions!
fn intersect_helper<'h>(
smaller: IterObjectsBySpaces<'h>,
bigger: IterObjectsBySpaces<'h>,
) -> IterObjectsBySpaces<'h> {
let smaller = into_positions_hashset(smaller);
bigger
.into_iter()
.filter_map(
move |(space, bigger_object_iter)| match smaller.get(space) {
None => None,
Some(volume) => {
let volume = volume.clone();
let filtered: IterObjects =
Box::new(bigger_object_iter.filter(move |a| volume.contains(&a.0)));
Some((space, filtered))
}
},
)
.collect()
}
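
Intersection follows the mirror-image pattern: hash the (predicted) smaller operand and keep only the elements of the bigger one that occur in it. A standalone sketch with stand-in types:

use std::collections::HashSet;

// Intersect by membership test against the smaller side.
fn intersect_by_membership<T: std::hash::Hash + Eq>(smaller: Vec<T>, bigger: Vec<T>) -> Vec<T> {
    let smaller: HashSet<T> = smaller.into_iter().collect();
    bigger.into_iter().filter(|x| smaller.contains(x)).collect()
}

The caller decides which operand counts as "smaller" from predict(), which is why intersection() below may swap left and right.
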
impl Bag {
fn distinct<'b>(
&'b self,
core_id: &'b str,
parameters: &'b CoreQueryParameters<'b>,
) -> mercator_db::ResultSet<'b> {
let results = self.execute(core_id, parameters)?;
Ok(distinct_helper(results))
}
fn complement<'b>(
&'b self,
core_id: &'b str,
parameters: &'b CoreQueryParameters<'b>,
core: &'b Core,
) -> mercator_db::ResultSet<'b> {
let inside = self.execute(core_id, parameters)?;
// FIXME: The complement of a set should be computed within its
// definition space. We don't know it here, so we use the universe.
complement_helper(
core,
parameters,
mercator_db::space::Space::universe().name(),
inside,
)
}
fn intersection<'b>(
&'b self,
core_id: &'b str,
parameters: &'b CoreQueryParameters<'b>,
rh: &'b Bag,
) -> mercator_db::ResultSet<'b> {
let left = self.execute(core_id, parameters)?;
let right = rh.execute(core_id, parameters)?;
let v = if rh.predict(parameters.db) < self.predict(parameters.db) {
intersect_helper(right, left)
} else {
intersect_helper(left, right)
};
Ok(v)
}
fn union<'b>(
&'b self,
core_id: &'b str,
parameters: &'b CoreQueryParameters<'b>,
rh: &'b Bag,
) -> mercator_db::ResultSet<'b> {
let mut left = self.execute(core_id, parameters)?;
let mut right = rh.execute(core_id, parameters)?;
let union = if rh.predict(parameters.db) < self.predict(parameters.db) {
left.append(&mut right);
left
} else {
right.append(&mut left);
right
};
Ok(union)
}
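
Union simply concatenates the two results, appending the predicted-smaller one into the bigger one so fewer elements are moved; deduplication is left to the separate distinct operator. A sketch of that choice:

// Vec::append drains `small` into `big` in place, so appending the smaller
// vector moves fewer elements.
fn union_by_size<T>(mut big: Vec<T>, mut small: Vec<T>) -> Vec<T> {
    big.append(&mut small);
    big
}
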
fn filter<'b>(
&'b self,
predicate: &'b Predicate,
core_id: &'b str,
parameters: &'b CoreQueryParameters<'b>,
) -> mercator_db::ResultSet<'b> {
let results = self.execute(core_id, parameters)?;
Ok(results
.into_iter()
.map(move |(space, positions)| {
let positions = positions.collect::<Vec<_>>();
(
space,
Box::new(positions.into_iter().filter(move |(position, properties)| {
predicate.eval((space, position, properties))
})) as IterObjects,
)
})
.collect())
impl From<&LiteralPosition> for space::Position {
fn from(literal: &LiteralPosition) -> Self {
let v: Vec<f64> = literal.into();
v.into()
}
}
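
The conversion goes through Vec<f64> rather than being written out directly, chaining two existing From impls. A self-contained sketch of that style, with hypothetical Literal and Position types standing in for the crate's LiteralPosition and space::Position:

// Stand-in types; not the crate's own.
struct Literal(Vec<f64>);
struct Position(Vec<f64>);

impl From<&Literal> for Vec<f64> {
    fn from(l: &Literal) -> Self {
        l.0.clone()
    }
}

impl From<&Literal> for Position {
    fn from(l: &Literal) -> Self {
        // Reuse the intermediate conversion instead of duplicating it.
        let v: Vec<f64> = l.into();
        Position(v)
    }
}
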
impl Shape {
fn inside<'s>(
&'s self,
parameters: &'s CoreQueryParameters<'s>,
core: &'s Core,
) -> mercator_db::ResultSet<'s> {
let db = parameters.db;
let param = match self {
Shape::Point(space_id, position) => {
let space = db.space(space_id)?;
let position: Vec<f64> = position.into();
let position = space.encode(&position)?;
Ok((space_id, space::Shape::Point(position)))
}
Shape::HyperRectangle(space_id, bounding_box) => {
if bounding_box.len() != 2 {
//FIXME: Support arbitrary HyperRectangles
Err(
"The number of position is different from 2, which is unsupported."
.to_string(),
)
} else {
let space = db.space(space_id)?;
let low: Vec<f64> = (&bounding_box[0]).into();
let high: Vec<f64> = (&bounding_box[1]).into();
let low = space.encode(&low)?;
let high = space.encode(&high)?;
Ok((space_id, space::Shape::BoundingBox(low, high)))
}
}
Shape::HyperSphere(space_id, position, radius) => {
let space = db.space(space_id)?;
let position: Vec<f64> = position.into();
let position = space.encode(&position)?;
// We have to provide a position with all the dimensions
// for the encoding to work as expected.
let mut r = vec![0f64; position.dimensions()];
r[0] = radius.into();
let radius = space.encode(&r)?[0];
Ok((space_id, space::Shape::HyperSphere(position, radius)))
}
Shape::Label(_, id) => {
// Not a real shape, so short circuit and return.
return core.get_by_label(parameters, id);
}
Shape::Nifti(_space_id) => Err("Inside-Nifti: not yet implemented".to_string()),
};
match param {
Ok((space_id, shape)) => core.get_by_shape(parameters, shape, space_id),
Err(e) => Err(e),
impl From<&LiteralNumber> for space::Coordinate {
fn from(literal: &LiteralNumber) -> Self {
match literal {
LiteralNumber::Float(f) => (*f).into(),
LiteralNumber::Int(i) => (*i as u64).into(),
}
}
}
fn outside<'s>(
&'s self,
parameters: &'s CoreQueryParameters<'s>,
core: &'s Core,
) -> mercator_db::ResultSet<'s> {
let (space_id, inside) = match self {
Shape::Point(space_id, position) => {
let position: Vec<f64> = position.into();
let positions = vec![position.into()];
let inside = core.get_by_positions(parameters, positions, space_id)?;
fn complement_helper<'c>(
core: &'c Core,
parameters: &CoreQueryParameters<'c>,
space_id: &str,
inside: Vec<(&'c String, Vec<(space::Position, &'c Properties)>)>,
) -> mercator_db::ResultSet<'c> {
let (low, high) = parameters.db.space(space_id)?.bounding_box();
match core.get_by_shape(parameters, &space::Shape::BoundingBox(low, high), space_id) {
e @ Err(_) => e,
Ok(points) => {
let hashmap = inside.into_iter().collect::<HashMap<_, _>>();
Ok((space_id, inside))
}
Shape::HyperRectangle(space_id, bounding_box) => {
// We need to adapt the bounding_box to ensure the
// surface will not be included in the inside set, so we
// compute the biggest bounding box contained within the
// given box.
Ok(points
.into_iter()
.filter_map(|(space, v)| match hashmap.get(space) {
None => None,
Some(list) => {
Some((space, v.into_iter().filter(|t| !list.contains(t)).collect()))
}
})
.collect::<Vec<_>>())
}
}
}
// Smallest increment possible
let mut increment = Vec::with_capacity(bounding_box[0].dimensions());
for _ in 0..bounding_box[0].dimensions() {
increment.push(f64::EPSILON);
fn view_port<'c>(
core_id: &str,
parameters: &CoreQueryParameters<'c>,
bag: &Bag,
) -> mercator_db::ResultSet<'c> {
if let Some((low, high)) = parameters.view_port {
let vp = Bag::Inside(Shape::HyperRectangle(
bag.space().clone(),
vec![low.into(), high.into()],
));
intersection(core_id, parameters, &vp, bag)
} else {
bag.execute(core_id, parameters)
}
}
fn distinct<'c>(
core_id: &str,
parameters: &CoreQueryParameters<'c>,
bag: &Bag,
) -> mercator_db::ResultSet<'c> {
match bag.execute(core_id, parameters) {
e @ Err(_) => e,
Ok(mut v) => {
let set: HashSet<_> = v.drain(..).collect(); // dedup
v.extend(set.into_iter());
Ok(v)
}
}
}
fn filter_helper<'c>(
predicate: &Predicate,
bag: &Bag,
core_id: &str,
parameters: &CoreQueryParameters<'c>,
) -> mercator_db::ResultSet<'c> {
match bag.execute(core_id, parameters) {
e @ Err(_) => e,
Ok(results) => Ok(results
.into_iter()
.filter_map(|(space, positions)| {
let filtered = positions
.into_iter()
.filter(|(position, properties)| predicate.eval((space, position, properties)))
.collect::<Vec<_>>();
if filtered.is_empty() {
None
} else {
Some((space, filtered))
}
// Add it to the lower bound
let mut low: space::Position = (&bounding_box[0]).into();
low += increment.clone().into();
// Subtract it from the upper bound
let mut high: space::Position = (&bounding_box[1]).into();
high -= increment.into();
let inside =
core.get_by_shape(parameters, space::Shape::BoundingBox(low, high), space_id)?;
Ok((space_id, inside))
}
Shape::HyperSphere(space_id, center, radius) => {
// Smallest decrement possible, to exclude the surface
let mut radius: f64 = radius.into();
radius -= f64::EPSILON;
let center: space::Position = center.into();
let inside = core.get_by_shape(
parameters,
space::Shape::HyperSphere(center, radius.into()),
space_id,
)?;
Ok((space_id, inside))
}
Shape::Label(space_id, id) => {
let inside = core.get_by_label(parameters, id)?;
Ok((space_id, inside))
}
Shape::Nifti(_space_id) => Err("Outside-nifti: not yet implemented".to_string()),
}?;
complement_helper(core, parameters, space_id, inside)
})
.collect::<Vec<_>>()),
}
}
fn filter<'c>(
core_id: &'c str,
parameters: &'c CoreQueryParameters<'c>,
predicate: &'c Option<Predicate>,
bag: &'c Bag,
core_id: &str,
parameters: &CoreQueryParameters<'c>,
predicate: &Option<Predicate>,
bag: &Option<Box<Bag>>,
) -> mercator_db::ResultSet<'c> {
match predicate {
None => bag.execute(core_id, parameters),
Some(predicate) => bag.filter(predicate, core_id, parameters),
None => {
if let Some(bag) = bag {
bag.execute(core_id, parameters)
} else {
Err("Filter without predicate nor data set.".to_string())
}
}
Some(predicate) => match bag {
None => {
let (low, high) = space::Space::universe().bounding_box();
let low: Vec<_> = low.into();
let high: Vec<_> = high.into();
let shape = Shape::HyperRectangle(
space::Space::universe().name().clone(),
vec![
LiteralPosition(
low.into_iter()
.map(LiteralNumber::Float)
.collect::<Vec<_>>(),
),
LiteralPosition(
high.into_iter()
.map(LiteralNumber::Float)
.collect::<Vec<_>>(),
),
],
);
filter_helper(predicate, &Bag::Inside(shape), core_id, parameters)
}
Some(bag) => filter_helper(predicate, bag.as_ref(), core_id, parameters),
},
}
}
fn complement<'c>(
core_id: &str,
parameters: &CoreQueryParameters<'c>,
core: &'c Core,
bag: &Bag,
) -> mercator_db::ResultSet<'c> {
match bag.execute(core_id, parameters) {
// FIXME: The complement of a set is computed within its definition space.
e @ Err(_) => e,
Ok(inside) => complement_helper(
core,
parameters,
mercator_db::space::Space::universe().name(),
inside,
),
}
}
fn intersection<'c>(
core_id: &str,
parameters: &CoreQueryParameters<'c>,
rh: &Bag,
lh: &Bag,
) -> mercator_db::ResultSet<'c> {
let l = lh.execute(core_id, parameters);
if let Ok(l) = l {
let r = rh.execute(core_id, parameters);
if let Ok(r) = r {
let mut v = vec![];
if rh.predict(parameters.db) < lh.predict(parameters.db) {
for o in r {
if l.contains(&o) {
v.push(o);
}
}
} else {
for o in l {
if r.contains(&o) {
v.push(o);
}
}
}
Ok(v)
} else {
r
}
} else {
l
}
}
fn union<'c>(
core_id: &str,
parameters: &CoreQueryParameters<'c>,
rh: &Bag,
lh: &Bag,
) -> mercator_db::ResultSet<'c> {
let l = lh.execute(core_id, parameters);
if let Ok(mut l) = l {
let r = rh.execute(core_id, parameters);
if let Ok(mut r) = r {
if rh.predict(parameters.db) < lh.predict(parameters.db) {
l.append(&mut r);
Ok(l)
} else {
r.append(&mut l);
Ok(r)
}
} else {
r
}
} else {
l
}
}
fn bag<'c>(
core_id: &'c str,
parameters: &'c CoreQueryParameters<'c>,
bags: &'c [Bag],
core_id: &str,
parameters: &CoreQueryParameters<'c>,
bags: &[Bag],
) -> mercator_db::ResultSet<'c> {
let mut results = Vec::new();
let mut v = vec![];
for bag in bags {
let mut result = bag.execute(core_id, parameters)?;
results.append(&mut result);
let b = bag.execute(core_id, parameters);
match b {
e @ Err(_) => {
return e;
}
Ok(mut b) => {
v.append(&mut b);
}
}
}
Ok(results)
Ok(v)
}
fn inside<'c>(
parameters: &CoreQueryParameters<'c>,
core: &'c Core,
shape: &Shape,
) -> mercator_db::ResultSet<'c> {
let db = parameters.db;
let param = match shape {
Shape::Point(space_id, position) => {
let space = db.space(space_id)?;
let position: Vec<f64> = position.into();
let position = space.encode(&position)?;
Ok((space_id, space::Shape::Point(position)))
}
Shape::HyperRectangle(space_id, bounding_box) => {
if bounding_box.len() != 2 {
Err("The number of position is different from 2, which is unsupported.".to_string())
} else {
let space = db.space(space_id)?;
let low: Vec<f64> = (&bounding_box[0]).into();
let high: Vec<f64> = (&bounding_box[1]).into();
let low = space.encode(&low)?;
let high = space.encode(&high)?;
Ok((space_id, space::Shape::BoundingBox(low, high)))
}
}
Shape::HyperSphere(space_id, position, radius) => {
let space = db.space(space_id)?;
let position: Vec<f64> = position.into();
let position = space.encode(&position)?;
let mut r = vec![];
for _ in 0..position.dimensions() {
r.push(radius.into());
}
let radius = space.encode(&r)?[0];
//FIXME: RADIUS IS A LENGTH, HOW TO ENCODE IT INTO THE SPACE?
Ok((space_id, space::Shape::HyperSphere(position, radius)))
}
Shape::Label(_, id) => {
// Not a real shape, so short circuit and return.
return core.get_by_label(parameters, id);
}
Shape::Nifti(_space_id) => Err("Inside-Nifti: not yet implemented".to_string()),
};
match param {
Ok((space_id, shape)) => core.get_by_shape(parameters, &shape, space_id),
Err(e) => Err(e),
}
}
fn outside<'c>(
parameters: &CoreQueryParameters<'c>,
core: &'c Core,
shape: &Shape,
) -> mercator_db::ResultSet<'c> {
match shape {
Shape::Point(space_id, position) => {
let position: Vec<f64> = position.into();
match core.get_by_positions(parameters, &[position.into()], space_id) {
e @ Err(_) => e,
Ok(inside) => complement_helper(core, parameters, space_id, inside),
}
}
Shape::HyperRectangle(space_id, bounding_box) => {
// We need to adapt the bounding_box to ensure the
// surface will not be included in the inside set, so we
// compute the biggest bounding box contained within the
// given box.
// Smallest increment possible
let mut increment = Vec::with_capacity(bounding_box[0].dimensions());
for _ in 0..bounding_box[0].dimensions() {
increment.push(std::f64::EPSILON);
}
// Add it to the lower bound
let mut low: space::Position = (&bounding_box[0]).into();
low += increment.clone().into();
// Subtract it from the upper bound
let mut high: space::Position = (&bounding_box[1]).into();
high -= increment.into();
match core.get_by_shape(parameters, &space::Shape::BoundingBox(low, high), space_id) {
e @ Err(_) => e,
Ok(inside) => complement_helper(core, parameters, space_id, inside),
}
}
Shape::HyperSphere(space_id, center, radius) => {
// Smallest decrement possible, to exclude the surface
let mut radius: f64 = radius.into();
radius -= std::f64::EPSILON;
let center: space::Position = center.into();
match core.get_by_shape(
parameters,
&space::Shape::HyperSphere(center, radius.into()),
space_id,
) {
e @ Err(_) => e,
Ok(inside) => complement_helper(core, parameters, space_id, inside),
}
}
Shape::Label(_, _) => Err("Label: not yet implemented".to_string()),
Shape::Nifti(_space_id) => Err("Outside-nifti: not yet implemented".to_string()),
}
}
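
The "strictly outside" variants all lean on the same trick: nudge the shape inward by the smallest representable amount so that points exactly on the surface fall out of the "inside" set before the complement is taken. A simplified sketch of that shrinking step on plain f64 bounds (the real code goes through space::Position and its += / -= operators):

// Shrink an axis-aligned box by the smallest increment in every dimension,
// mirroring what `outside` does before querying for the inside set.
fn shrink_box(low: &mut [f64], high: &mut [f64]) {
    for x in low.iter_mut() {
        *x += f64::EPSILON;
    }
    for x in high.iter_mut() {
        *x -= f64::EPSILON;
    }
}
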
impl<'e> Executor<'e> for Projection {
type ResultSet = mercator_db::ResultSet<'e>;
fn execute(
&'e self,
core_id: &'e str,
parameters: &'e CoreQueryParameters<'e>,
fn execute<'f: 'e>(
&self,
core_id: &str,
parameters: &CoreQueryParameters<'f>,
) -> Self::ResultSet {
match self {
Projection::Nifti(_, _, _bag) => Err("Proj-Nifti: not yet implemented".to_string()),
Projection::Json(_, _format, bag) => {
Projection::JSON(_, _format, bag) => {
bag.execute(core_id, parameters)
// FIXME: Add projections here
}
@@ -363,26 +375,27 @@ impl<'e> Executor<'e> for Projection {
impl<'e> Executor<'e> for Bag {
type ResultSet = mercator_db::ResultSet<'e>;
fn execute(
&'e self,
core_id: &'e str,
parameters: &'e CoreQueryParameters<'e>,
fn execute<'f: 'e>(
&self,
core_id: &str,
parameters: &CoreQueryParameters<'f>,
) -> Self::ResultSet {
let core = parameters.db.core(core_id)?;
match self {
Bag::Distinct(bag) => bag.distinct(core_id, parameters),
Bag::ViewPort(bag) => view_port(core_id, parameters, bag),
Bag::Distinct(bag) => distinct(core_id, parameters, bag),
Bag::Filter(predicate, bag) => filter(core_id, parameters, predicate, bag),
Bag::Complement(bag) => bag.complement(core_id, parameters, core),
Bag::Intersection(lh, rh) => lh.intersection(core_id, parameters, rh),
Bag::Union(lh, rh) => lh.union(core_id, parameters, rh),
Bag::Complement(bag) => complement(core_id, parameters, core, bag),
Bag::Intersection(lh, rh) => intersection(core_id, parameters, rh, lh),
Bag::Union(lh, rh) => union(core_id, parameters, rh, lh),
Bag::Bag(list) => bag(core_id, parameters, list),
Bag::Inside(shape) => shape.inside(parameters, core),
Bag::Inside(shape) => inside(parameters, core, shape),
Bag::Outside(shape) => {
//FIXME: This is currently computed as the complement of the values within the shape, except its surface.
// Should this be instead a list of positions within the shape?
//FIXME: Should we use the Shape's Space to get the maximum bounds or the output Space requested?
shape.outside(parameters, core)
outside(parameters, core, shape)
}
}
}

View File

@@ -14,10 +14,10 @@ pub trait Predictor {
pub trait Executor<'e> {
type ResultSet;
fn execute(
&'e self,
core_id: &'e str,
parameters: &'e CoreQueryParameters<'e>,
fn execute<'f: 'e>(
&self,
core_id: &str,
parameters: &CoreQueryParameters<'f>,
) -> Self::ResultSet;
}
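
The signature change replaces borrows tied to &'e self with a separate method lifetime 'f constrained to outlive 'e ('f: 'e), so the result set's lifetime follows the parameters rather than the borrow of self. A minimal, hypothetical illustration of that bound (not the crate's actual trait):

trait Exec<'e> {
    type Out;
    // 'f: 'e reads "'f outlives 'e": anything borrowed for 'f can be
    // returned or stored where only 'e is required.
    fn run<'f: 'e>(&self, input: &'f str) -> Self::Out;
}

struct Echo;

impl<'e> Exec<'e> for Echo {
    type Out = &'e str;

    fn run<'f: 'e>(&self, input: &'f str) -> Self::Out {
        input // fine: &'f str coerces to &'e str because 'f: 'e
    }
}
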

View File

@@ -38,20 +38,20 @@ lalrpop_mod!(#[allow(clippy::all,unused_parens)] pub queries); // synthesized by
// as well.
// Instead we enable it per module below, except for the tests.
//#[warn(missing_docs)]
#[warn(missing_docs)]
mod evaluators;
//#[warn(missing_docs)]
#[warn(missing_docs)]
mod executors;
//#[warn(missing_docs)]
#[warn(missing_docs)]
mod expressions;
//#[warn(missing_docs)]
#[warn(missing_docs)]
mod predictors;
//#[warn(missing_docs)]
#[warn(missing_docs)]
mod validators;
//#[warn(missing_docs)]
#[warn(missing_docs)]
mod symbols;
//#[warn(missing_docs)]
#[warn(missing_docs)]
mod types;
pub use expressions::Executor;
@@ -59,8 +59,6 @@ pub use expressions::Predictor;
pub use expressions::Validator;
pub use queries::FiltersParser;
pub use queries::QueryParser;
pub use symbols::Bag;
pub use symbols::Projection;
pub use validators::ValidationResult;
#[cfg(test)]

View File

@@ -93,17 +93,12 @@ fn main() {
execute = t.execute(core, &parameters);
}
match execute {
Ok(r) => {
let r = r
.into_iter()
.map(|(space, objects)| (space, objects.collect::<Vec<_>>()))
.collect::<Vec<_>>();
info!("Execution: \n{:#?}", r);
info!("NB results: {:?}", r[0].1.len());
}
Err(e) => info!("Execution: \n{:?}", e),
if let Ok(r) = execute {
//let r = mercator_db::json::model::to_spatial_objects(r);
info!("Execution: \n{:#?}", r);
info!("NB results: {:?}", r.len());
} else {
info!("Execution: \n{:?}", execute);
}
}
}

View File

@@ -1,3 +1,4 @@
use mercator_db::space;
use mercator_db::DataBase;
use super::expressions::Predictor;
@@ -7,7 +8,7 @@ impl Predictor for Projection {
fn predict(&self, db: &DataBase) -> Result<f64, String> {
match self {
Projection::Nifti(_, _, bag) => bag.predict(db),
Projection::Json(_, _, bag) => bag.predict(db),
Projection::JSON(_, _, bag) => bag.predict(db),
}
}
}
@@ -15,8 +16,12 @@ impl Predictor for Projection {
impl Predictor for Bag {
fn predict(&self, db: &DataBase) -> Result<f64, String> {
match self {
Bag::ViewPort(bag) => bag.predict(db),
Bag::Distinct(bag) => bag.predict(db),
Bag::Filter(_, bag) => bag.predict(db),
Bag::Filter(_, bag) => match bag {
None => Ok(db.space(space::Space::universe().name())?.volume()),
Some(b) => b.predict(db),
},
Bag::Complement(bag) => Ok(db.space(bag.space())?.volume() - bag.predict(db)?),
Bag::Intersection(lh, rh) => {
let l = lh.predict(db)?;

View File

@@ -51,7 +51,7 @@ JsonOperator: symbols::Projection = {
None => Space::universe().name().clone(),
};
symbols::Projection::Json(space_id, f, b)
symbols::Projection::JSON(space_id, f, b)
}
};
@@ -137,6 +137,8 @@ Aggregations: symbols::Aggregation = {
//*********************************************************************/
pub Filters: symbols::Bag = {
<Bags>
//<Bags> =>
// symbols::Bag::ViewPort(Box::new(<>))
};
// All these expressions generate bags.
@@ -187,25 +189,13 @@ Union: symbols::Bag = {
Filter: symbols::Bag = {
// "filter" "(" <p:Predicates> "," <b:Bags> ")" =>
"filter" "(" <b:Bags> ")" =>
symbols::Bag::Filter(None, Box::new(b)),
"filter" "(" <p:Predicates> <b:("," <Bags> )?> ")" => {
symbols::Bag::Filter(None, Some(Box::new(b))),
"filter" "(" <p:Predicates> <b:("," <Bags> )?> ")" =>
match b {
None => {
let (low, high) = Space::universe().bounding_box();
let low: Vec<_> = low.into();
let high: Vec<_> = high.into();
let shape = symbols::Shape::HyperRectangle(
Space::universe().name().clone(),
vec![
symbols::LiteralPosition(low.into_iter().map(symbols::LiteralNumber::Float).collect()),
symbols::LiteralPosition(high.into_iter().map(symbols::LiteralNumber::Float).collect()),
],
);
symbols::Bag::Filter(Some(p), Box::new(symbols::Bag::Inside(shape)))
}
Some(b) => symbols::Bag::Filter(Some(p), Box::new(b)),
None => symbols::Bag::Filter(Some(p), None),
Some(b) => symbols::Bag::Filter(Some(p), Some(Box::new(b))),
}
},
};
Predicates: symbols::Predicate = {

View File

@@ -11,14 +11,14 @@ pub use super::types::*;
#[derive(Clone, Debug)]
pub enum Projection {
Nifti(String, LiteralSelector, Bag),
Json(String, JsonValue, Bag),
JSON(String, JsonValue, Bag),
}
impl Projection {
pub fn space(&self) -> &String {
match self {
Projection::Nifti(space, _, _) => space,
Projection::Json(space, _, _) => space,
Projection::Nifti(space, _, _) => &space,
Projection::JSON(space, _, _) => &space,
}
}
}
@@ -57,9 +57,11 @@ struct Transform {
/**********************************************************************/
#[derive(Clone, Debug)]
pub enum Bag {
// This is an implicit operator, inserted by the parser. Never to be used directly.
ViewPort(Box<Bag>),
// Bags
Distinct(Box<Bag>),
Filter(Option<Predicate>, Box<Bag>),
Filter(Option<Predicate>, Option<Box<Bag>>),
Complement(Box<Bag>),
Intersection(Box<Bag>, Box<Bag>),
Union(Box<Bag>, Box<Bag>),
@@ -73,8 +75,12 @@ pub enum Bag {
impl Bag {
pub fn space(&self) -> &String {
match self {
Bag::ViewPort(bag) => bag.space(),
Bag::Distinct(bag) => bag.space(),
Bag::Filter(_, bag) => bag.space(),
Bag::Filter(_, bag) => match bag {
None => space::Space::universe().name(),
Some(b) => b.space(),
},
Bag::Complement(bag) => bag.space(),
Bag::Intersection(lh, _) => {
// We are assuming lh and rh are in the same space.
@@ -138,7 +144,7 @@ impl Shape {
pub fn volume(&self) -> f64 {
match self {
Shape::Point(_, _) => f64::EPSILON, // The smallest non-zero volume possible
Shape::Point(_, _) => std::f64::EPSILON, // The smallest non-zero volume possible
Shape::HyperRectangle(_space, pos) => {
//TODO: At this time, only hyperrectangles aligned to the axes and defined by two points are supported.
assert_eq!(pos.len(), 2);
@@ -202,7 +208,7 @@ impl Shape {
}
Shape::Label(_, _) => {
// FIXME: Need to find a way to figure out the approximate volume of this specific ID, or return MAX or MIN.
f64::EPSILON
std::f64::EPSILON
}
Shape::Nifti(_) => unimplemented!("Nifti"),
}
@@ -237,9 +243,7 @@ impl Position {
Ordering::Greater => 1,
Ordering::Less => -1,
};
let v = vec![LiteralNumber::Int(x)];
LiteralPosition(v)
LiteralPosition(vec![LiteralNumber::Int(x)])
}
}
}
@@ -258,33 +262,14 @@ pub enum LiteralNumber {
Float(f64),
}
impl From<&LiteralNumber> for f64 {
impl From<&LiteralNumber> for Vec<f64> {
fn from(l: &LiteralNumber) -> Self {
match l {
let r = match l {
LiteralNumber::Int(x) => (*x) as f64,
LiteralNumber::Float(x) => *x,
}
}
}
};
impl From<LiteralNumber> for f64 {
fn from(l: LiteralNumber) -> Self {
(&l).into()
}
}
impl From<&LiteralNumber> for space::Coordinate {
fn from(literal: &LiteralNumber) -> Self {
match literal {
LiteralNumber::Float(f) => (*f).into(),
LiteralNumber::Int(i) => (*i as u64).into(),
}
}
}
impl From<LiteralNumber> for space::Coordinate {
fn from(literal: LiteralNumber) -> Self {
(&literal).into()
vec![r]
}
}
@@ -309,7 +294,7 @@ pub struct LiteralPosition(pub Vec<LiteralNumber>);
impl LiteralPosition {
pub fn get_type(&self) -> LiteralTypes {
let Self(v) = self;
let mut t = Vec::with_capacity(v.len());
let mut t = Vec::new();
for n in v {
t.push(match n {
@@ -342,35 +327,34 @@ impl LiteralPosition {
}
}
impl From<&LiteralPosition> for Vec<f64> {
fn from(l: &LiteralPosition) -> Self {
// Speed-wise this should be the same; the downside is that the newly
// allocated vector might be suboptimal in terms of space.
//let LiteralPosition(v) = l;
//v.iter().map(|literal| literal.into()).collect()
let LiteralPosition(v) = l;
let mut lv = Vec::with_capacity(v.len());
for value in v {
lv.push(value.into());
impl From<&LiteralNumber> for f64 {
fn from(l: &LiteralNumber) -> Self {
match l {
LiteralNumber::Int(x) => (*x) as f64,
LiteralNumber::Float(x) => *x,
}
lv
}
}
impl From<LiteralPosition> for Vec<f64> {
fn from(l: LiteralPosition) -> Self {
(&l).into()
impl From<&LiteralPosition> for Vec<f64> {
fn from(l: &LiteralPosition) -> Self {
let LiteralPosition(v) = l;
let mut r = Vec::with_capacity(v.len());
for x in v {
let x = match x {
LiteralNumber::Int(x) => (*x) as f64,
LiteralNumber::Float(x) => *x,
};
r.push(x);
}
r
}
}
impl From<&Vec<f64>> for LiteralPosition {
fn from(v: &Vec<f64>) -> Self {
// Speed-wise this should be the same; the downside is that the newly
// allocated vector might be suboptimal in terms of space.
//LiteralPosition(v.iter().map(|value| LiteralNumber::Float(*value)).collect())
let mut lv = Vec::with_capacity(v.len());
for value in v {
lv.push(LiteralNumber::Float(*value));
@@ -379,36 +363,10 @@ impl From<&Vec<f64>> for LiteralPosition {
LiteralPosition(lv)
}
}
impl From<Vec<f64>> for LiteralPosition {
fn from(v: Vec<f64>) -> Self {
(&v).into()
}
}
impl From<&space::Position> for LiteralPosition {
fn from(position: &space::Position) -> Self {
let position: Vec<f64> = position.into();
position.into()
}
}
impl From<space::Position> for LiteralPosition {
fn from(position: space::Position) -> Self {
(&position).into()
}
}
impl From<&LiteralPosition> for space::Position {
fn from(position: &LiteralPosition) -> Self {
let position: Vec<f64> = position.into();
position.into()
}
}
impl From<LiteralPosition> for space::Position {
fn from(position: LiteralPosition) -> Self {
(&position).into()
let lv: Vec<f64> = position.into();
(&lv).into()
}
}

View File

@@ -16,7 +16,7 @@ mod parsing {
fn query() {
let p = query_parser();
let nifti = "nifti(inside(point{[0]}))";
let nifti = "nifti(point{[0]})";
// Option is Empty
assert!(p.parse("").is_ok());
@@ -47,14 +47,14 @@ mod parsing {
let p = query_parser();
// Check allowed forms of the operator
assert!(p.parse("nifti(inside(point{[0]}))").is_ok());
assert!(p.parse("nifti(.properties.id, inside(point{[0]}))").is_ok());
assert!(p.parse("nifti(point{[0]})").is_ok());
assert!(p.parse("nifti(.properties.id, point{[0]})").is_ok());
unimplemented!(); // TO REMEMBER SOME WORK IS DUE HERE.
//FIXME: THIS SHOULD BE ALLOWED
assert!(p.parse("nifti(2, inside(point{[0]}))").is_ok());
assert!(p.parse("nifti(2.23, inside(point{[0]}))").is_ok());
assert!(p.parse("nifti(2, point{[0]})").is_ok());
assert!(p.parse("nifti(2.23, point{[0]})").is_ok());
//FIXME: SYNTAX OK, TYPE NOT
assert!(p.parse("nifti(point{[0], \"space\"})").is_err());
@@ -64,16 +64,16 @@ mod parsing {
fn json_operator() {
let p = query_parser();
assert!(p.parse("json(true, inside(point{[0]}))").is_ok());
assert!(p.parse("json(23, inside(point{[0]}))").is_ok());
assert!(p.parse("json([23, 24], inside(point{[0]}))").is_ok());
assert!(p.parse("json([23, count(.)], inside(point{[0]}))").is_ok());
assert!(p.parse("json(true, point{[0]})").is_ok());
assert!(p.parse("json(23, point{[0]})").is_ok());
assert!(p.parse("json([23, 24], point{[0]})").is_ok());
assert!(p.parse("json([23, count(.)], point{[0]})").is_ok());
assert!(p.parse("json(true)").is_err());
assert!(p.parse("json(true,)").is_err());
assert!(p.parse("json(, inside(point{[0]}))").is_err());
assert!(p.parse("json(inside(point{[0]}))").is_err());
assert!(p.parse("json(, point{[0]})").is_err());
assert!(p.parse("json(point{[0]})").is_err());
assert!(p.parse("json(true, point)").is_err());
}
@@ -83,24 +83,24 @@ mod parsing {
let p = query_parser();
assert!(p
.parse(format!("json({}, inside(point{{[0]}}))", "true").as_str())
.parse(format!("json({}, point{{[0]}})", "true").as_str())
.is_ok());
assert!(p
.parse(format!("json({}, inside(point{{[0]}}))", "false").as_str())
.parse(format!("json({}, point{{[0]}})", "false").as_str())
.is_ok());
assert!(p
.parse(format!("json({}, inside(point{{[0]}}))", "null").as_str())
.parse(format!("json({}, point{{[0]}})", "null").as_str())
.is_ok());
// Incorrect capitalisation
assert!(p
.parse(format!("json({}, inside(point{{[0]}}))", "True").as_str())
.parse(format!("json({}, point{{[0]}})", "True").as_str())
.is_err());
assert!(p
.parse(format!("json({}, inside(point{{[0]}}))", "False").as_str())
.parse(format!("json({}, point{{[0]}})", "False").as_str())
.is_err());
assert!(p
.parse(format!("json({}, inside(point{{[0]}}))", "Null").as_str())
.parse(format!("json({}, point{{[0]}})", "Null").as_str())
.is_err());
}
@@ -109,24 +109,24 @@ mod parsing {
let p = query_parser();
assert!(p
.parse(format!("json({}, inside(point{{[0]}}))", "{}").as_str())
.parse(format!("json({}, point{{[0]}})", "{}").as_str())
.is_ok());
assert!(p
.parse(format!("json({}, inside(point{{[0]}}))", "{\"field\": 0}").as_str())
.parse(format!("json({}, point{{[0]}})", "{\"field\": 0}").as_str())
.is_ok());
assert!(p
.parse(format!("json({}, inside(point{{[0]}}))", "{\"field\": 0, \"field1\": 1}").as_str())
.parse(format!("json({}, point{{[0]}})", "{\"field\": 0, \"field1\": 1}").as_str())
.is_ok());
assert!(p
.parse(format!("json({}, inside(point{{[0]}}))", "{\"field\": [0, 1]}").as_str())
.parse(format!("json({}, point{{[0]}})", "{\"field\": [0, 1]}").as_str())
.is_ok());
assert!(p
.parse(format!("json({}, inside(point{{[0]}}))", "{\"field\": {\"field1\": 0}}").as_str())
.parse(format!("json({}, point{{[0]}})", "{\"field\": {\"field1\": 0}}").as_str())
.is_ok());
assert!(p
.parse(
format!(
"json({}, inside(point{{[0]}}))",
"json({}, point{{[0]}})",
"{\"field\": [{\"field1\": 0}, {\"field1\": 1}]}"
)
.as_str()
@@ -139,25 +139,25 @@ mod parsing {
let p = query_parser();
assert!(p
.parse(format!("json({}, inside(point{{[0]}}))", "{:}").as_str())
.parse(format!("json({}, point{{[0]}})", "{:}").as_str())
.is_err());
assert!(p
.parse(format!("json({}, inside(point{{[0]}}))", "{field: 0}").as_str())
.parse(format!("json({}, point{{[0]}})", "{field: 0}").as_str())
.is_err());
assert!(p
.parse(format!("json({}, inside(point{{[0]}}))", "{0: 0}").as_str())
.parse(format!("json({}, point{{[0]}})", "{0: 0}").as_str())
.is_err());
assert!(p
.parse(format!("json({}, inside(point{{[0]}}))", "{\"0\": }").as_str())
.parse(format!("json({}, point{{[0]}})", "{\"0\": }").as_str())
.is_err());
assert!(p
.parse(format!("json({}, inside(point{{[0]}}))", "{\"0\": 0 }").as_str())
.parse(format!("json({}, point{{[0]}})", "{\"0\": 0 }").as_str())
.is_ok());
assert!(p
.parse(format!("json({}, inside(point{{[0]}}))", "{\"field\": 0 }").as_str())
.parse(format!("json({}, point{{[0]}})", "{\"field\": 0 }").as_str())
.is_ok());
assert!(p
.parse(format!("json({}, inside(point{{[0]}}))", "{\"field\": \"0\" }").as_str())
.parse(format!("json({}, point{{[0]}})", "{\"field\": \"0\" }").as_str())
.is_ok());
}
@@ -166,20 +166,20 @@ mod parsing {
let p = query_parser();
assert!(p
.parse(format!("json({}, inside(point{{[0]}}))", "[, 0]").as_str())
.parse(format!("json({}, point{{[0]}})", "[, 0]").as_str())
.is_err());
assert!(p
.parse(format!("json({}, inside(point{{[0]}}))", "[]").as_str())
.parse(format!("json({}, point{{[0]}})", "[]").as_str())
.is_ok());
assert!(p
.parse(format!("json({}, inside(point{{[0]}}))", "[0]").as_str())
.parse(format!("json({}, point{{[0]}})", "[0]").as_str())
.is_ok());
assert!(p
.parse(format!("json({}, inside(point{{[0]}}))", "[0, 1]").as_str())
.parse(format!("json({}, point{{[0]}})", "[0, 1]").as_str())
.is_ok());
assert!(p
.parse(
format!("json({}, inside(point{{[0]}}))", "[{\"field\": 0}, {\"field\": 1}]").as_str()
format!("json({}, point{{[0]}})", "[{\"field\": 0}, {\"field\": 1}]").as_str()
)
.is_ok());
}
@@ -190,40 +190,40 @@ mod parsing {
// count ()
assert!(p
.parse(format!("json({}, inside(point{{[0]}}))", "count()").as_str())
.parse(format!("json({}, point{{[0]}})", "count()").as_str())
.is_err());
assert!(p
.parse(format!("json({}, inside(point{{[0]}}))", "count(distinct)").as_str())
.parse(format!("json({}, point{{[0]}})", "count(distinct)").as_str())
.is_err());
assert!(p
.parse(format!("json({}, inside(point{{[0]}}))", "count(.)").as_str())
.parse(format!("json({}, point{{[0]}})", "count(.)").as_str())
.is_ok());
assert!(p
.parse(format!("json({}, inside(point{{[0]}}))", "count(distinct .)").as_str())
.parse(format!("json({}, point{{[0]}})", "count(distinct .)").as_str())
.is_ok());
// sum ()
assert!(p
.parse(format!("json({}, inside(point{{[0]}}))", "sum()").as_str())
.parse(format!("json({}, point{{[0]}})", "sum()").as_str())
.is_err());
assert!(p
.parse(format!("json({}, inside(point{{[0]}}))", "sum(.)").as_str())
.parse(format!("json({}, point{{[0]}})", "sum(.)").as_str())
.is_ok());
// min ()
assert!(p
.parse(format!("json({}, inside(point{{[0]}}))", "min()").as_str())
.parse(format!("json({}, point{{[0]}})", "min()").as_str())
.is_err());
assert!(p
.parse(format!("json({}, inside(point{{[0]}}))", "min(.)").as_str())
.parse(format!("json({}, point{{[0]}})", "min(.)").as_str())
.is_ok());
// max ()
assert!(p
.parse(format!("json({}, inside(point{{[0]}}))", "max()").as_str())
.parse(format!("json({}, point{{[0]}})", "max()").as_str())
.is_err());
assert!(p
.parse(format!("json({}, inside(point{{[0]}}))", "max(.)").as_str())
.parse(format!("json({}, point{{[0]}})", "max(.)").as_str())
.is_ok());
}
@@ -233,42 +233,42 @@ mod parsing {
// Integers
assert!(p
.parse(format!("json({}, inside(point{{[0]}}))", "0").as_str())
.parse(format!("json({}, point{{[0]}})", "0").as_str())
.is_ok());
assert!(p
.parse(format!("json({}, inside(point{{[0]}}))", "+0").as_str())
.parse(format!("json({}, point{{[0]}})", "+0").as_str())
.is_err());
assert!(p
.parse(format!("json({}, inside(point{{[0]}}))", "-0").as_str())
.parse(format!("json({}, point{{[0]}})", "-0").as_str())
.is_ok());
assert!(p
.parse(format!("json({}, inside(point{{[0]}}))", "1").as_str())
.parse(format!("json({}, point{{[0]}})", "1").as_str())
.is_ok());
assert!(p
.parse(format!("json({}, inside(point{{[0]}}))", "+1").as_str())
.parse(format!("json({}, point{{[0]}})", "+1").as_str())
.is_err());
assert!(p
.parse(format!("json({}, inside(point{{[0]}}))", "-1").as_str())
.parse(format!("json({}, point{{[0]}})", "-1").as_str())
.is_ok());
// Floating point values
assert!(p
.parse(format!("json({}, inside(point{{[0]}}))", "0.0").as_str())
.parse(format!("json({}, point{{[0]}})", "0.0").as_str())
.is_ok());
assert!(p
.parse(format!("json({}, inside(point{{[0]}}))", "+0.0").as_str())
.parse(format!("json({}, point{{[0]}})", "+0.0").as_str())
.is_err());
assert!(p
.parse(format!("json({}, inside(point{{[0]}}))", "-0.0").as_str())
.parse(format!("json({}, point{{[0]}})", "-0.0").as_str())
.is_ok());
assert!(p
.parse(format!("json({}, inside(point{{[0]}}))", "0.1").as_str())
.parse(format!("json({}, point{{[0]}})", "0.1").as_str())
.is_ok());
assert!(p
.parse(format!("json({}, inside(point{{[0]}}))", "+0.01").as_str())
.parse(format!("json({}, point{{[0]}})", "+0.01").as_str())
.is_err());
assert!(p
.parse(format!("json({}, inside(point{{[0]}}))", "-0.01").as_str())
.parse(format!("json({}, point{{[0]}})", "-0.01").as_str())
.is_ok());
}
}
@@ -290,7 +290,7 @@ mod parsing {
assert!(p.parse("").is_err());
assert!(p.parse("inside(point{[0]})").is_ok());
assert!(p.parse("point{[0]}").is_ok());
}
/* Not useful to test this rule
@@ -305,7 +305,7 @@ mod parsing {
assert!(p.parse("distinct()").is_err());
assert!(p.parse("distinct(inside(point{[0]}))").is_ok());
assert!(p.parse("distinct(point{[0]})").is_ok());
}
#[test]
@@ -314,7 +314,7 @@ mod parsing {
assert!(p.parse("complement()").is_err());
assert!(p.parse("complement(inside(point{[0]}))").is_ok());
assert!(p.parse("complement(point{[0]})").is_ok());
}
#[test]
@@ -322,12 +322,12 @@ mod parsing {
let p = filters_parser();
assert!(p.parse("intersection()").is_err());
assert!(p.parse("intersection(inside(point{[0]}))").is_err());
assert!(p.parse("intersection(point{[0]})").is_err());
assert!(p
.parse("intersection(inside(point{[0]}), inside(point{[0]}), inside(point{[0]}))")
.parse("intersection(point{[0]}, point{[0]}, point{[0]})")
.is_err());
assert!(p.parse("intersection(inside(point{[0]}), inside(point{[0]}))").is_ok());
assert!(p.parse("intersection(point{[0]}, point{[0]})").is_ok());
}
#[test]
@@ -335,12 +335,12 @@ mod parsing {
let p = filters_parser();
assert!(p.parse("union()").is_err());
assert!(p.parse("union(inside(point{[0]}))").is_err());
assert!(p.parse("union(point{[0]})").is_err());
assert!(p
.parse("union(inside(point{[0]}), inside(point{[0]}), inside(point{[0]}))")
.parse("union(point{[0]}, point{[0]}, point{[0]})")
.is_err());
assert!(p.parse("union(inside(point{[0]}), inside(point{[0]}))").is_ok());
assert!(p.parse("union(point{[0]}, point{[0]})").is_ok());
}
#[test]
@@ -348,10 +348,10 @@ mod parsing {
let p = filters_parser();
assert!(p.parse("filter()").is_err());
assert!(p.parse("filter(inside(point{[0]}))").is_ok());
assert!(p.parse("filter(point{[0]})").is_ok());
assert!(p.parse("filter(=(., [0]))").is_ok());
assert!(p.parse("filter(=(., [0]), inside(point{[0]}))").is_ok());
assert!(p.parse("filter(=(., [0]), point{[0]})").is_ok());
}
/* Not useful to test this rule
@@ -365,17 +365,17 @@ mod parsing {
let p = filters_parser();
assert!(p
.parse(format!("filter({}, inside(point{{[0]}}))", "<(., [0])").as_str())
.parse(format!("filter({}, point{{[0]}})", "<(., [0])").as_str())
.is_ok());
assert!(p
.parse(format!("filter({}, inside(point{{[0]}}))", "<(, [0])").as_str())
.parse(format!("filter({}, point{{[0]}})", "<(, [0])").as_str())
.is_err());
assert!(p
.parse(format!("filter({}, inside(point{{[0]}}))", "<(.)").as_str())
.parse(format!("filter({}, point{{[0]}})", "<(.)").as_str())
.is_err());
assert!(p
.parse(format!("filter({}, inside(point{{[0]}}))", "<()").as_str())
.parse(format!("filter({}, point{{[0]}})", "<()").as_str())
.is_err());
}
@@ -384,17 +384,17 @@ mod parsing {
let p = filters_parser();
assert!(p
.parse(format!("filter({}, inside(point{{[0]}}))", ">(., [0])").as_str())
.parse(format!("filter({}, point{{[0]}})", ">(., [0])").as_str())
.is_ok());
assert!(p
.parse(format!("filter({}, inside(point{{[0]}}))", ">(, [0])").as_str())
.parse(format!("filter({}, point{{[0]}})", ">(, [0])").as_str())
.is_err());
assert!(p
.parse(format!("filter({}, inside(point{{[0]}}))", ">(.)").as_str())
.parse(format!("filter({}, point{{[0]}})", ">(.)").as_str())
.is_err());
assert!(p
.parse(format!("filter({}, inside(point{{[0]}}))", ">()").as_str())
.parse(format!("filter({}, point{{[0]}})", ">()").as_str())
.is_err());
}
@@ -403,17 +403,17 @@ mod parsing {
let p = filters_parser();
assert!(p
.parse(format!("filter({}, inside(point{{[0]}}))", "=(., [0])").as_str())
.parse(format!("filter({}, point{{[0]}})", "=(., [0])").as_str())
.is_ok());
assert!(p
.parse(format!("filter({}, inside(point{{[0]}}))", "=(, [0])").as_str())
.parse(format!("filter({}, point{{[0]}})", "=(, [0])").as_str())
.is_err());
assert!(p
.parse(format!("filter({}, inside(point{{[0]}}))", "=(.)").as_str())
.parse(format!("filter({}, point{{[0]}})", "=(.)").as_str())
.is_err());
assert!(p
.parse(format!("filter({}, inside(point{{[0]}}))", "=()").as_str())
.parse(format!("filter({}, point{{[0]}})", "=()").as_str())
.is_err());
}
@@ -422,11 +422,11 @@ mod parsing {
let p = filters_parser();
assert!(p
.parse(format!("filter({}, inside(point{{[0]}}))", "!(=(., [0]))").as_str())
.parse(format!("filter({}, point{{[0]}})", "!(=(., [0]))").as_str())
.is_ok());
assert!(p
.parse(format!("filter({}, inside(point{{[0]}}))", "!()").as_str())
.parse(format!("filter({}, point{{[0]}})", "!()").as_str())
.is_err());
}
@@ -435,17 +435,17 @@ mod parsing {
let p = filters_parser();
assert!(p
.parse(format!("filter({}, inside(point{{[0]}}))", "&(=(., [0]), =(., [0]))").as_str())
.parse(format!("filter({}, point{{[0]}})", "&(=(., [0]), =(., [0]))").as_str())
.is_ok());
assert!(p
.parse(format!("filter({}, inside(point{{[0]}}))", "&(, =(., [0]))").as_str())
.parse(format!("filter({}, point{{[0]}})", "&(, =(., [0]))").as_str())
.is_err());
assert!(p
.parse(format!("filter({}, inside(point{{[0]}}))", "&(|(=(., [0])))").as_str())
.parse(format!("filter({}, point{{[0]}})", "&(|(=(., [0])))").as_str())
.is_err());
assert!(p
.parse(format!("filter({}, inside(point{{[0]}}))", "&()").as_str())
.parse(format!("filter({}, point{{[0]}})", "&()").as_str())
.is_err());
}
@@ -454,17 +454,17 @@ mod parsing {
let p = filters_parser();
assert!(p
.parse(format!("filter({}, inside(point{{[0]}}))", "|(=(., [0]), =(., [0]))").as_str())
.parse(format!("filter({}, point{{[0]}})", "|(=(., [0]), =(., [0]))").as_str())
.is_ok());
assert!(p
.parse(format!("filter({}, inside(point{{[0]}}))", "|(, =(., [0]))").as_str())
.parse(format!("filter({}, point{{[0]}})", "|(, =(., [0]))").as_str())
.is_err());
assert!(p
.parse(format!("filter({}, inside(point{{[0]}}))", "|(|(=(., [0])))").as_str())
.parse(format!("filter({}, point{{[0]}})", "|(|(=(., [0])))").as_str())
.is_err());
assert!(p
.parse(format!("filter({}, inside(point{{[0]}}))", "|()").as_str())
.parse(format!("filter({}, point{{[0]}})", "|()").as_str())
.is_err());
}
@@ -474,11 +474,11 @@ mod parsing {
assert!(p.parse("bag{}").is_err());
assert!(p.parse("bag{inside(point{[0]})}").is_ok());
assert!(p.parse("bag{inside(point{[0]}), inside(point{[0]})}").is_ok());
assert!(p.parse("bag{inside(point{[0]}), inside(point{[0]}), inside(point{[0]})}").is_ok());
assert!(p.parse("bag{point{[0]}}").is_ok());
assert!(p.parse("bag{point{[0]}, point{[0]}}").is_ok());
assert!(p.parse("bag{point{[0]}, point{[0]}, point{[0]}}").is_ok());
assert!(p
.parse("bag{inside(point{[0]}), inside(hypersphere{[0], 1}), inside(hyperrectangle{[0], [1]})}")
.parse("bag{point{[0]}, hypersphere{[0], 1}, hyperrectangle{[0], [1]}}")
.is_ok());
}
@@ -518,21 +518,21 @@ mod parsing {
// At least two positions when it is aligned with the axis, otherwise an even number
// of positions, as the number of vertices follows the rule 2**k, where k is the number
// of dimensions of the space containing the hyperrectangle.
assert!(p.parse("inside(hyperrectangle{})").is_err());
assert!(p.parse("inside(hyperrectangle{[]})").is_err());
assert!(p.parse("inside(hyperrectangle{[0]})").is_err());
assert!(p.parse("inside(hyperrectangle{[0], [1], [2]})").is_err());
assert!(p.parse("inside(hyperrectangle{[0], [1], [2], [3], [4]})").is_err());
assert!(p.parse("hyperrectangle{}").is_err());
assert!(p.parse("hyperrectangle{[]}").is_err());
assert!(p.parse("hyperrectangle{[0]}").is_err());
assert!(p.parse("hyperrectangle{[0], [1], [2]}").is_err());
assert!(p.parse("hyperrectangle{[0], [1], [2], [3], [4]}").is_err());
assert!(p.parse("inside(hyperrectangle{[0], [1]})").is_ok());
assert!(p.parse("inside(hyperrectangle{[0], [1], \"space\"})").is_ok());
assert!(p.parse("inside(hyperrectangle{[0], [1], [2], [3]})").is_ok());
assert!(p.parse("inside(hyperrectangle{[0], [1], [2], [3]})").is_ok());
assert!(p.parse("hyperrectangle{[0], [1]}").is_ok());
assert!(p.parse("hyperrectangle{[0], [1], \"space\"}").is_ok());
assert!(p.parse("hyperrectangle{[0], [1], [2], [3]}").is_ok());
assert!(p.parse("hyperrectangle{[0], [1], [2], [3]}").is_ok());
assert!(p
.parse("inside(hyperrectangle{[0], [1], [2], [3], [4], [5]})")
.parse("hyperrectangle{[0], [1], [2], [3], [4], [5]}")
.is_ok());
assert!(p
.parse("inside(hyperrectangle{[0], [1], [2], [3], [4], [5], \"space\"})")
.parse("hyperrectangle{[0], [1], [2], [3], [4], [5], \"space\"}")
.is_ok());
}
@@ -540,23 +540,23 @@ mod parsing {
fn hypersphere() {
let p = filters_parser();
assert!(p.parse("inside(hypersphere{}").is_err());
assert!(p.parse("inside(hypersphere{[]})").is_err());
assert!(p.parse("inside(hypersphere{[0]})").is_err());
assert!(p.parse("hypersphere{}").is_err());
assert!(p.parse("hypersphere{[]}").is_err());
assert!(p.parse("hypersphere{[0]}").is_err());
assert!(p.parse("inside(hypersphere{[0], 23})").is_ok());
assert!(p.parse("inside(hypersphere{[0], 23, \"space\"})").is_ok());
assert!(p.parse("hypersphere{[0], 23}").is_ok());
assert!(p.parse("hypersphere{[0], 23, \"space\"}").is_ok());
}
#[test]
fn point() {
let p = filters_parser();
assert!(p.parse("inside(point{})").is_err());
assert!(p.parse("inside(point{[]})").is_err());
assert!(p.parse("point{}").is_err());
assert!(p.parse("point{[]}").is_err());
assert!(p.parse("inside(point{[0]})").is_ok());
assert!(p.parse("inside(point{[0], \"space\"})").is_ok());
assert!(p.parse("point{[0]}").is_ok());
assert!(p.parse("point{[0], \"space\"}").is_ok());
}
#[test]
@@ -579,30 +579,30 @@ mod parsing {
assert!(p
.parse(
format!(
"filter(=({}, [1]), inside(point{{[0]}}))",
"filter(=({}, [1]), point{{[0]}})",
"str_cmp_ignore_case(.field, \"\")"
)
.as_str()
)
.is_ok());
assert!(p
.parse(format!("filter(=({}, [1]), inside(point{{[0]}}))", "str_cmp(.field, \"\")").as_str())
.parse(format!("filter(=({}, [1]), point{{[0]}})", "str_cmp(.field, \"\")").as_str())
.is_ok());
assert!(p
.parse(format!("filter(=({}, [1]), inside(point{{[0]}}))", ".field").as_str())
.parse(format!("filter(=({}, [1]), point{{[0]}})", ".field").as_str())
.is_ok());
assert!(p
.parse(format!("filter(=({}, [1]), inside(point{{[0]}}))", "[0]").as_str())
.parse(format!("filter(=({}, [1]), point{{[0]}})", "[0]").as_str())
.is_ok());
assert!(p
.parse(format!("filter(=({}, [1]), inside(point{{[0]}}))", "inside(point{[0]})").as_str())
.parse(format!("filter(=({}, [1]), point{{[0]}})", "point{[0]}").as_str())
.is_err());
assert!(p
.parse(format!("filter(=({}, [1]), inside(point{{[0]}}))", "{0}").as_str())
.parse(format!("filter(=({}, [1]), point{{[0]}})", "{0}").as_str())
.is_err());
assert!(p
.parse(format!("filter(=({}, [1]), inside(point{{[0]}}))", "").as_str())
.parse(format!("filter(=({}, [1]), point{{[0]}})", "").as_str())
.is_err());
}*/
@@ -612,15 +612,15 @@ mod parsing {
assert!(p
.parse(
format!("filter(=({}, [1]), inside(point{{[0]}}))", "str_cmp(.field, \"\")").as_str()
format!("filter(=({}, [1]), point{{[0]}})", "str_cmp(.field, \"\")").as_str()
)
.is_ok());
assert!(p
.parse(format!("filter(=({}, [1]), inside(point{{[0]}}))", "str_cmp(.field)").as_str())
.parse(format!("filter(=({}, [1]), point{{[0]}})", "str_cmp(.field)").as_str())
.is_err());
assert!(p
.parse(format!("filter(=({}, [1]), inside(point{{[0]}}))", "str_cmp(\"\")").as_str())
.parse(format!("filter(=({}, [1]), point{{[0]}})", "str_cmp(\"\")").as_str())
.is_err());
}
@@ -631,7 +631,7 @@ mod parsing {
assert!(p
.parse(
format!(
"filter(=({}, [1]), inside(point{{[0]}}))",
"filter(=({}, [1]), point{{[0]}})",
"str_cmp_ignore_case(.field, \"\")"
)
.as_str()
@@ -641,7 +641,7 @@ mod parsing {
assert!(p
.parse(
format!(
"filter(=({}, [1]), inside(point{{[0]}}))",
"filter(=({}, [1]), point{{[0]}})",
"str_cmp_ignore_case(.field)"
)
.as_str()
@@ -650,7 +650,7 @@ mod parsing {
assert!(p
.parse(
format!(
"filter(=({}, [1]), inside(point{{[0]}}))",
"filter(=({}, [1]), point{{[0]}})",
"str_cmp_ignore_case(\"\")"
)
.as_str()
@@ -663,19 +663,19 @@ mod parsing {
let p = filters_parser();
assert!(p
.parse(format!("filter(=({}, [1]), inside(point{{[0]}}))", ".").as_str())
.parse(format!("filter(=({}, [1]), point{{[0]}})", ".").as_str())
.is_ok());
assert!(p
.parse(format!("filter(=({}, [1]), inside(point{{[0]}}))", ".field").as_str())
.parse(format!("filter(=({}, [1]), point{{[0]}})", ".field").as_str())
.is_ok());
assert!(p
.parse(format!("filter(=({}, [1]), inside(point{{[0]}}))", ".field.field").as_str())
.parse(format!("filter(=({}, [1]), point{{[0]}})", ".field.field").as_str())
.is_ok());
assert!(p
.parse(format!("filter(=({}, [1]), inside(point{{[0]}}))", ".field[1].field").as_str())
.parse(format!("filter(=({}, [1]), point{{[0]}})", ".field[1].field").as_str())
.is_ok());
assert!(p
.parse(format!("filter(=({}, [1]), inside(point{{[0]}}))", ".field.field[1]").as_str())
.parse(format!("filter(=({}, [1]), point{{[0]}})", ".field.field[1]").as_str())
.is_ok());
}
@@ -684,26 +684,26 @@ mod parsing {
let p = filters_parser();
// Empty
assert!(p.parse(format!("inside(point{{{}}})", "[]").as_str()).is_err());
assert!(p.parse(format!("point{{{}}}", "[]").as_str()).is_err());
// Non-numerical coordinate:
assert!(p.parse(format!("inside(point{{{}}})", "[aa]").as_str()).is_err());
assert!(p.parse(format!("point{{{}}}", "[aa]").as_str()).is_err());
assert!(p
.parse(format!("inside(point{{{}}})", "[\"aa\"]").as_str())
.parse(format!("point{{{}}}", "[\"aa\"]").as_str())
.is_err());
// One or more coordinates
assert!(p.parse(format!("inside(point{{{}}})", "[0]").as_str()).is_ok());
assert!(p.parse(format!("inside(point{{{}}})", "[0, 0]").as_str()).is_ok());
assert!(p.parse(format!("point{{{}}}", "[0]").as_str()).is_ok());
assert!(p.parse(format!("point{{{}}}", "[0, 0]").as_str()).is_ok());
assert!(p
.parse(format!("inside(point{{{}}})", "[0, 0, 0]").as_str())
.parse(format!("point{{{}}}", "[0, 0, 0]").as_str())
.is_ok());
assert!(p
.parse(format!("inside(point{{{}}})", "[0, 0, 0, 0]").as_str())
.parse(format!("point{{{}}}", "[0, 0, 0, 0]").as_str())
.is_ok());
assert!(p
.parse(format!("inside(point{{{}}})", "[0,0,0,0]").as_str())
.parse(format!("point{{{}}}", "[0,0,0,0]").as_str())
.is_ok());
}
@@ -713,66 +713,66 @@ mod parsing {
// Single dot
assert!(p
.parse(format!("filter(<({}, [1]), inside(point{{[0]}}))", ".").as_str())
.parse(format!("filter(<({}, [1]), point{{[0]}})", ".").as_str())
.is_ok());
// Check first character is within allowed characters
assert!(p
.parse(format!("filter(<({}, [1]), inside(point{{[0]}}))", ".a").as_str())
.parse(format!("filter(<({}, [1]), point{{[0]}})", ".a").as_str())
.is_ok());
assert!(p
.parse(format!("filter(<({}, [1]), inside(point{{[0]}}))", "._").as_str())
.parse(format!("filter(<({}, [1]), point{{[0]}})", "._").as_str())
.is_ok());
assert!(p
.parse(format!("filter(<({}, [1]), inside(point{{[0]}}))", ".2").as_str())
.parse(format!("filter(<({}, [1]), point{{[0]}})", ".2").as_str())
.is_err());
// Check second character is within allowed characters
assert!(p
.parse(format!("filter(<({}, [1]), inside(point{{[0]}}))", ".fa").as_str())
.parse(format!("filter(<({}, [1]), point{{[0]}})", ".fa").as_str())
.is_ok());
assert!(p
.parse(format!("filter(<({}, [1]), inside(point{{[0]}}))", ".f2").as_str())
.parse(format!("filter(<({}, [1]), point{{[0]}})", ".f2").as_str())
.is_ok());
assert!(p
.parse(format!("filter(<({}, [1]), inside(point{{[0]}}))", ".f_").as_str())
.parse(format!("filter(<({}, [1]), point{{[0]}})", ".f_").as_str())
.is_ok());
assert!(p
.parse(format!("filter(<({}, [1]), inside(point{{[0]}}))", ".f2").as_str())
.parse(format!("filter(<({}, [1]), point{{[0]}})", ".f2").as_str())
.is_ok());
// Check we can add subscript
assert!(p
.parse(format!("filter(<({}, [1]), inside(point{{[0]}}))", ".[23]").as_str())
.parse(format!("filter(<({}, [1]), point{{[0]}})", ".[23]").as_str())
.is_ok());
assert!(p
.parse(format!("filter(<({}, [1]), inside(point{{[0]}}))", ".f[0]").as_str())
.parse(format!("filter(<({}, [1]), point{{[0]}})", ".f[0]").as_str())
.is_ok());
assert!(p
.parse(format!("filter(<({}, [1]), inside(point{{[0]}}))", ".f[2]").as_str())
.parse(format!("filter(<({}, [1]), point{{[0]}})", ".f[2]").as_str())
.is_ok());
assert!(p
.parse(format!("filter(<({}, [1]), inside(point{{[0]}}))", ".f[23]").as_str())
.parse(format!("filter(<({}, [1]), point{{[0]}})", ".f[23]").as_str())
.is_ok());
// Invalid index values
assert!(p
.parse(format!("filter(<({}, [1]), inside(point{{[0]}}))", ".f[2.3]").as_str())
.parse(format!("filter(<({}, [1]), point{{[0]}})", ".f[2.3]").as_str())
.is_err());
assert!(p
.parse(format!("filter(<({}, [1]), inside(point{{[0]}}))", ".f[02]").as_str())
.parse(format!("filter(<({}, [1]), point{{[0]}})", ".f[02]").as_str())
.is_err());
assert!(p
.parse(format!("filter(<({}, [1]), inside(point{{[0]}}))", ".f[-2]").as_str())
.parse(format!("filter(<({}, [1]), point{{[0]}})", ".f[-2]").as_str())
.is_err());
assert!(p
.parse(format!("filter(<({}, [1]), inside(point{{[0]}}))", ".f[2e2]").as_str())
.parse(format!("filter(<({}, [1]), point{{[0]}})", ".f[2e2]").as_str())
.is_err());
assert!(p
.parse(format!("filter(<({}, [1]), inside(point{{[0]}}))", ".f[2E2]").as_str())
.parse(format!("filter(<({}, [1]), point{{[0]}})", ".f[2E2]").as_str())
.is_err());
assert!(p
.parse(format!("filter(<({}, [1]), inside(point{{[0]}}))", ".f[+2]").as_str())
.parse(format!("filter(<({}, [1]), point{{[0]}})", ".f[+2]").as_str())
.is_err());
}
@@ -836,42 +836,42 @@ mod parsing {
// Integers
assert!(p
.parse(format!("inside(hypersphere{{[0],{}}})", "0").as_str())
.parse(format!("hypersphere{{[0],{}}}", "0").as_str())
.is_ok());
assert!(p
.parse(format!("inside(hypersphere{{[0],{}}})", "+0").as_str())
.parse(format!("hypersphere{{[0],{}}}", "+0").as_str())
.is_ok());
assert!(p
.parse(format!("inside(hypersphere{{[0],{}}})", "-0").as_str())
.parse(format!("hypersphere{{[0],{}}}", "-0").as_str())
.is_err());
assert!(p
.parse(format!("inside(hypersphere{{[0],{}}})", "1").as_str())
.parse(format!("hypersphere{{[0],{}}}", "1").as_str())
.is_ok());
assert!(p
.parse(format!("inside(hypersphere{{[0],{}}})", "+1").as_str())
.parse(format!("hypersphere{{[0],{}}}", "+1").as_str())
.is_ok());
assert!(p
.parse(format!("inside(hypersphere{{[0],{}}})", "-1").as_str())
.parse(format!("hypersphere{{[0],{}}}", "-1").as_str())
.is_err());
// Floating point values
assert!(p
.parse(format!("inside(hypersphere{{[0],{}}})", "0.0").as_str())
.parse(format!("hypersphere{{[0],{}}}", "0.0").as_str())
.is_ok());
assert!(p
.parse(format!("inside(hypersphere{{[0],{}}})", "+0.0").as_str())
.parse(format!("hypersphere{{[0],{}}}", "+0.0").as_str())
.is_ok());
assert!(p
.parse(format!("inside(hypersphere{{[0],{}}})", "-0.0").as_str())
.parse(format!("hypersphere{{[0],{}}}", "-0.0").as_str())
.is_err());
assert!(p
.parse(format!("inside(hypersphere{{[0],{}}})", "0.1").as_str())
.parse(format!("hypersphere{{[0],{}}}", "0.1").as_str())
.is_ok());
assert!(p
.parse(format!("inside(hypersphere{{[0],{}}})", "+0.01").as_str())
.parse(format!("hypersphere{{[0],{}}}", "+0.01").as_str())
.is_ok());
assert!(p
.parse(format!("inside(hypersphere{{[0],{}}})", "-0.01").as_str())
.parse(format!("hypersphere{{[0],{}}}", "-0.01").as_str())
.is_err());
}
@@ -880,20 +880,20 @@ mod parsing {
let p = filters_parser();
// Integers
assert!(p.parse(format!("inside(point{{[{}]}})", "0").as_str()).is_ok());
assert!(p.parse(format!("inside(point{{[{}]}})", "+0").as_str()).is_ok());
assert!(p.parse(format!("inside(point{{[{}]}})", "-0").as_str()).is_ok());
assert!(p.parse(format!("inside(point{{[{}]}})", "1").as_str()).is_ok());
assert!(p.parse(format!("inside(point{{[{}]}})", "+1").as_str()).is_ok());
assert!(p.parse(format!("inside(point{{[{}]}})", "-1").as_str()).is_ok());
assert!(p.parse(format!("point{{[{}]}}", "0").as_str()).is_ok());
assert!(p.parse(format!("point{{[{}]}}", "+0").as_str()).is_ok());
assert!(p.parse(format!("point{{[{}]}}", "-0").as_str()).is_ok());
assert!(p.parse(format!("point{{[{}]}}", "1").as_str()).is_ok());
assert!(p.parse(format!("point{{[{}]}}", "+1").as_str()).is_ok());
assert!(p.parse(format!("point{{[{}]}}", "-1").as_str()).is_ok());
// Floating point values
assert!(p.parse(format!("inside(point{{[{}]}})", "0.0").as_str()).is_ok());
assert!(p.parse(format!("inside(point{{[{}]}})", "+0.0").as_str()).is_ok());
assert!(p.parse(format!("inside(point{{[{}]}})", "-0.0").as_str()).is_ok());
assert!(p.parse(format!("inside(point{{[{}]}})", "0.1").as_str()).is_ok());
assert!(p.parse(format!("inside(point{{[{}]}})", "+0.01").as_str()).is_ok());
assert!(p.parse(format!("inside(point{{[{}]}})", "-0.01").as_str()).is_ok());
assert!(p.parse(format!("point{{[{}]}}", "0.0").as_str()).is_ok());
assert!(p.parse(format!("point{{[{}]}}", "+0.0").as_str()).is_ok());
assert!(p.parse(format!("point{{[{}]}}", "-0.0").as_str()).is_ok());
assert!(p.parse(format!("point{{[{}]}}", "0.1").as_str()).is_ok());
assert!(p.parse(format!("point{{[{}]}}", "+0.01").as_str()).is_ok());
assert!(p.parse(format!("point{{[{}]}}", "-0.01").as_str()).is_ok());
}
#[test]
@@ -901,54 +901,54 @@ mod parsing {
let p = filters_parser();
// Integers
assert!(p.parse(format!("inside(point{{[{}]}})", "0").as_str()).is_ok());
assert!(p.parse(format!("inside(point{{[{}]}})", "1").as_str()).is_ok());
assert!(p.parse(format!("inside(point{{[{}]}})", "1e2").as_str()).is_ok());
assert!(p.parse(format!("inside(point{{[{}]}})", "1e+2").as_str()).is_ok());
assert!(p.parse(format!("inside(point{{[{}]}})", "1e-2").as_str()).is_ok());
assert!(p.parse(format!("inside(point{{[{}]}})", "1E2").as_str()).is_ok());
assert!(p.parse(format!("inside(point{{[{}]}})", "100").as_str()).is_ok());
assert!(p.parse(format!("point{{[{}]}}", "0").as_str()).is_ok());
assert!(p.parse(format!("point{{[{}]}}", "1").as_str()).is_ok());
assert!(p.parse(format!("point{{[{}]}}", "1e2").as_str()).is_ok());
assert!(p.parse(format!("point{{[{}]}}", "1e+2").as_str()).is_ok());
assert!(p.parse(format!("point{{[{}]}}", "1e-2").as_str()).is_ok());
assert!(p.parse(format!("point{{[{}]}}", "1E2").as_str()).is_ok());
assert!(p.parse(format!("point{{[{}]}}", "100").as_str()).is_ok());
assert!(p.parse(format!("inside(point{{[{}]}})", "010").as_str()).is_err());
assert!(p.parse(format!("point{{[{}]}}", "010").as_str()).is_err());
// Floating point values (normalized)
assert!(p.parse(format!("inside(point{{[{}]}})", "0.0").as_str()).is_ok());
assert!(p.parse(format!("inside(point{{[{}]}})", "0.1").as_str()).is_ok());
assert!(p.parse(format!("inside(point{{[{}]}})", "0.1e0").as_str()).is_ok());
assert!(p.parse(format!("inside(point{{[{}]}})", "0.1e2").as_str()).is_ok());
assert!(p.parse(format!("inside(point{{[{}]}})", "0.1e+2").as_str()).is_ok());
assert!(p.parse(format!("inside(point{{[{}]}})", "0.1e-2").as_str()).is_ok());
assert!(p.parse(format!("inside(point{{[{}]}})", "0.1E2").as_str()).is_ok());
assert!(p.parse(format!("inside(point{{[{}]}})", "0.1E23").as_str()).is_ok());
assert!(p.parse(format!("inside(point{{[{}]}})", "0.01").as_str()).is_ok());
assert!(p.parse(format!("point{{[{}]}}", "0.0").as_str()).is_ok());
assert!(p.parse(format!("point{{[{}]}}", "0.1").as_str()).is_ok());
assert!(p.parse(format!("point{{[{}]}}", "0.1e0").as_str()).is_ok());
assert!(p.parse(format!("point{{[{}]}}", "0.1e2").as_str()).is_ok());
assert!(p.parse(format!("point{{[{}]}}", "0.1e+2").as_str()).is_ok());
assert!(p.parse(format!("point{{[{}]}}", "0.1e-2").as_str()).is_ok());
assert!(p.parse(format!("point{{[{}]}}", "0.1E2").as_str()).is_ok());
assert!(p.parse(format!("point{{[{}]}}", "0.1E23").as_str()).is_ok());
assert!(p.parse(format!("point{{[{}]}}", "0.01").as_str()).is_ok());
assert!(p.parse(format!("inside(point{{[{}]}})", "0.").as_str()).is_err());
assert!(p.parse(format!("point{{[{}]}}", "0.").as_str()).is_err());
assert!(p
.parse(format!("inside(point{{[{}]}})", "0.1E03").as_str())
.parse(format!("point{{[{}]}}", "0.1E03").as_str())
.is_err());
assert!(p
.parse(format!("inside(point{{[{}]}})", "0.1E0.3").as_str())
.parse(format!("point{{[{}]}}", "0.1E0.3").as_str())
.is_err());
// Floating point values (denormalized)
assert!(p.parse(format!("inside(point{{[{}]}})", "1.0").as_str()).is_ok());
assert!(p.parse(format!("inside(point{{[{}]}})", "1.1").as_str()).is_ok());
assert!(p.parse(format!("inside(point{{[{}]}})", "1.1e0").as_str()).is_ok());
assert!(p.parse(format!("inside(point{{[{}]}})", "1.1e2").as_str()).is_ok());
assert!(p.parse(format!("inside(point{{[{}]}})", "1.1e+2").as_str()).is_ok());
assert!(p.parse(format!("inside(point{{[{}]}})", "1.1e-2").as_str()).is_ok());
assert!(p.parse(format!("inside(point{{[{}]}})", "1.1E2").as_str()).is_ok());
assert!(p.parse(format!("inside(point{{[{}]}})", "1.1E23").as_str()).is_ok());
assert!(p.parse(format!("inside(point{{[{}]}})", "1.01").as_str()).is_ok());
assert!(p.parse(format!("inside(point{{[{}]}})", "10.1").as_str()).is_ok());
assert!(p.parse(format!("point{{[{}]}}", "1.0").as_str()).is_ok());
assert!(p.parse(format!("point{{[{}]}}", "1.1").as_str()).is_ok());
assert!(p.parse(format!("point{{[{}]}}", "1.1e0").as_str()).is_ok());
assert!(p.parse(format!("point{{[{}]}}", "1.1e2").as_str()).is_ok());
assert!(p.parse(format!("point{{[{}]}}", "1.1e+2").as_str()).is_ok());
assert!(p.parse(format!("point{{[{}]}}", "1.1e-2").as_str()).is_ok());
assert!(p.parse(format!("point{{[{}]}}", "1.1E2").as_str()).is_ok());
assert!(p.parse(format!("point{{[{}]}}", "1.1E23").as_str()).is_ok());
assert!(p.parse(format!("point{{[{}]}}", "1.01").as_str()).is_ok());
assert!(p.parse(format!("point{{[{}]}}", "10.1").as_str()).is_ok());
assert!(p.parse(format!("inside(point{{[{}]}})", "1.").as_str()).is_err());
assert!(p.parse(format!("inside(point{{[{}]}})", "01.1").as_str()).is_err());
assert!(p.parse(format!("point{{[{}]}}", "1.").as_str()).is_err());
assert!(p.parse(format!("point{{[{}]}}", "01.1").as_str()).is_err());
assert!(p
.parse(format!("inside(point{{[{}]}})", "1.1E03").as_str())
.parse(format!("point{{[{}]}}", "1.1E03").as_str())
.is_err());
assert!(p
.parse(format!("inside(point{{[{}]}})", "1.1E0.3").as_str())
.parse(format!("point{{[{}]}}", "1.1E0.3").as_str())
.is_err());
}
}

View File

@@ -11,10 +11,23 @@ pub enum LiteralTypes {
impl PartialEq for LiteralTypes {
fn eq(&self, other: &Self) -> bool {
match self {
LiteralTypes::String => matches!(other, LiteralTypes::String),
LiteralTypes::Int => matches!(other, LiteralTypes::Int),
LiteralTypes::Float => matches!(other, LiteralTypes::Float | LiteralTypes::Int),
LiteralTypes::Bag(_) => matches!(other, LiteralTypes::Bag(_)),
LiteralTypes::String => match other {
LiteralTypes::String => true,
_ => false,
},
LiteralTypes::Int => match other {
LiteralTypes::Int => true,
_ => false,
},
LiteralTypes::Float => match other {
LiteralTypes::Float => true,
LiteralTypes::Int => true,
_ => false,
},
LiteralTypes::Bag(_) => match other {
LiteralTypes::Bag(_) => true,
_ => false,
},
LiteralTypes::Vector(v) => match other {
LiteralTypes::Vector(ov) => {
let n = v.len();

View File

@@ -9,7 +9,7 @@ impl Validator for Projection {
fn validate(&self) -> ValidationResult {
match self {
Projection::Nifti(_, _, _) => Err("not yet implemented".to_string()),
Projection::Json(_, _format, bag) => bag.validate(),
Projection::JSON(_, _format, bag) => bag.validate(),
//FIXME: Add support for projections
/* match format.validate() {
Ok(_) => bag.validate(),
@@ -54,14 +54,21 @@ impl Validator for Bag {
}
match self {
Bag::ViewPort(bag) => bag.validate(),
Bag::Distinct(bag) => bag.validate(),
Bag::Filter(_, bag) => bag.validate(),
Bag::Filter(_, bag) => match bag {
None => Ok(LiteralPosition(vec![]).get_type()),
Some(b) => b.validate(),
},
Bag::Complement(bag) => bag.validate(),
Bag::Intersection(lh, rh) => compare_bag_types(lh, rh),
Bag::Union(lh, rh) => compare_bag_types(lh, rh),
Bag::Bag(bags) => {
for b in bags {
b.validate()?;
let t = b.validate();
if t.is_err() {
return t;
}
}
Ok(get_type())