Compare commits

...

9 Commits

Author SHA1 Message Date
521f40e36e Fixed most tests 2024-08-11 14:34:45 +02:00
c14c6cb91a Updated compiler and dependencies
* Upgraded to rustc 1.80
 * Updated dependencies
 * Fixed most linter warnings
2024-08-11 08:15:20 +02:00
2566cac17f Use iterators instead of materialized values 2021-03-01 08:40:02 +01:00
8a00180eb0 Update dependencies 2021-03-01 08:40:02 +01:00
8492fed85e Update the parser imports 2020-07-25 09:23:10 +02:00
69fbc9fdd8 Set the volume in the parser for Filters
When no volume is provided, create by default a volume containing the
whole space.

This simplifies handling later on, as no checks and no on-the-fly
generation of that value are necessary. This also removes lifetime issues,
as the volume is always present with the same lifetime as the rest of the
Filter.
2020-07-25 08:49:36 +02:00
e4cbdf836f Rename library to mercator_parser 2020-04-01 18:14:45 +02:00
242de73053 Adding documentation 2020-04-01 17:06:52 +02:00
98b37e63b4 Silence warning in generated code 2020-03-17 17:18:50 +01:00
19 changed files with 748 additions and 680 deletions

View File

@@ -21,7 +21,7 @@ include = ["Cargo.toml", "README.md", "LICENSE", "ACKNOWLEDGEMENTS", "src/**/*.r
build = "build.rs" # LALRPOP preprocessing
[lib]
name = "parser"
name = "mercator_parser"
path = "src/lib.rs"
[[bin]]
@@ -29,23 +29,24 @@ name = "parser-driver"
path = "src/main.rs"
required-features = ["bin"]
[profile.release]
lto = true
[features]
bin = ["measure_time", "pretty_env_logger"]
[dependencies]
mercator_db = "^0.1"
mercator_db = "0.1"
lalrpop-util = "^0.17"
regex = "^1.2"
lalrpop-util = "0.20"
# Logging macros API
#log = { version = "^0.4", features = ["max_level_trace", "release_max_level_info"] }
log = { version = "^0.4", features = ["max_level_trace", "release_max_level_trace"] }
#log = { version = "0.4", features = ["max_level_trace", "release_max_level_info"] }
log = { version = "0.4", features = ["max_level_trace", "release_max_level_trace"] }
# Used for main.rs
pretty_env_logger = { version = "^0.3", optional = true } # Logger implementation
measure_time = { version = "^0.6", optional = true } # To measure parsing time, only required by binary
pretty_env_logger = { version = "0.5", optional = true } # Logger implementation
measure_time = { version = "0.8", optional = true } # To measure parsing time, only required by binary
[build-dependencies]
lalrpop = "^0.17.1"
lalrpop = "0.20"

View File

@@ -22,40 +22,6 @@ This enables the index implementations to be agnostic from the underlying data s
* Rust: https://www.rust-lang.org
## Quick start
## Building from sources
To build this project, you will need to run the following:
```sh
cargo build --release
```
### Installation
To install the software on the system you can use:
```sh
cargo install --release
```
### Usage
Lorem ipsum dolor sit amet, consectetur adipiscing elit. Proin vehicula pretium
quam sit amet facilisis. Class aptent taciti sociosqu ad litora torquent per
conubia nostra, per inceptos himenaeos. Curabitur metus sapien, rhoncus vitae
eleifend nec, convallis vel nunc. Nulla metus mauris, porta eu porta eu,
vulputate et est. Suspendisse lacinia leo vel auctor aliquet. Maecenas non arcu
libero. Nulla ut eleifend dui. Cras bibendum pharetra facilisis. Proin mattis
libero non pharetra tristique. Nam massa nulla, ultrices pharetra quam a,
fermentum placerat dolor. Nullam mollis libero et neque lobortis, id dignissim
lectus dignissim. Maecenas ligula enim, congue in ornare vel, volutpat ut ante.
```sh
cargo run --release
```
## Documentation
For more information, please refer to the [documentation](https://epfl-dias.github.io/mercator_parser/).

1
book/.gitignore vendored Normal file
View File

@@ -0,0 +1 @@
book

6
book/book.toml Normal file
View File

@@ -0,0 +1,6 @@
[book]
authors = ["Lionel Sambuc"]
language = "en"
multilingual = false
src = "src"
title = "Mercator Parser"

5
book/src/SUMMARY.md Normal file
View File

@@ -0,0 +1,5 @@
# Summary
[Introduction](./introduction.md)
- [Filter Grammar](./filters.md)
- [Query Grammar](./queries.md)

10
book/src/filters.md Normal file
View File

@@ -0,0 +1,10 @@
# Filter Grammar
You will find below the definition of this DSL, for filtering data
from the index.
## filters.g4
```antlr
{{#include ../../Grammars/filters.g4}}
```

7
book/src/introduction.md Normal file
View File

@@ -0,0 +1,7 @@
# Introduction
To support volumetric queries for Mercator, a new domain-specific language (DSL) was created.
ANTLR was used to write and test the DSL, to check that it stays simple
to parse and fast to execute. The actual [parser](https://epfl-dias.github.io/mercator_parser/) and interpreter are
defined in Rust, using [LALRPOP](https://docs.rs/lalrpop/0.18.1/lalrpop/).

9
book/src/queries.md Normal file
View File

@@ -0,0 +1,9 @@
# Query Grammar
You will find below the definition of this DSL, for queries. This builds on top of the [filters](filters.html) grammar.
## queries.g4
```antlr
{{#include ../../Grammars/queries.g4}}
```

2
rust-toolchain.toml Normal file
View File

@@ -0,0 +1,2 @@
[toolchain]
channel = "1.80.0"

View File

@@ -1,370 +1,358 @@
use std::collections::{HashMap, HashSet};
use std::rc::Rc;
use mercator_db::space;
use mercator_db::Core;
use mercator_db::CoreQueryParameters;
use mercator_db::Properties;
use mercator_db::IterObjects;
use mercator_db::IterObjectsBySpaces;
use super::expressions::*;
use super::symbols::*;
impl From<&LiteralPosition> for space::Position {
fn from(literal: &LiteralPosition) -> Self {
let v: Vec<f64> = literal.into();
v.into()
// Regroup a per-space object stream so that each space id appears
// exactly once, with all of its object iterators chained together.
// The input may list the same space several times (e.g. after
// concatenating result sets).
fn group_by_space<'s>(
list: IterObjectsBySpaces<'s>,
) -> Box<dyn Iterator<Item = (&'s String, IterObjects<'s>)> + 's> {
// Accumulate the object iterators per space id; several entries
// may share the same space, so collect them in a Vec per key.
let mut hashmap = HashMap::new();
for (space, objects) in list {
hashmap.entry(space).or_insert_with(Vec::new).push(objects);
}
// Flatten each space's list of iterators back into a single
// object iterator per space.
Box::new(hashmap.into_iter().map(|(space, objects)| {
let objects: IterObjects = Box::new(objects.into_iter().flatten());
(space, objects)
}))
}
impl From<&LiteralNumber> for space::Coordinate {
fn from(literal: &LiteralNumber) -> Self {
match literal {
LiteralNumber::Float(f) => (*f).into(),
LiteralNumber::Int(i) => (*i as u64).into(),
}
}
// Deduplicate a result set: merge duplicate space entries, then keep
// only distinct objects within each space (via a HashSet).
fn distinct_helper(list: IterObjectsBySpaces) -> IterObjectsBySpaces {
// Make sure to collect all objects iterators per space, so that
// each space appears only once.
group_by_space(list)
// We would lose some objects otherwise when creating the
// HashMaps. Also this makes sure the values are unique.
.map(|(space, iter)| {
let uniques: HashSet<_> = iter.collect();
let uniques: IterObjects = Box::new(uniques.into_iter());
(space, uniques)
})
.collect()
}
fn complement_helper<'c>(
core: &'c Core,
parameters: &CoreQueryParameters<'c>,
space_id: &str,
inside: Vec<(&'c String, Vec<(space::Position, &'c Properties)>)>,
) -> mercator_db::ResultSet<'c> {
// Index positions by space: for each space, build a HashSet of all
// object positions. The sets are wrapped in Rc so they can be cheaply
// shared with the filtering closures that consume them.
fn into_positions_hashset(
objects_by_spaces: IterObjectsBySpaces,
) -> HashMap<&String, Rc<HashSet<space::Position>>> {
// Make sure to collect all objects iterators per space, so that
// each space appears only once.
group_by_space(objects_by_spaces)
// We would lose some objects otherwise when creating the HashSets.
.map(|(space, iter)| {
// Keep only the position of each (position, properties) pair.
let hash_set: HashSet<_> = iter.map(|(position, _)| position).collect();
(space, Rc::new(hash_set))
})
.collect::<HashMap<_, _>>()
}
// Strictly not inside nor on the surface.
// TODO: inside must contain the valid positions in all expected spaces
fn complement_helper<'h>(
core: &'h Core,
parameters: &'h CoreQueryParameters<'h>,
space_id: &'h str,
inside: IterObjectsBySpaces<'h>,
) -> mercator_db::ResultSet<'h> {
let (low, high) = parameters.db.space(space_id)?.bounding_box();
match core.get_by_shape(parameters, &space::Shape::BoundingBox(low, high), space_id) {
e @ Err(_) => e,
Ok(points) => {
let hashmap = inside.into_iter().collect::<HashMap<_, _>>();
let inside = into_positions_hashset(inside);
let points = core.get_by_shape(parameters, space::Shape::BoundingBox(low, high), space_id)?;
Ok(points
.into_iter()
.filter_map(|(space, v)| match hashmap.get(space) {
None => None,
Some(list) => {
Some((space, v.into_iter().filter(|t| !list.contains(t)).collect()))
}
})
.collect::<Vec<_>>())
}
}
let results = points
.into_iter()
.filter_map(move |(space, v)| match inside.get(space) {
None => None, // Space not found, so no point might exist!
Some(volume) => {
let volume = volume.clone();
let iter: IterObjects = Box::new(v.filter(move |a| !volume.contains(&a.0)));
Some((space, iter))
}
})
.collect();
Ok(results)
}
/// Clip `bag` to the client-requested view-port, when one is set.
///
/// Without a view-port the bag is executed as-is. With one, the bag is
/// intersected with a hyper-rectangle spanning the view-port corners,
/// expressed in the bag's own reference space.
fn view_port<'c>(
    core_id: &str,
    parameters: &CoreQueryParameters<'c>,
    bag: &Bag,
) -> mercator_db::ResultSet<'c> {
    match parameters.view_port {
        None => bag.execute(core_id, parameters),
        Some((low, high)) => {
            // Build the clipping volume in the bag's space.
            let corners = vec![low.into(), high.into()];
            let clip = Bag::Inside(Shape::HyperRectangle(bag.space().clone(), corners));
            intersection(core_id, parameters, &clip, bag)
        }
    }
}
// Intersection based only on spatial positions!
fn intersect_helper<'h>(
smaller: IterObjectsBySpaces<'h>,
bigger: IterObjectsBySpaces<'h>,
) -> IterObjectsBySpaces<'h> {
let smaller = into_positions_hashset(smaller);
fn distinct<'c>(
core_id: &str,
parameters: &CoreQueryParameters<'c>,
bag: &Bag,
) -> mercator_db::ResultSet<'c> {
match bag.execute(core_id, parameters) {
e @ Err(_) => e,
Ok(mut v) => {
let set: HashSet<_> = v.drain(..).collect(); // dedup
v.extend(set.into_iter());
bigger
.into_iter()
.filter_map(
move |(space, bigger_object_iter)| match smaller.get(space) {
None => None,
Some(volume) => {
let volume = volume.clone();
let filtered: IterObjects =
Box::new(bigger_object_iter.filter(move |a| volume.contains(&a.0)));
Ok(v)
}
}
}
fn filter_helper<'c>(
predicate: &Predicate,
bag: &Bag,
core_id: &str,
parameters: &CoreQueryParameters<'c>,
) -> mercator_db::ResultSet<'c> {
match bag.execute(core_id, parameters) {
e @ Err(_) => e,
Ok(results) => Ok(results
.into_iter()
.filter_map(|(space, positions)| {
let filtered = positions
.into_iter()
.filter(|(position, properties)| predicate.eval((space, position, properties)))
.collect::<Vec<_>>();
if filtered.is_empty() {
None
} else {
Some((space, filtered))
}
})
.collect::<Vec<_>>()),
}
},
)
.collect()
}
// Apply an optional predicate to an optional bag.
//
// * No predicate, some bag -> the bag is returned unfiltered.
// * No predicate, no bag   -> error: nothing to evaluate.
// * Predicate, no bag      -> the predicate is evaluated against the
//   whole universe, materialized as a literal hyper-rectangle covering
//   its bounding box.
// * Predicate and bag      -> the predicate filters the bag's results.
fn filter<'c>(
core_id: &str,
parameters: &CoreQueryParameters<'c>,
predicate: &Option<Predicate>,
bag: &Option<Box<Bag>>,
) -> mercator_db::ResultSet<'c> {
match predicate {
None => {
if let Some(bag) = bag {
bag.execute(core_id, parameters)
} else {
Err("Filter without predicate nor data set.".to_string())
}
}
Some(predicate) => match bag {
None => {
// Default data set: everything inside the universe's
// bounding box, rebuilt as literal float positions.
let (low, high) = space::Space::universe().bounding_box();
let low: Vec<_> = low.into();
let high: Vec<_> = high.into();
let shape = Shape::HyperRectangle(
space::Space::universe().name().clone(),
vec![
LiteralPosition(
low.into_iter()
.map(LiteralNumber::Float)
.collect::<Vec<_>>(),
),
LiteralPosition(
high.into_iter()
.map(LiteralNumber::Float)
.collect::<Vec<_>>(),
),
],
);
filter_helper(predicate, &Bag::Inside(shape), core_id, parameters)
}
Some(bag) => filter_helper(predicate, bag.as_ref(), core_id, parameters),
},
}
}
impl Bag {
fn distinct<'b>(
&'b self,
core_id: &'b str,
parameters: &'b CoreQueryParameters<'b>,
) -> mercator_db::ResultSet<'b> {
let results = self.execute(core_id, parameters)?;
fn complement<'c>(
core_id: &str,
parameters: &CoreQueryParameters<'c>,
core: &'c Core,
bag: &Bag,
) -> mercator_db::ResultSet<'c> {
match bag.execute(core_id, parameters) {
// FIXME: The complement of a set is computed within its definition space.
e @ Err(_) => e,
Ok(inside) => complement_helper(
Ok(distinct_helper(results))
}
fn complement<'b>(
&'b self,
core_id: &'b str,
parameters: &'b CoreQueryParameters<'b>,
core: &'b Core,
) -> mercator_db::ResultSet<'b> {
let inside = self.execute(core_id, parameters)?;
// FIXME: The complement of a set should be computed within its
// definition space. We don't know here so we use universe
complement_helper(
core,
parameters,
mercator_db::space::Space::universe().name(),
inside,
),
)
}
// Intersection of two bags, based only on spatial positions.
//
// Both operands are executed; the side predicted to be cheaper is
// passed to intersect_helper as the (hashed) probe set, the other as
// the streamed set.
fn intersection<'b>(
&'b self,
core_id: &'b str,
parameters: &'b CoreQueryParameters<'b>,
rh: &'b Bag,
) -> mercator_db::ResultSet<'b> {
let left = self.execute(core_id, parameters)?;
let right = rh.execute(core_id, parameters)?;
// Pick the predicted-smaller result as intersect_helper's first
// (smaller) argument.
let v = if rh.predict(parameters.db) < self.predict(parameters.db) {
intersect_helper(right, left)
} else {
intersect_helper(left, right)
};
Ok(v)
}
// Union of two bags: execute both operands and concatenate their
// result sets. Duplicates are NOT removed here; wrap in Distinct for
// set semantics.
fn union<'b>(
&'b self,
core_id: &'b str,
parameters: &'b CoreQueryParameters<'b>,
rh: &'b Bag,
) -> mercator_db::ResultSet<'b> {
let mut left = self.execute(core_id, parameters)?;
let mut right = rh.execute(core_id, parameters)?;
// Append the predicted-smaller vector into the bigger one, so
// fewer elements get moved.
let union = if rh.predict(parameters.db) < self.predict(parameters.db) {
left.append(&mut right);
left
} else {
right.append(&mut left);
right
};
Ok(union)
}
// Keep only the objects of this bag for which `predicate` holds.
// The filtering is applied per space, on (position, properties) pairs.
fn filter<'b>(
&'b self,
predicate: &'b Predicate,
core_id: &'b str,
parameters: &'b CoreQueryParameters<'b>,
) -> mercator_db::ResultSet<'b> {
let results = self.execute(core_id, parameters)?;
Ok(results
.into_iter()
.map(move |(space, positions)| {
// Materialized so the boxed closure below can own the data
// — presumably needed to satisfy lifetimes; TODO confirm.
let positions = positions.collect::<Vec<_>>();
(
space,
Box::new(positions.into_iter().filter(move |(position, properties)| {
predicate.eval((space, position, properties))
})) as IterObjects,
)
})
.collect())
}
}
fn intersection<'c>(
core_id: &str,
parameters: &CoreQueryParameters<'c>,
rh: &Bag,
lh: &Bag,
) -> mercator_db::ResultSet<'c> {
let l = lh.execute(core_id, parameters);
if let Ok(l) = l {
let r = rh.execute(core_id, parameters);
if let Ok(r) = r {
let mut v = vec![];
impl Shape {
fn inside<'s>(
&'s self,
parameters: &'s CoreQueryParameters<'s>,
core: &'s Core,
) -> mercator_db::ResultSet<'s> {
let db = parameters.db;
let param = match self {
Shape::Point(space_id, position) => {
let space = db.space(space_id)?;
let position: Vec<f64> = position.into();
let position = space.encode(&position)?;
Ok((space_id, space::Shape::Point(position)))
}
Shape::HyperRectangle(space_id, bounding_box) => {
if bounding_box.len() != 2 {
//FIXME: Support arbitrary HyperRectangles
Err(
"The number of position is different from 2, which is unsupported."
.to_string(),
)
} else {
let space = db.space(space_id)?;
let low: Vec<f64> = (&bounding_box[0]).into();
let high: Vec<f64> = (&bounding_box[1]).into();
let low = space.encode(&low)?;
let high = space.encode(&high)?;
if rh.predict(parameters.db) < lh.predict(parameters.db) {
for o in r {
if l.contains(&o) {
v.push(o);
}
}
} else {
for o in l {
if r.contains(&o) {
v.push(o);
}
Ok((space_id, space::Shape::BoundingBox(low, high)))
}
}
Ok(v)
} else {
r
Shape::HyperSphere(space_id, position, radius) => {
let space = db.space(space_id)?;
let position: Vec<f64> = position.into();
let position = space.encode(&position)?;
// We have to provide a position with all the dimensions
// for the encoding to work as expected.
let mut r = vec![0f64; position.dimensions()];
r[0] = radius.into();
let radius = space.encode(&r)?[0];
Ok((space_id, space::Shape::HyperSphere(position, radius)))
}
Shape::Label(_, id) => {
// Not a real shape, so short circuit and return.
return core.get_by_label(parameters, id);
}
Shape::Nifti(_space_id) => Err("Inside-Nifti: not yet implemented".to_string()),
};
match param {
Ok((space_id, shape)) => core.get_by_shape(parameters, shape, space_id),
Err(e) => Err(e),
}
} else {
l
}
// Objects strictly outside the shape: compute the set strictly inside
// (surface excluded), then take its complement within the shape's
// reference space via complement_helper.
fn outside<'s>(
&'s self,
parameters: &'s CoreQueryParameters<'s>,
core: &'s Core,
) -> mercator_db::ResultSet<'s> {
let (space_id, inside) = match self {
Shape::Point(space_id, position) => {
// A point's "inside" is just the point itself.
let position: Vec<f64> = position.into();
let positions = vec![position.into()];
let inside = core.get_by_positions(parameters, positions, space_id)?;
Ok((space_id, inside))
}
Shape::HyperRectangle(space_id, bounding_box) => {
// We need to adapt the bounding_box to ensure the surface
// is not included in the inside set, so we compute the
// biggest bounding box strictly contained within the
// given box.
// Smallest increment possible
let mut increment = Vec::with_capacity(bounding_box[0].dimensions());
for _ in 0..bounding_box[0].dimensions() {
increment.push(f64::EPSILON);
}
// Add it to the lower bound
let mut low: space::Position = (&bounding_box[0]).into();
low += increment.clone().into();
// Subtract it from the upper bound
let mut high: space::Position = (&bounding_box[1]).into();
high -= increment.into();
let inside =
core.get_by_shape(parameters, space::Shape::BoundingBox(low, high), space_id)?;
Ok((space_id, inside))
}
Shape::HyperSphere(space_id, center, radius) => {
// Smallest decrement possible, to exclude the surface
let mut radius: f64 = radius.into();
radius -= f64::EPSILON;
let center: space::Position = center.into();
let inside = core.get_by_shape(
parameters,
space::Shape::HyperSphere(center, radius.into()),
space_id,
)?;
Ok((space_id, inside))
}
Shape::Label(space_id, id) => {
let inside = core.get_by_label(parameters, id)?;
Ok((space_id, inside))
}
Shape::Nifti(_space_id) => Err("Outside-nifti: not yet implemented".to_string()),
}?;
complement_helper(core, parameters, space_id, inside)
}
}
fn union<'c>(
core_id: &str,
parameters: &CoreQueryParameters<'c>,
rh: &Bag,
lh: &Bag,
fn filter<'c>(
core_id: &'c str,
parameters: &'c CoreQueryParameters<'c>,
predicate: &'c Option<Predicate>,
bag: &'c Bag,
) -> mercator_db::ResultSet<'c> {
let l = lh.execute(core_id, parameters);
if let Ok(mut l) = l {
let r = rh.execute(core_id, parameters);
if let Ok(mut r) = r {
if rh.predict(parameters.db) < lh.predict(parameters.db) {
l.append(&mut r);
Ok(l)
} else {
r.append(&mut l);
Ok(r)
}
} else {
r
}
} else {
l
match predicate {
None => bag.execute(core_id, parameters),
Some(predicate) => bag.filter(predicate, core_id, parameters),
}
}
fn bag<'c>(
core_id: &str,
parameters: &CoreQueryParameters<'c>,
bags: &[Bag],
core_id: &'c str,
parameters: &'c CoreQueryParameters<'c>,
bags: &'c [Bag],
) -> mercator_db::ResultSet<'c> {
let mut v = vec![];
let mut results = Vec::new();
for bag in bags {
let b = bag.execute(core_id, parameters);
match b {
e @ Err(_) => {
return e;
}
Ok(mut b) => {
v.append(&mut b);
}
}
let mut result = bag.execute(core_id, parameters)?;
results.append(&mut result);
}
Ok(v)
}
// Objects inside (or on the surface of) the given shape.
//
// Each shape's literal coordinates are first encoded into the target
// space, then the lookup is delegated to Core::get_by_shape. Labels
// are not geometric shapes and short-circuit to a label lookup.
fn inside<'c>(
parameters: &CoreQueryParameters<'c>,
core: &'c Core,
shape: &Shape,
) -> mercator_db::ResultSet<'c> {
let db = parameters.db;
let param = match shape {
Shape::Point(space_id, position) => {
let space = db.space(space_id)?;
let position: Vec<f64> = position.into();
let position = space.encode(&position)?;
Ok((space_id, space::Shape::Point(position)))
}
Shape::HyperRectangle(space_id, bounding_box) => {
// Only axis-aligned boxes defined by two corner points are
// supported here.
if bounding_box.len() != 2 {
Err("The number of position is different from 2, which is unsupported.".to_string())
} else {
let space = db.space(space_id)?;
let low: Vec<f64> = (&bounding_box[0]).into();
let high: Vec<f64> = (&bounding_box[1]).into();
let low = space.encode(&low)?;
let high = space.encode(&high)?;
Ok((space_id, space::Shape::BoundingBox(low, high)))
}
}
Shape::HyperSphere(space_id, position, radius) => {
let space = db.space(space_id)?;
let position: Vec<f64> = position.into();
let position = space.encode(&position)?;
// Encode the radius by building a full-dimension vector,
// then keeping the first encoded component.
let mut r = vec![];
for _ in 0..position.dimensions() {
r.push(radius.into());
}
let radius = space.encode(&r)?[0];
//FIXME: RADIUS IS A LENGTH, HOW TO ENCODE IT INTO THE SPACE?
Ok((space_id, space::Shape::HyperSphere(position, radius)))
}
Shape::Label(_, id) => {
// Not a real shape, so short circuit and return.
return core.get_by_label(parameters, id);
}
Shape::Nifti(_space_id) => Err("Inside-Nifti: not yet implemented".to_string()),
};
match param {
Ok((space_id, shape)) => core.get_by_shape(parameters, &shape, space_id),
Err(e) => Err(e),
}
}
fn outside<'c>(
parameters: &CoreQueryParameters<'c>,
core: &'c Core,
shape: &Shape,
) -> mercator_db::ResultSet<'c> {
match shape {
Shape::Point(space_id, position) => {
let position: Vec<f64> = position.into();
match core.get_by_positions(parameters, &[position.into()], space_id) {
e @ Err(_) => e,
Ok(inside) => complement_helper(core, parameters, space_id, inside),
}
}
Shape::HyperRectangle(space_id, bounding_box) => {
// We need to adapt the bounding_box to ensure the
// surface will not hit as part of the inside set, so we
// compute the biggest bounding box contained within the
// given box.
// Smallest increment possible
let mut increment = Vec::with_capacity(bounding_box[0].dimensions());
for _ in 0..bounding_box[0].dimensions() {
increment.push(std::f64::EPSILON);
}
// Add it to the lower bound
let mut low: space::Position = (&bounding_box[0]).into();
low += increment.clone().into();
// Substract it from the upper bound
let mut high: space::Position = (&bounding_box[1]).into();
high -= increment.into();
match core.get_by_shape(parameters, &space::Shape::BoundingBox(low, high), space_id) {
e @ Err(_) => e,
Ok(inside) => complement_helper(core, parameters, space_id, inside),
}
}
Shape::HyperSphere(space_id, center, radius) => {
// Smallest decrement possible, to exclude the surface
let mut radius: f64 = radius.into();
radius -= std::f64::EPSILON;
let center: space::Position = center.into();
match core.get_by_shape(
parameters,
&space::Shape::HyperSphere(center, radius.into()),
space_id,
) {
e @ Err(_) => e,
Ok(inside) => complement_helper(core, parameters, space_id, inside),
}
}
Shape::Label(_, _) => Err("Label: not yet implemented".to_string()),
Shape::Nifti(_space_id) => Err("Outside-nifti: not yet implemented".to_string()),
}
Ok(results)
}
impl<'e> Executor<'e> for Projection {
type ResultSet = mercator_db::ResultSet<'e>;
fn execute<'f: 'e>(
&self,
core_id: &str,
parameters: &CoreQueryParameters<'f>,
fn execute(
&'e self,
core_id: &'e str,
parameters: &'e CoreQueryParameters<'e>,
) -> Self::ResultSet {
match self {
Projection::Nifti(_, _, _bag) => Err("Proj-Nifti: not yet implemented".to_string()),
Projection::JSON(_, _format, bag) => {
Projection::Json(_, _format, bag) => {
bag.execute(core_id, parameters)
// FIXME: Add projections here
}
@@ -375,27 +363,26 @@ impl<'e> Executor<'e> for Projection {
impl<'e> Executor<'e> for Bag {
type ResultSet = mercator_db::ResultSet<'e>;
fn execute<'f: 'e>(
&self,
core_id: &str,
parameters: &CoreQueryParameters<'f>,
fn execute(
&'e self,
core_id: &'e str,
parameters: &'e CoreQueryParameters<'e>,
) -> Self::ResultSet {
let core = parameters.db.core(core_id)?;
match self {
Bag::ViewPort(bag) => view_port(core_id, parameters, bag),
Bag::Distinct(bag) => distinct(core_id, parameters, bag),
Bag::Distinct(bag) => bag.distinct(core_id, parameters),
Bag::Filter(predicate, bag) => filter(core_id, parameters, predicate, bag),
Bag::Complement(bag) => complement(core_id, parameters, core, bag),
Bag::Intersection(lh, rh) => intersection(core_id, parameters, rh, lh),
Bag::Union(lh, rh) => union(core_id, parameters, rh, lh),
Bag::Complement(bag) => bag.complement(core_id, parameters, core),
Bag::Intersection(lh, rh) => lh.intersection(core_id, parameters, rh),
Bag::Union(lh, rh) => lh.union(core_id, parameters, rh),
Bag::Bag(list) => bag(core_id, parameters, list),
Bag::Inside(shape) => inside(parameters, core, shape),
Bag::Inside(shape) => shape.inside(parameters, core),
Bag::Outside(shape) => {
//FIXME: This is currently computed as the complement of the values within the shape, except its surface.
// Should this be instead a list of positions within the shape?
//FIXME: Should we use the Shape's Space to get the maximum bounds or the output Space requested?
outside(parameters, core, shape)
shape.outside(parameters, core)
}
}
}

View File

@@ -14,10 +14,10 @@ pub trait Predictor {
pub trait Executor<'e> {
type ResultSet;
fn execute<'f: 'e>(
&self,
core_id: &str,
parameters: &CoreQueryParameters<'f>,
fn execute(
&'e self,
core_id: &'e str,
parameters: &'e CoreQueryParameters<'e>,
) -> Self::ResultSet;
}

View File

@@ -1,17 +1,57 @@
#![forbid(unsafe_code)]
//! # Mercator Parser
//!
//! Query parser for Mercator.
//!
//! ## Mercator: Spatial Index
//!
//! **Mercator** is a spatial *volumetric* index for the
//! [Human Brain Project]. It is a component of the [Knowledge Graph]
//! service, which provides the spatial anchoring for the metadata
//! registered as well as processes the volumetric queries.
//!
//! It is build on top of the Iron Sea database toolkit.
//!
//! ## Iron Sea: Database Toolkit
//! **Iron Sea** provides a set of database engine bricks, which can be
//! combined and applied on arbitrary data structures.
//!
//! Unlike a traditional database, it does not assume a specific
//! physical structure for the tables nor the records, but relies on the
//! developer to provide a set of extractor functions which are used by
//! the specific indices provided.
//!
//! This enables the index implementations to be agnostic from the
//! underlying data structure, and re-used.
//!
//! [Human Brain Project]: http://www.humanbrainproject.eu
//! [Knowledge Graph]: http://www.humanbrainproject.eu/en/explore-the-brain/search/
#[macro_use]
extern crate lalrpop_util;
lalrpop_mod!(#[allow(clippy::all)] pub queries); // synthesized by LALRPOP
lalrpop_mod!(#[allow(clippy::all,unused_parens)] pub queries); // synthesized by LALRPOP
// Note: We do not enable for the whole library deny(missing_docs), as
// it requires the automatically generated parser to be documented
// as well.
// Instead we enable it per modules below, except for the tests.
//#[warn(missing_docs)]
mod evaluators;
//#[warn(missing_docs)]
mod executors;
//#[warn(missing_docs)]
mod expressions;
//#[warn(missing_docs)]
mod predictors;
//#[warn(missing_docs)]
mod validators;
//#[warn(missing_docs)]
mod symbols;
//#[warn(missing_docs)]
mod types;
pub use expressions::Executor;
@@ -19,6 +59,8 @@ pub use expressions::Predictor;
pub use expressions::Validator;
pub use queries::FiltersParser;
pub use queries::QueryParser;
pub use symbols::Bag;
pub use symbols::Projection;
pub use validators::ValidationResult;
#[cfg(test)]

View File

@@ -7,11 +7,11 @@ use std::io;
use mercator_db::CoreQueryParameters;
use mercator_db::DataBase;
use parser::Executor;
use parser::FiltersParser;
use parser::Predictor;
use parser::QueryParser;
use parser::Validator;
use mercator_parser::Executor;
use mercator_parser::FiltersParser;
use mercator_parser::Predictor;
use mercator_parser::QueryParser;
use mercator_parser::Validator;
fn main() {
// If RUST_LOG is unset, set it to INFO, otherwise keep it as-is.
@@ -93,12 +93,17 @@ fn main() {
execute = t.execute(core, &parameters);
}
if let Ok(r) = execute {
//let r = mercator_db::json::model::to_spatial_objects(r);
info!("Execution: \n{:#?}", r);
info!("NB results: {:?}", r.len());
} else {
info!("Execution: \n{:?}", execute);
match execute {
Ok(r) => {
let r = r
.into_iter()
.map(|(space, objects)| (space, objects.collect::<Vec<_>>()))
.collect::<Vec<_>>();
info!("Execution: \n{:#?}", r);
info!("NB results: {:?}", r[0].1.len());
}
Err(e) => info!("Execution: \n{:?}", e),
}
}
}

View File

@@ -1,4 +1,3 @@
use mercator_db::space;
use mercator_db::DataBase;
use super::expressions::Predictor;
@@ -8,7 +7,7 @@ impl Predictor for Projection {
fn predict(&self, db: &DataBase) -> Result<f64, String> {
match self {
Projection::Nifti(_, _, bag) => bag.predict(db),
Projection::JSON(_, _, bag) => bag.predict(db),
Projection::Json(_, _, bag) => bag.predict(db),
}
}
}
@@ -16,12 +15,8 @@ impl Predictor for Projection {
impl Predictor for Bag {
fn predict(&self, db: &DataBase) -> Result<f64, String> {
match self {
Bag::ViewPort(bag) => bag.predict(db),
Bag::Distinct(bag) => bag.predict(db),
Bag::Filter(_, bag) => match bag {
None => Ok(db.space(space::Space::universe().name())?.volume()),
Some(b) => b.predict(db),
},
Bag::Filter(_, bag) => bag.predict(db),
Bag::Complement(bag) => Ok(db.space(bag.space())?.volume() - bag.predict(db)?),
Bag::Intersection(lh, rh) => {
let l = lh.predict(db)?;

View File

@@ -51,7 +51,7 @@ JsonOperator: symbols::Projection = {
None => Space::universe().name().clone(),
};
symbols::Projection::JSON(space_id, f, b)
symbols::Projection::Json(space_id, f, b)
}
};
@@ -137,8 +137,6 @@ Aggregations: symbols::Aggregation = {
//*********************************************************************/
pub Filters: symbols::Bag = {
<Bags>
//<Bags> =>
// symbols::Bag::ViewPort(Box::new(<>))
};
// All these expressions generate bags.
@@ -189,13 +187,25 @@ Union: symbols::Bag = {
Filter: symbols::Bag = {
// "filter" "(" <p:Predicates> "," <b:Bags> ")" =>
"filter" "(" <b:Bags> ")" =>
symbols::Bag::Filter(None, Some(Box::new(b))),
"filter" "(" <p:Predicates> <b:("," <Bags> )?> ")" =>
symbols::Bag::Filter(None, Box::new(b)),
"filter" "(" <p:Predicates> <b:("," <Bags> )?> ")" => {
match b {
None => symbols::Bag::Filter(Some(p), None),
Some(b) => symbols::Bag::Filter(Some(p), Some(Box::new(b))),
None => {
let (low, high) = Space::universe().bounding_box();
let low: Vec<_> = low.into();
let high: Vec<_> = high.into();
let shape = symbols::Shape::HyperRectangle(
Space::universe().name().clone(),
vec![
symbols::LiteralPosition(low.into_iter().map(symbols::LiteralNumber::Float).collect()),
symbols::LiteralPosition(high.into_iter().map(symbols::LiteralNumber::Float).collect()),
],
);
symbols::Bag::Filter(Some(p), Box::new(symbols::Bag::Inside(shape)))
}
Some(b) => symbols::Bag::Filter(Some(p), Box::new(b)),
}
},
};
Predicates: symbols::Predicate = {

View File

@@ -11,14 +11,14 @@ pub use super::types::*;
#[derive(Clone, Debug)]
pub enum Projection {
Nifti(String, LiteralSelector, Bag),
JSON(String, JsonValue, Bag),
Json(String, JsonValue, Bag),
}
impl Projection {
pub fn space(&self) -> &String {
match self {
Projection::Nifti(space, _, _) => &space,
Projection::JSON(space, _, _) => &space,
Projection::Nifti(space, _, _) => space,
Projection::Json(space, _, _) => space,
}
}
}
@@ -57,11 +57,9 @@ struct Transform {
/**********************************************************************/
#[derive(Clone, Debug)]
pub enum Bag {
// This is an implicit operator, inserted by the parser. Never to be used directly.
ViewPort(Box<Bag>),
// Bags
Distinct(Box<Bag>),
Filter(Option<Predicate>, Option<Box<Bag>>),
Filter(Option<Predicate>, Box<Bag>),
Complement(Box<Bag>),
Intersection(Box<Bag>, Box<Bag>),
Union(Box<Bag>, Box<Bag>),
@@ -75,12 +73,8 @@ pub enum Bag {
impl Bag {
pub fn space(&self) -> &String {
match self {
Bag::ViewPort(bag) => bag.space(),
Bag::Distinct(bag) => bag.space(),
Bag::Filter(_, bag) => match bag {
None => space::Space::universe().name(),
Some(b) => b.space(),
},
Bag::Filter(_, bag) => bag.space(),
Bag::Complement(bag) => bag.space(),
Bag::Intersection(lh, _) => {
// We are assuming lh and rh are in the same space.
@@ -144,7 +138,7 @@ impl Shape {
pub fn volume(&self) -> f64 {
match self {
Shape::Point(_, _) => std::f64::EPSILON, // The smallest non-zero volume possible
Shape::Point(_, _) => f64::EPSILON, // The smallest non-zero volume possible
Shape::HyperRectangle(_space, pos) => {
//TODO: At this time, only aligned to the axes, defined by two points, hyperrectangles are supported.
assert_eq!(pos.len(), 2);
@@ -208,7 +202,7 @@ impl Shape {
}
Shape::Label(_, _) => {
// FIXME: Needs to find a way to figure out the approximate volume of this specific ID, or return MAX or MIN..
std::f64::EPSILON
f64::EPSILON
}
Shape::Nifti(_) => unimplemented!("Nifti"),
}
@@ -243,7 +237,9 @@ impl Position {
Ordering::Greater => 1,
Ordering::Less => -1,
};
LiteralPosition(vec![LiteralNumber::Int(x)])
let v = vec![LiteralNumber::Int(x)];
LiteralPosition(v)
}
}
}
@@ -262,14 +258,33 @@ pub enum LiteralNumber {
Float(f64),
}
impl From<&LiteralNumber> for Vec<f64> {
impl From<&LiteralNumber> for f64 {
fn from(l: &LiteralNumber) -> Self {
let r = match l {
match l {
LiteralNumber::Int(x) => (*x) as f64,
LiteralNumber::Float(x) => *x,
};
}
}
}
vec![r]
impl From<LiteralNumber> for f64 {
fn from(l: LiteralNumber) -> Self {
(&l).into()
}
}
impl From<&LiteralNumber> for space::Coordinate {
fn from(literal: &LiteralNumber) -> Self {
match literal {
LiteralNumber::Float(f) => (*f).into(),
LiteralNumber::Int(i) => (*i as u64).into(),
}
}
}
impl From<LiteralNumber> for space::Coordinate {
fn from(literal: LiteralNumber) -> Self {
(&literal).into()
}
}
@@ -294,7 +309,7 @@ pub struct LiteralPosition(pub Vec<LiteralNumber>);
impl LiteralPosition {
pub fn get_type(&self) -> LiteralTypes {
let Self(v) = self;
let mut t = Vec::new();
let mut t = Vec::with_capacity(v.len());
for n in v {
t.push(match n {
@@ -327,34 +342,35 @@ impl LiteralPosition {
}
}
impl From<&LiteralNumber> for f64 {
fn from(l: &LiteralNumber) -> Self {
match l {
LiteralNumber::Int(x) => (*x) as f64,
LiteralNumber::Float(x) => *x,
impl From<&LiteralPosition> for Vec<f64> {
fn from(l: &LiteralPosition) -> Self {
// Speed-wise this should be the same, the downside is the newly
// allocated vector might be suboptimal in terms of space.
//let LiteralPosition(v) = l;
//v.iter().map(|literal| literal.into()).collect()
let LiteralPosition(v) = l;
let mut lv = Vec::with_capacity(v.len());
for value in v {
lv.push(value.into());
}
lv
}
}
impl From<&LiteralPosition> for Vec<f64> {
fn from(l: &LiteralPosition) -> Self {
let LiteralPosition(v) = l;
let mut r = Vec::with_capacity(v.len());
for x in v {
let x = match x {
LiteralNumber::Int(x) => (*x) as f64,
LiteralNumber::Float(x) => *x,
};
r.push(x);
}
r
impl From<LiteralPosition> for Vec<f64> {
fn from(l: LiteralPosition) -> Self {
(&l).into()
}
}
impl From<&Vec<f64>> for LiteralPosition {
fn from(v: &Vec<f64>) -> Self {
// Speed-wise this should be the same, the downside is the newly
// allocated vector might be suboptimal in terms of space.
//LiteralPosition(v.iter().map(|value| LiteralNumber::Float(*value)).collect())
let mut lv = Vec::with_capacity(v.len());
for value in v {
lv.push(LiteralNumber::Float(*value));
@@ -363,10 +379,36 @@ impl From<&Vec<f64>> for LiteralPosition {
LiteralPosition(lv)
}
}
impl From<Vec<f64>> for LiteralPosition {
fn from(v: Vec<f64>) -> Self {
(&v).into()
}
}
impl From<&space::Position> for LiteralPosition {
fn from(position: &space::Position) -> Self {
let lv: Vec<f64> = position.into();
(&lv).into()
let position: Vec<f64> = position.into();
position.into()
}
}
impl From<space::Position> for LiteralPosition {
fn from(position: space::Position) -> Self {
(&position).into()
}
}
impl From<&LiteralPosition> for space::Position {
fn from(position: &LiteralPosition) -> Self {
let position: Vec<f64> = position.into();
position.into()
}
}
impl From<LiteralPosition> for space::Position {
fn from(position: LiteralPosition) -> Self {
(&position).into()
}
}

View File

@@ -16,7 +16,7 @@ mod parsing {
fn query() {
let p = query_parser();
let nifti = "nifti(point{[0]})";
let nifti = "nifti(inside(point{[0]}))";
// Option is Empty
assert!(p.parse("").is_ok());
@@ -47,14 +47,14 @@ mod parsing {
let p = query_parser();
// Check allowed forms of the operator
assert!(p.parse("nifti(point{[0]})").is_ok());
assert!(p.parse("nifti(.properties.id, point{[0]})").is_ok());
assert!(p.parse("nifti(inside(point{[0]}))").is_ok());
assert!(p.parse("nifti(.properties.id, inside(point{[0]}))").is_ok());
unimplemented!(); // TO REMEMBER SOME WORK IS DUE HERE.
//FIXME: THIS SHOULD BE ALLOWED
assert!(p.parse("nifti(2, point{[0]})").is_ok());
assert!(p.parse("nifti(2.23, point{[0]})").is_ok());
assert!(p.parse("nifti(2, inside(point{[0]}))").is_ok());
assert!(p.parse("nifti(2.23, inside(point{[0]}))").is_ok());
//FIXME: SYNTAX OK, TYPE NOT
assert!(p.parse("nifti(point{[0], \"space\"})").is_err());
@@ -64,16 +64,16 @@ mod parsing {
fn json_operator() {
let p = query_parser();
assert!(p.parse("json(true, point{[0]})").is_ok());
assert!(p.parse("json(23, point{[0]})").is_ok());
assert!(p.parse("json([23, 24], point{[0]})").is_ok());
assert!(p.parse("json([23, count(.)], point{[0]})").is_ok());
assert!(p.parse("json(true, inside(point{[0]}))").is_ok());
assert!(p.parse("json(23, inside(point{[0]}))").is_ok());
assert!(p.parse("json([23, 24], inside(point{[0]}))").is_ok());
assert!(p.parse("json([23, count(.)], inside(point{[0]}))").is_ok());
assert!(p.parse("json(true)").is_err());
assert!(p.parse("json(true,)").is_err());
assert!(p.parse("json(, point{[0]})").is_err());
assert!(p.parse("json(point{[0]})").is_err());
assert!(p.parse("json(, inside(point{[0]}))").is_err());
assert!(p.parse("json(inside(point{[0]}))").is_err());
assert!(p.parse("json(true, point)").is_err());
}
@@ -83,24 +83,24 @@ mod parsing {
let p = query_parser();
assert!(p
.parse(format!("json({}, point{{[0]}})", "true").as_str())
.parse(format!("json({}, inside(point{{[0]}}))", "true").as_str())
.is_ok());
assert!(p
.parse(format!("json({}, point{{[0]}})", "false").as_str())
.parse(format!("json({}, inside(point{{[0]}}))", "false").as_str())
.is_ok());
assert!(p
.parse(format!("json({}, point{{[0]}})", "null").as_str())
.parse(format!("json({}, inside(point{{[0]}}))", "null").as_str())
.is_ok());
// Incorrect capitalisation
assert!(p
.parse(format!("json({}, point{{[0]}})", "True").as_str())
.parse(format!("json({}, inside(point{{[0]}}))", "True").as_str())
.is_err());
assert!(p
.parse(format!("json({}, point{{[0]}})", "False").as_str())
.parse(format!("json({}, inside(point{{[0]}}))", "False").as_str())
.is_err());
assert!(p
.parse(format!("json({}, point{{[0]}})", "Null").as_str())
.parse(format!("json({}, inside(point{{[0]}}))", "Null").as_str())
.is_err());
}
@@ -109,24 +109,24 @@ mod parsing {
let p = query_parser();
assert!(p
.parse(format!("json({}, point{{[0]}})", "{}").as_str())
.parse(format!("json({}, inside(point{{[0]}}))", "{}").as_str())
.is_ok());
assert!(p
.parse(format!("json({}, point{{[0]}})", "{\"field\": 0}").as_str())
.parse(format!("json({}, inside(point{{[0]}}))", "{\"field\": 0}").as_str())
.is_ok());
assert!(p
.parse(format!("json({}, point{{[0]}})", "{\"field\": 0, \"field1\": 1}").as_str())
.parse(format!("json({}, inside(point{{[0]}}))", "{\"field\": 0, \"field1\": 1}").as_str())
.is_ok());
assert!(p
.parse(format!("json({}, point{{[0]}})", "{\"field\": [0, 1]}").as_str())
.parse(format!("json({}, inside(point{{[0]}}))", "{\"field\": [0, 1]}").as_str())
.is_ok());
assert!(p
.parse(format!("json({}, point{{[0]}})", "{\"field\": {\"field1\": 0}}").as_str())
.parse(format!("json({}, inside(point{{[0]}}))", "{\"field\": {\"field1\": 0}}").as_str())
.is_ok());
assert!(p
.parse(
format!(
"json({}, point{{[0]}})",
"json({}, inside(point{{[0]}}))",
"{\"field\": [{\"field1\": 0}, {\"field1\": 1}]}"
)
.as_str()
@@ -139,25 +139,25 @@ mod parsing {
let p = query_parser();
assert!(p
.parse(format!("json({}, point{{[0]}})", "{:}").as_str())
.parse(format!("json({}, inside(point{{[0]}}))", "{:}").as_str())
.is_err());
assert!(p
.parse(format!("json({}, point{{[0]}})", "{field: 0}").as_str())
.parse(format!("json({}, inside(point{{[0]}}))", "{field: 0}").as_str())
.is_err());
assert!(p
.parse(format!("json({}, point{{[0]}})", "{0: 0}").as_str())
.parse(format!("json({}, inside(point{{[0]}}))", "{0: 0}").as_str())
.is_err());
assert!(p
.parse(format!("json({}, point{{[0]}})", "{\"0\": }").as_str())
.parse(format!("json({}, inside(point{{[0]}}))", "{\"0\": }").as_str())
.is_err());
assert!(p
.parse(format!("json({}, point{{[0]}})", "{\"0\": 0 }").as_str())
.parse(format!("json({}, inside(point{{[0]}}))", "{\"0\": 0 }").as_str())
.is_ok());
assert!(p
.parse(format!("json({}, point{{[0]}})", "{\"field\": 0 }").as_str())
.parse(format!("json({}, inside(point{{[0]}}))", "{\"field\": 0 }").as_str())
.is_ok());
assert!(p
.parse(format!("json({}, point{{[0]}})", "{\"field\": \"0\" }").as_str())
.parse(format!("json({}, inside(point{{[0]}}))", "{\"field\": \"0\" }").as_str())
.is_ok());
}
@@ -166,20 +166,20 @@ mod parsing {
let p = query_parser();
assert!(p
.parse(format!("json({}, point{{[0]}})", "[, 0]").as_str())
.parse(format!("json({}, inside(point{{[0]}}))", "[, 0]").as_str())
.is_err());
assert!(p
.parse(format!("json({}, point{{[0]}})", "[]").as_str())
.parse(format!("json({}, inside(point{{[0]}}))", "[]").as_str())
.is_ok());
assert!(p
.parse(format!("json({}, point{{[0]}})", "[0]").as_str())
.parse(format!("json({}, inside(point{{[0]}}))", "[0]").as_str())
.is_ok());
assert!(p
.parse(format!("json({}, point{{[0]}})", "[0, 1]").as_str())
.parse(format!("json({}, inside(point{{[0]}}))", "[0, 1]").as_str())
.is_ok());
assert!(p
.parse(
format!("json({}, point{{[0]}})", "[{\"field\": 0}, {\"field\": 1}]").as_str()
format!("json({}, inside(point{{[0]}}))", "[{\"field\": 0}, {\"field\": 1}]").as_str()
)
.is_ok());
}
@@ -190,40 +190,40 @@ mod parsing {
// count ()
assert!(p
.parse(format!("json({}, point{{[0]}})", "count()").as_str())
.parse(format!("json({}, inside(point{{[0]}}))", "count()").as_str())
.is_err());
assert!(p
.parse(format!("json({}, point{{[0]}})", "count(distinct)").as_str())
.parse(format!("json({}, inside(point{{[0]}}))", "count(distinct)").as_str())
.is_err());
assert!(p
.parse(format!("json({}, point{{[0]}})", "count(.)").as_str())
.parse(format!("json({}, inside(point{{[0]}}))", "count(.)").as_str())
.is_ok());
assert!(p
.parse(format!("json({}, point{{[0]}})", "count(distinct .)").as_str())
.parse(format!("json({}, inside(point{{[0]}}))", "count(distinct .)").as_str())
.is_ok());
// sum ()
assert!(p
.parse(format!("json({}, point{{[0]}})", "sum()").as_str())
.parse(format!("json({}, inside(point{{[0]}}))", "sum()").as_str())
.is_err());
assert!(p
.parse(format!("json({}, point{{[0]}})", "sum(.)").as_str())
.parse(format!("json({}, inside(point{{[0]}}))", "sum(.)").as_str())
.is_ok());
// min ()
assert!(p
.parse(format!("json({}, point{{[0]}})", "min()").as_str())
.parse(format!("json({}, inside(point{{[0]}}))", "min()").as_str())
.is_err());
assert!(p
.parse(format!("json({}, point{{[0]}})", "min(.)").as_str())
.parse(format!("json({}, inside(point{{[0]}}))", "min(.)").as_str())
.is_ok());
// max ()
assert!(p
.parse(format!("json({}, point{{[0]}})", "max()").as_str())
.parse(format!("json({}, inside(point{{[0]}}))", "max()").as_str())
.is_err());
assert!(p
.parse(format!("json({}, point{{[0]}})", "max(.)").as_str())
.parse(format!("json({}, inside(point{{[0]}}))", "max(.)").as_str())
.is_ok());
}
@@ -233,42 +233,42 @@ mod parsing {
// Integers
assert!(p
.parse(format!("json({}, point{{[0]}})", "0").as_str())
.parse(format!("json({}, inside(point{{[0]}}))", "0").as_str())
.is_ok());
assert!(p
.parse(format!("json({}, point{{[0]}})", "+0").as_str())
.parse(format!("json({}, inside(point{{[0]}}))", "+0").as_str())
.is_err());
assert!(p
.parse(format!("json({}, point{{[0]}})", "-0").as_str())
.parse(format!("json({}, inside(point{{[0]}}))", "-0").as_str())
.is_ok());
assert!(p
.parse(format!("json({}, point{{[0]}})", "1").as_str())
.parse(format!("json({}, inside(point{{[0]}}))", "1").as_str())
.is_ok());
assert!(p
.parse(format!("json({}, point{{[0]}})", "+1").as_str())
.parse(format!("json({}, inside(point{{[0]}}))", "+1").as_str())
.is_err());
assert!(p
.parse(format!("json({}, point{{[0]}})", "-1").as_str())
.parse(format!("json({}, inside(point{{[0]}}))", "-1").as_str())
.is_ok());
// Floating point values
assert!(p
.parse(format!("json({}, point{{[0]}})", "0.0").as_str())
.parse(format!("json({}, inside(point{{[0]}}))", "0.0").as_str())
.is_ok());
assert!(p
.parse(format!("json({}, point{{[0]}})", "+0.0").as_str())
.parse(format!("json({}, inside(point{{[0]}}))", "+0.0").as_str())
.is_err());
assert!(p
.parse(format!("json({}, point{{[0]}})", "-0.0").as_str())
.parse(format!("json({}, inside(point{{[0]}}))", "-0.0").as_str())
.is_ok());
assert!(p
.parse(format!("json({}, point{{[0]}})", "0.1").as_str())
.parse(format!("json({}, inside(point{{[0]}}))", "0.1").as_str())
.is_ok());
assert!(p
.parse(format!("json({}, point{{[0]}})", "+0.01").as_str())
.parse(format!("json({}, inside(point{{[0]}}))", "+0.01").as_str())
.is_err());
assert!(p
.parse(format!("json({}, point{{[0]}})", "-0.01").as_str())
.parse(format!("json({}, inside(point{{[0]}}))", "-0.01").as_str())
.is_ok());
}
}
@@ -290,7 +290,7 @@ mod parsing {
assert!(p.parse("").is_err());
assert!(p.parse("point{[0]}").is_ok());
assert!(p.parse("inside(point{[0]})").is_ok());
}
/* Not useful to test this rule
@@ -305,7 +305,7 @@ mod parsing {
assert!(p.parse("distinct()").is_err());
assert!(p.parse("distinct(point{[0]})").is_ok());
assert!(p.parse("distinct(inside(point{[0]}))").is_ok());
}
#[test]
@@ -314,7 +314,7 @@ mod parsing {
assert!(p.parse("complement()").is_err());
assert!(p.parse("complement(point{[0]})").is_ok());
assert!(p.parse("complement(inside(point{[0]}))").is_ok());
}
#[test]
@@ -322,12 +322,12 @@ mod parsing {
let p = filters_parser();
assert!(p.parse("intersection()").is_err());
assert!(p.parse("intersection(point{[0]})").is_err());
assert!(p.parse("intersection(inside(point{[0]}))").is_err());
assert!(p
.parse("intersection(point{[0]}, point{[0]}, point{[0]})")
.parse("intersection(inside(point{[0]}), inside(point{[0]}), inside(point{[0]}))")
.is_err());
assert!(p.parse("intersection(point{[0]}, point{[0]})").is_ok());
assert!(p.parse("intersection(inside(point{[0]}), inside(point{[0]}))").is_ok());
}
#[test]
@@ -335,12 +335,12 @@ mod parsing {
let p = filters_parser();
assert!(p.parse("union()").is_err());
assert!(p.parse("union(point{[0]})").is_err());
assert!(p.parse("union(inside(point{[0]}))").is_err());
assert!(p
.parse("union(point{[0]}, point{[0]}, point{[0]})")
.parse("union(inside(point{[0]}), inside(point{[0]}), inside(point{[0]}))")
.is_err());
assert!(p.parse("union(point{[0]}, point{[0]})").is_ok());
assert!(p.parse("union(inside(point{[0]}), inside(point{[0]}))").is_ok());
}
#[test]
@@ -348,10 +348,10 @@ mod parsing {
let p = filters_parser();
assert!(p.parse("filter()").is_err());
assert!(p.parse("filter(point{[0]})").is_ok());
assert!(p.parse("filter(inside(point{[0]}))").is_ok());
assert!(p.parse("filter(=(., [0]))").is_ok());
assert!(p.parse("filter(=(., [0]), point{[0]})").is_ok());
assert!(p.parse("filter(=(., [0]), inside(point{[0]}))").is_ok());
}
/* Not useful to test this rule
@@ -365,17 +365,17 @@ mod parsing {
let p = filters_parser();
assert!(p
.parse(format!("filter({}, point{{[0]}})", "<(., [0])").as_str())
.parse(format!("filter({}, inside(point{{[0]}}))", "<(., [0])").as_str())
.is_ok());
assert!(p
.parse(format!("filter({}, point{{[0]}})", "<(, [0])").as_str())
.parse(format!("filter({}, inside(point{{[0]}}))", "<(, [0])").as_str())
.is_err());
assert!(p
.parse(format!("filter({}, point{{[0]}})", "<(.)").as_str())
.parse(format!("filter({}, inside(point{{[0]}}))", "<(.)").as_str())
.is_err());
assert!(p
.parse(format!("filter({}, point{{[0]}})", "<()").as_str())
.parse(format!("filter({}, inside(point{{[0]}}))", "<()").as_str())
.is_err());
}
@@ -384,17 +384,17 @@ mod parsing {
let p = filters_parser();
assert!(p
.parse(format!("filter({}, point{{[0]}})", ">(., [0])").as_str())
.parse(format!("filter({}, inside(point{{[0]}}))", ">(., [0])").as_str())
.is_ok());
assert!(p
.parse(format!("filter({}, point{{[0]}})", ">(, [0])").as_str())
.parse(format!("filter({}, inside(point{{[0]}}))", ">(, [0])").as_str())
.is_err());
assert!(p
.parse(format!("filter({}, point{{[0]}})", ">(.)").as_str())
.parse(format!("filter({}, inside(point{{[0]}}))", ">(.)").as_str())
.is_err());
assert!(p
.parse(format!("filter({}, point{{[0]}})", ">()").as_str())
.parse(format!("filter({}, inside(point{{[0]}}))", ">()").as_str())
.is_err());
}
@@ -403,17 +403,17 @@ mod parsing {
let p = filters_parser();
assert!(p
.parse(format!("filter({}, point{{[0]}})", "=(., [0])").as_str())
.parse(format!("filter({}, inside(point{{[0]}}))", "=(., [0])").as_str())
.is_ok());
assert!(p
.parse(format!("filter({}, point{{[0]}})", "=(, [0])").as_str())
.parse(format!("filter({}, inside(point{{[0]}}))", "=(, [0])").as_str())
.is_err());
assert!(p
.parse(format!("filter({}, point{{[0]}})", "=(.)").as_str())
.parse(format!("filter({}, inside(point{{[0]}}))", "=(.)").as_str())
.is_err());
assert!(p
.parse(format!("filter({}, point{{[0]}})", "=()").as_str())
.parse(format!("filter({}, inside(point{{[0]}}))", "=()").as_str())
.is_err());
}
@@ -422,11 +422,11 @@ mod parsing {
let p = filters_parser();
assert!(p
.parse(format!("filter({}, point{{[0]}})", "!(=(., [0]))").as_str())
.parse(format!("filter({}, inside(point{{[0]}}))", "!(=(., [0]))").as_str())
.is_ok());
assert!(p
.parse(format!("filter({}, point{{[0]}})", "!()").as_str())
.parse(format!("filter({}, inside(point{{[0]}}))", "!()").as_str())
.is_err());
}
@@ -435,17 +435,17 @@ mod parsing {
let p = filters_parser();
assert!(p
.parse(format!("filter({}, point{{[0]}})", "&(=(., [0]), =(., [0]))").as_str())
.parse(format!("filter({}, inside(point{{[0]}}))", "&(=(., [0]), =(., [0]))").as_str())
.is_ok());
assert!(p
.parse(format!("filter({}, point{{[0]}})", "&(, =(., [0]))").as_str())
.parse(format!("filter({}, inside(point{{[0]}}))", "&(, =(., [0]))").as_str())
.is_err());
assert!(p
.parse(format!("filter({}, point{{[0]}})", "&(|(=(., [0])))").as_str())
.parse(format!("filter({}, inside(point{{[0]}}))", "&(|(=(., [0])))").as_str())
.is_err());
assert!(p
.parse(format!("filter({}, point{{[0]}})", "&()").as_str())
.parse(format!("filter({}, inside(point{{[0]}}))", "&()").as_str())
.is_err());
}
@@ -454,17 +454,17 @@ mod parsing {
let p = filters_parser();
assert!(p
.parse(format!("filter({}, point{{[0]}})", "|(=(., [0]), =(., [0]))").as_str())
.parse(format!("filter({}, inside(point{{[0]}}))", "|(=(., [0]), =(., [0]))").as_str())
.is_ok());
assert!(p
.parse(format!("filter({}, point{{[0]}})", "|(, =(., [0]))").as_str())
.parse(format!("filter({}, inside(point{{[0]}}))", "|(, =(., [0]))").as_str())
.is_err());
assert!(p
.parse(format!("filter({}, point{{[0]}})", "|(|(=(., [0])))").as_str())
.parse(format!("filter({}, inside(point{{[0]}}))", "|(|(=(., [0])))").as_str())
.is_err());
assert!(p
.parse(format!("filter({}, point{{[0]}})", "|()").as_str())
.parse(format!("filter({}, inside(point{{[0]}}))", "|()").as_str())
.is_err());
}
@@ -474,11 +474,11 @@ mod parsing {
assert!(p.parse("bag{}").is_err());
assert!(p.parse("bag{point{[0]}}").is_ok());
assert!(p.parse("bag{point{[0]}, point{[0]}}").is_ok());
assert!(p.parse("bag{point{[0]}, point{[0]}, point{[0]}}").is_ok());
assert!(p.parse("bag{inside(point{[0]})}").is_ok());
assert!(p.parse("bag{inside(point{[0]}), inside(point{[0]})}").is_ok());
assert!(p.parse("bag{inside(point{[0]}), inside(point{[0]}), inside(point{[0]})}").is_ok());
assert!(p
.parse("bag{point{[0]}, hypersphere{[0], 1}, hyperrectangle{[0], [1]}}")
.parse("bag{inside(point{[0]}), inside(hypersphere{[0], 1}), inside(hyperrectangle{[0], [1]})}")
.is_ok());
}
@@ -518,21 +518,21 @@ mod parsing {
// At least two positions when it is aligned with the axis, otherwise an even number
// of positions, as the number of vertices follows the rule 2**k, where k is the number
// of dimensions of the space containing the hyperrectangle.
assert!(p.parse("hyperrectangle{}").is_err());
assert!(p.parse("hyperrectangle{[]}").is_err());
assert!(p.parse("hyperrectangle{[0]}").is_err());
assert!(p.parse("hyperrectangle{[0], [1], [2]}").is_err());
assert!(p.parse("hyperrectangle{[0], [1], [2], [3], [4]}").is_err());
assert!(p.parse("inside(hyperrectangle{})").is_err());
assert!(p.parse("inside(hyperrectangle{[]})").is_err());
assert!(p.parse("inside(hyperrectangle{[0]})").is_err());
assert!(p.parse("inside(hyperrectangle{[0], [1], [2]})").is_err());
assert!(p.parse("inside(hyperrectangle{[0], [1], [2], [3], [4]})").is_err());
assert!(p.parse("hyperrectangle{[0], [1]}").is_ok());
assert!(p.parse("hyperrectangle{[0], [1], \"space\"}").is_ok());
assert!(p.parse("hyperrectangle{[0], [1], [2], [3]}").is_ok());
assert!(p.parse("hyperrectangle{[0], [1], [2], [3]}").is_ok());
assert!(p.parse("inside(hyperrectangle{[0], [1]})").is_ok());
assert!(p.parse("inside(hyperrectangle{[0], [1], \"space\"})").is_ok());
assert!(p.parse("inside(hyperrectangle{[0], [1], [2], [3]})").is_ok());
assert!(p.parse("inside(hyperrectangle{[0], [1], [2], [3]})").is_ok());
assert!(p
.parse("hyperrectangle{[0], [1], [2], [3], [4], [5]}")
.parse("inside(hyperrectangle{[0], [1], [2], [3], [4], [5]})")
.is_ok());
assert!(p
.parse("hyperrectangle{[0], [1], [2], [3], [4], [5], \"space\"}")
.parse("inside(hyperrectangle{[0], [1], [2], [3], [4], [5], \"space\"})")
.is_ok());
}
@@ -540,23 +540,23 @@ mod parsing {
fn hyperrsphere() {
let p = filters_parser();
assert!(p.parse("hypersphere{}").is_err());
assert!(p.parse("hypersphere{[]}").is_err());
assert!(p.parse("hypersphere{[0]}").is_err());
assert!(p.parse("inside(hypersphere{}").is_err());
assert!(p.parse("inside(hypersphere{[]})").is_err());
assert!(p.parse("inside(hypersphere{[0]})").is_err());
assert!(p.parse("hypersphere{[0], 23}").is_ok());
assert!(p.parse("hypersphere{[0], 23, \"space\"}").is_ok());
assert!(p.parse("inside(hypersphere{[0], 23})").is_ok());
assert!(p.parse("inside(hypersphere{[0], 23, \"space\"})").is_ok());
}
#[test]
fn point() {
let p = filters_parser();
assert!(p.parse("point{}").is_err());
assert!(p.parse("point{[]}").is_err());
assert!(p.parse("inside(point{})").is_err());
assert!(p.parse("inside(point{[]})").is_err());
assert!(p.parse("point{[0]}").is_ok());
assert!(p.parse("point{[0], \"space\"}").is_ok());
assert!(p.parse("inside(point{[0]})").is_ok());
assert!(p.parse("inside(point{[0], \"space\"})").is_ok());
}
#[test]
@@ -579,30 +579,30 @@ mod parsing {
assert!(p
.parse(
format!(
"filter(=({}, [1]), point{{[0]}})",
"filter(=({}, [1]), inside(point{{[0]}}))",
"str_cmp_ignore_case(.field, \"\")"
)
.as_str()
)
.is_ok());
assert!(p
.parse(format!("filter(=({}, [1]), point{{[0]}})", "str_cmp(.field, \"\")").as_str())
.parse(format!("filter(=({}, [1]), inside(point{{[0]}}))", "str_cmp(.field, \"\")").as_str())
.is_ok());
assert!(p
.parse(format!("filter(=({}, [1]), point{{[0]}})", ".field").as_str())
.parse(format!("filter(=({}, [1]), inside(point{{[0]}}))", ".field").as_str())
.is_ok());
assert!(p
.parse(format!("filter(=({}, [1]), point{{[0]}})", "[0]").as_str())
.parse(format!("filter(=({}, [1]), inside(point{{[0]}}))", "[0]").as_str())
.is_ok());
assert!(p
.parse(format!("filter(=({}, [1]), point{{[0]}})", "point{[0]}").as_str())
.parse(format!("filter(=({}, [1]), inside(point{{[0]}}))", "inside(point{[0]})").as_str())
.is_err());
assert!(p
.parse(format!("filter(=({}, [1]), point{{[0]}})", "{0}").as_str())
.parse(format!("filter(=({}, [1]), inside(point{{[0]}}))", "{0}").as_str())
.is_err());
assert!(p
.parse(format!("filter(=({}, [1]), point{{[0]}})", "").as_str())
.parse(format!("filter(=({}, [1]), inside(point{{[0]}}))", "").as_str())
.is_err());
}*/
@@ -612,15 +612,15 @@ mod parsing {
assert!(p
.parse(
format!("filter(=({}, [1]), point{{[0]}})", "str_cmp(.field, \"\")").as_str()
format!("filter(=({}, [1]), inside(point{{[0]}}))", "str_cmp(.field, \"\")").as_str()
)
.is_ok());
assert!(p
.parse(format!("filter(=({}, [1]), point{{[0]}})", "str_cmp(.field)").as_str())
.parse(format!("filter(=({}, [1]), inside(point{{[0]}}))", "str_cmp(.field)").as_str())
.is_err());
assert!(p
.parse(format!("filter(=({}, [1]), point{{[0]}})", "str_cmp(\"\")").as_str())
.parse(format!("filter(=({}, [1]), inside(point{{[0]}}))", "str_cmp(\"\")").as_str())
.is_err());
}
@@ -631,7 +631,7 @@ mod parsing {
assert!(p
.parse(
format!(
"filter(=({}, [1]), point{{[0]}})",
"filter(=({}, [1]), inside(point{{[0]}}))",
"str_cmp_ignore_case(.field, \"\")"
)
.as_str()
@@ -641,7 +641,7 @@ mod parsing {
assert!(p
.parse(
format!(
"filter(=({}, [1]), point{{[0]}})",
"filter(=({}, [1]), inside(point{{[0]}}))",
"str_cmp_ignore_case(.field)"
)
.as_str()
@@ -650,7 +650,7 @@ mod parsing {
assert!(p
.parse(
format!(
"filter(=({}, [1]), point{{[0]}})",
"filter(=({}, [1]), inside(point{{[0]}}))",
"str_cmp_ignore_case(\"\")"
)
.as_str()
@@ -663,19 +663,19 @@ mod parsing {
let p = filters_parser();
assert!(p
.parse(format!("filter(=({}, [1]), point{{[0]}})", ".").as_str())
.parse(format!("filter(=({}, [1]), inside(point{{[0]}}))", ".").as_str())
.is_ok());
assert!(p
.parse(format!("filter(=({}, [1]), point{{[0]}})", ".field").as_str())
.parse(format!("filter(=({}, [1]), inside(point{{[0]}}))", ".field").as_str())
.is_ok());
assert!(p
.parse(format!("filter(=({}, [1]), point{{[0]}})", ".field.field").as_str())
.parse(format!("filter(=({}, [1]), inside(point{{[0]}}))", ".field.field").as_str())
.is_ok());
assert!(p
.parse(format!("filter(=({}, [1]), point{{[0]}})", ".field[1].field").as_str())
.parse(format!("filter(=({}, [1]), inside(point{{[0]}}))", ".field[1].field").as_str())
.is_ok());
assert!(p
.parse(format!("filter(=({}, [1]), point{{[0]}})", ".field.field[1]").as_str())
.parse(format!("filter(=({}, [1]), inside(point{{[0]}}))", ".field.field[1]").as_str())
.is_ok());
}
@@ -684,26 +684,26 @@ mod parsing {
let p = filters_parser();
// Empty
assert!(p.parse(format!("point{{{}}}", "[]").as_str()).is_err());
assert!(p.parse(format!("inside(point{{{}}})", "[]").as_str()).is_err());
// Non-numerical coordinate:
assert!(p.parse(format!("point{{{}}}", "[aa]").as_str()).is_err());
assert!(p.parse(format!("inside(point{{{}}})", "[aa]").as_str()).is_err());
assert!(p
.parse(format!("point{{{}}}", "[\"aa\"]").as_str())
.parse(format!("inside(point{{{}}})", "[\"aa\"]").as_str())
.is_err());
// One or more coordinates
assert!(p.parse(format!("point{{{}}}", "[0]").as_str()).is_ok());
assert!(p.parse(format!("point{{{}}}", "[0, 0]").as_str()).is_ok());
assert!(p.parse(format!("inside(point{{{}}})", "[0]").as_str()).is_ok());
assert!(p.parse(format!("inside(point{{{}}})", "[0, 0]").as_str()).is_ok());
assert!(p
.parse(format!("point{{{}}}", "[0, 0, 0]").as_str())
.parse(format!("inside(point{{{}}})", "[0, 0, 0]").as_str())
.is_ok());
assert!(p
.parse(format!("point{{{}}}", "[0, 0, 0, 0]").as_str())
.parse(format!("inside(point{{{}}})", "[0, 0, 0, 0]").as_str())
.is_ok());
assert!(p
.parse(format!("point{{{}}}", "[0,0,0,0]").as_str())
.parse(format!("inside(point{{{}}})", "[0,0,0,0]").as_str())
.is_ok());
}
@@ -713,66 +713,66 @@ mod parsing {
// Single dot
assert!(p
.parse(format!("filter(<({}, [1]), point{{[0]}})", ".").as_str())
.parse(format!("filter(<({}, [1]), inside(point{{[0]}}))", ".").as_str())
.is_ok());
// Check first character is within allowed characters
assert!(p
.parse(format!("filter(<({}, [1]), point{{[0]}})", ".a").as_str())
.parse(format!("filter(<({}, [1]), inside(point{{[0]}}))", ".a").as_str())
.is_ok());
assert!(p
.parse(format!("filter(<({}, [1]), point{{[0]}})", "._").as_str())
.parse(format!("filter(<({}, [1]), inside(point{{[0]}}))", "._").as_str())
.is_ok());
assert!(p
.parse(format!("filter(<({}, [1]), point{{[0]}})", ".2").as_str())
.parse(format!("filter(<({}, [1]), inside(point{{[0]}}))", ".2").as_str())
.is_err());
// Check second character is within allowed characters
assert!(p
.parse(format!("filter(<({}, [1]), point{{[0]}})", ".fa").as_str())
.parse(format!("filter(<({}, [1]), inside(point{{[0]}}))", ".fa").as_str())
.is_ok());
assert!(p
.parse(format!("filter(<({}, [1]), point{{[0]}})", ".f2").as_str())
.parse(format!("filter(<({}, [1]), inside(point{{[0]}}))", ".f2").as_str())
.is_ok());
assert!(p
.parse(format!("filter(<({}, [1]), point{{[0]}})", ".f_").as_str())
.parse(format!("filter(<({}, [1]), inside(point{{[0]}}))", ".f_").as_str())
.is_ok());
assert!(p
.parse(format!("filter(<({}, [1]), point{{[0]}})", ".f2").as_str())
.parse(format!("filter(<({}, [1]), inside(point{{[0]}}))", ".f2").as_str())
.is_ok());
// Check we can add subscript
assert!(p
.parse(format!("filter(<({}, [1]), point{{[0]}})", ".[23]").as_str())
.parse(format!("filter(<({}, [1]), inside(point{{[0]}}))", ".[23]").as_str())
.is_ok());
assert!(p
.parse(format!("filter(<({}, [1]), point{{[0]}})", ".f[0]").as_str())
.parse(format!("filter(<({}, [1]), inside(point{{[0]}}))", ".f[0]").as_str())
.is_ok());
assert!(p
.parse(format!("filter(<({}, [1]), point{{[0]}})", ".f[2]").as_str())
.parse(format!("filter(<({}, [1]), inside(point{{[0]}}))", ".f[2]").as_str())
.is_ok());
assert!(p
.parse(format!("filter(<({}, [1]), point{{[0]}})", ".f[23]").as_str())
.parse(format!("filter(<({}, [1]), inside(point{{[0]}}))", ".f[23]").as_str())
.is_ok());
// Invalid index values
assert!(p
.parse(format!("filter(<({}, [1]), point{{[0]}})", ".f[2.3]").as_str())
.parse(format!("filter(<({}, [1]), inside(point{{[0]}}))", ".f[2.3]").as_str())
.is_err());
assert!(p
.parse(format!("filter(<({}, [1]), point{{[0]}})", ".f[02]").as_str())
.parse(format!("filter(<({}, [1]), inside(point{{[0]}}))", ".f[02]").as_str())
.is_err());
assert!(p
.parse(format!("filter(<({}, [1]), point{{[0]}})", ".f[-2]").as_str())
.parse(format!("filter(<({}, [1]), inside(point{{[0]}}))", ".f[-2]").as_str())
.is_err());
assert!(p
.parse(format!("filter(<({}, [1]), point{{[0]}})", ".f[2e2]").as_str())
.parse(format!("filter(<({}, [1]), inside(point{{[0]}}))", ".f[2e2]").as_str())
.is_err());
assert!(p
.parse(format!("filter(<({}, [1]), point{{[0]}})", ".f[2E2]").as_str())
.parse(format!("filter(<({}, [1]), inside(point{{[0]}}))", ".f[2E2]").as_str())
.is_err());
assert!(p
.parse(format!("filter(<({}, [1]), point{{[0]}})", ".f[+2]").as_str())
.parse(format!("filter(<({}, [1]), inside(point{{[0]}}))", ".f[+2]").as_str())
.is_err());
}
@@ -836,42 +836,42 @@ mod parsing {
// Integers
assert!(p
.parse(format!("hypersphere{{[0],{}}}", "0").as_str())
.parse(format!("inside(hypersphere{{[0],{}}})", "0").as_str())
.is_ok());
assert!(p
.parse(format!("hypersphere{{[0],{}}}", "+0").as_str())
.parse(format!("inside(hypersphere{{[0],{}}})", "+0").as_str())
.is_ok());
assert!(p
.parse(format!("hypersphere{{[0],{}}}", "-0").as_str())
.parse(format!("inside(hypersphere{{[0],{}}})", "-0").as_str())
.is_err());
assert!(p
.parse(format!("hypersphere{{[0],{}}}", "1").as_str())
.parse(format!("inside(hypersphere{{[0],{}}})", "1").as_str())
.is_ok());
assert!(p
.parse(format!("hypersphere{{[0],{}}}", "+1").as_str())
.parse(format!("inside(hypersphere{{[0],{}}})", "+1").as_str())
.is_ok());
assert!(p
.parse(format!("hypersphere{{[0],{}}}", "-1").as_str())
.parse(format!("inside(hypersphere{{[0],{}}})", "-1").as_str())
.is_err());
// Floating point values
assert!(p
.parse(format!("hypersphere{{[0],{}}}", "0.0").as_str())
.parse(format!("inside(hypersphere{{[0],{}}})", "0.0").as_str())
.is_ok());
assert!(p
.parse(format!("hypersphere{{[0],{}}}", "+0.0").as_str())
.parse(format!("inside(hypersphere{{[0],{}}})", "+0.0").as_str())
.is_ok());
assert!(p
.parse(format!("hypersphere{{[0],{}}}", "-0.0").as_str())
.parse(format!("inside(hypersphere{{[0],{}}})", "-0.0").as_str())
.is_err());
assert!(p
.parse(format!("hypersphere{{[0],{}}}", "0.1").as_str())
.parse(format!("inside(hypersphere{{[0],{}}})", "0.1").as_str())
.is_ok());
assert!(p
.parse(format!("hypersphere{{[0],{}}}", "+0.01").as_str())
.parse(format!("inside(hypersphere{{[0],{}}})", "+0.01").as_str())
.is_ok());
assert!(p
.parse(format!("hypersphere{{[0],{}}}", "-0.01").as_str())
.parse(format!("inside(hypersphere{{[0],{}}})", "-0.01").as_str())
.is_err());
}
@@ -880,20 +880,20 @@ mod parsing {
let p = filters_parser();
// Integers
assert!(p.parse(format!("point{{[{}]}}", "0").as_str()).is_ok());
assert!(p.parse(format!("point{{[{}]}}", "+0").as_str()).is_ok());
assert!(p.parse(format!("point{{[{}]}}", "-0").as_str()).is_ok());
assert!(p.parse(format!("point{{[{}]}}", "1").as_str()).is_ok());
assert!(p.parse(format!("point{{[{}]}}", "+1").as_str()).is_ok());
assert!(p.parse(format!("point{{[{}]}}", "-1").as_str()).is_ok());
assert!(p.parse(format!("inside(point{{[{}]}})", "0").as_str()).is_ok());
assert!(p.parse(format!("inside(point{{[{}]}})", "+0").as_str()).is_ok());
assert!(p.parse(format!("inside(point{{[{}]}})", "-0").as_str()).is_ok());
assert!(p.parse(format!("inside(point{{[{}]}})", "1").as_str()).is_ok());
assert!(p.parse(format!("inside(point{{[{}]}})", "+1").as_str()).is_ok());
assert!(p.parse(format!("inside(point{{[{}]}})", "-1").as_str()).is_ok());
// Floating point values
assert!(p.parse(format!("point{{[{}]}}", "0.0").as_str()).is_ok());
assert!(p.parse(format!("point{{[{}]}}", "+0.0").as_str()).is_ok());
assert!(p.parse(format!("point{{[{}]}}", "-0.0").as_str()).is_ok());
assert!(p.parse(format!("point{{[{}]}}", "0.1").as_str()).is_ok());
assert!(p.parse(format!("point{{[{}]}}", "+0.01").as_str()).is_ok());
assert!(p.parse(format!("point{{[{}]}}", "-0.01").as_str()).is_ok());
assert!(p.parse(format!("inside(point{{[{}]}})", "0.0").as_str()).is_ok());
assert!(p.parse(format!("inside(point{{[{}]}})", "+0.0").as_str()).is_ok());
assert!(p.parse(format!("inside(point{{[{}]}})", "-0.0").as_str()).is_ok());
assert!(p.parse(format!("inside(point{{[{}]}})", "0.1").as_str()).is_ok());
assert!(p.parse(format!("inside(point{{[{}]}})", "+0.01").as_str()).is_ok());
assert!(p.parse(format!("inside(point{{[{}]}})", "-0.01").as_str()).is_ok());
}
#[test]
@@ -901,54 +901,54 @@ mod parsing {
let p = filters_parser();
// Integers
assert!(p.parse(format!("point{{[{}]}}", "0").as_str()).is_ok());
assert!(p.parse(format!("point{{[{}]}}", "1").as_str()).is_ok());
assert!(p.parse(format!("point{{[{}]}}", "1e2").as_str()).is_ok());
assert!(p.parse(format!("point{{[{}]}}", "1e+2").as_str()).is_ok());
assert!(p.parse(format!("point{{[{}]}}", "1e-2").as_str()).is_ok());
assert!(p.parse(format!("point{{[{}]}}", "1E2").as_str()).is_ok());
assert!(p.parse(format!("point{{[{}]}}", "100").as_str()).is_ok());
assert!(p.parse(format!("inside(point{{[{}]}})", "0").as_str()).is_ok());
assert!(p.parse(format!("inside(point{{[{}]}})", "1").as_str()).is_ok());
assert!(p.parse(format!("inside(point{{[{}]}})", "1e2").as_str()).is_ok());
assert!(p.parse(format!("inside(point{{[{}]}})", "1e+2").as_str()).is_ok());
assert!(p.parse(format!("inside(point{{[{}]}})", "1e-2").as_str()).is_ok());
assert!(p.parse(format!("inside(point{{[{}]}})", "1E2").as_str()).is_ok());
assert!(p.parse(format!("inside(point{{[{}]}})", "100").as_str()).is_ok());
assert!(p.parse(format!("point{{[{}]}}", "010").as_str()).is_err());
assert!(p.parse(format!("inside(point{{[{}]}})", "010").as_str()).is_err());
// Floating point values (normalized)
assert!(p.parse(format!("point{{[{}]}}", "0.0").as_str()).is_ok());
assert!(p.parse(format!("point{{[{}]}}", "0.1").as_str()).is_ok());
assert!(p.parse(format!("point{{[{}]}}", "0.1e0").as_str()).is_ok());
assert!(p.parse(format!("point{{[{}]}}", "0.1e2").as_str()).is_ok());
assert!(p.parse(format!("point{{[{}]}}", "0.1e+2").as_str()).is_ok());
assert!(p.parse(format!("point{{[{}]}}", "0.1e-2").as_str()).is_ok());
assert!(p.parse(format!("point{{[{}]}}", "0.1E2").as_str()).is_ok());
assert!(p.parse(format!("point{{[{}]}}", "0.1E23").as_str()).is_ok());
assert!(p.parse(format!("point{{[{}]}}", "0.01").as_str()).is_ok());
assert!(p.parse(format!("inside(point{{[{}]}})", "0.0").as_str()).is_ok());
assert!(p.parse(format!("inside(point{{[{}]}})", "0.1").as_str()).is_ok());
assert!(p.parse(format!("inside(point{{[{}]}})", "0.1e0").as_str()).is_ok());
assert!(p.parse(format!("inside(point{{[{}]}})", "0.1e2").as_str()).is_ok());
assert!(p.parse(format!("inside(point{{[{}]}})", "0.1e+2").as_str()).is_ok());
assert!(p.parse(format!("inside(point{{[{}]}})", "0.1e-2").as_str()).is_ok());
assert!(p.parse(format!("inside(point{{[{}]}})", "0.1E2").as_str()).is_ok());
assert!(p.parse(format!("inside(point{{[{}]}})", "0.1E23").as_str()).is_ok());
assert!(p.parse(format!("inside(point{{[{}]}})", "0.01").as_str()).is_ok());
assert!(p.parse(format!("point{{[{}]}}", "0.").as_str()).is_err());
assert!(p.parse(format!("inside(point{{[{}]}})", "0.").as_str()).is_err());
assert!(p
.parse(format!("point{{[{}]}}", "0.1E03").as_str())
.parse(format!("inside(point{{[{}]}})", "0.1E03").as_str())
.is_err());
assert!(p
.parse(format!("point{{[{}]}}", "0.1E0.3").as_str())
.parse(format!("inside(point{{[{}]}})", "0.1E0.3").as_str())
.is_err());
// Floating point values (denormalized)
assert!(p.parse(format!("point{{[{}]}}", "1.0").as_str()).is_ok());
assert!(p.parse(format!("point{{[{}]}}", "1.1").as_str()).is_ok());
assert!(p.parse(format!("point{{[{}]}}", "1.1e0").as_str()).is_ok());
assert!(p.parse(format!("point{{[{}]}}", "1.1e2").as_str()).is_ok());
assert!(p.parse(format!("point{{[{}]}}", "1.1e+2").as_str()).is_ok());
assert!(p.parse(format!("point{{[{}]}}", "1.1e-2").as_str()).is_ok());
assert!(p.parse(format!("point{{[{}]}}", "1.1E2").as_str()).is_ok());
assert!(p.parse(format!("point{{[{}]}}", "1.1E23").as_str()).is_ok());
assert!(p.parse(format!("point{{[{}]}}", "1.01").as_str()).is_ok());
assert!(p.parse(format!("point{{[{}]}}", "10.1").as_str()).is_ok());
assert!(p.parse(format!("inside(point{{[{}]}})", "1.0").as_str()).is_ok());
assert!(p.parse(format!("inside(point{{[{}]}})", "1.1").as_str()).is_ok());
assert!(p.parse(format!("inside(point{{[{}]}})", "1.1e0").as_str()).is_ok());
assert!(p.parse(format!("inside(point{{[{}]}})", "1.1e2").as_str()).is_ok());
assert!(p.parse(format!("inside(point{{[{}]}})", "1.1e+2").as_str()).is_ok());
assert!(p.parse(format!("inside(point{{[{}]}})", "1.1e-2").as_str()).is_ok());
assert!(p.parse(format!("inside(point{{[{}]}})", "1.1E2").as_str()).is_ok());
assert!(p.parse(format!("inside(point{{[{}]}})", "1.1E23").as_str()).is_ok());
assert!(p.parse(format!("inside(point{{[{}]}})", "1.01").as_str()).is_ok());
assert!(p.parse(format!("inside(point{{[{}]}})", "10.1").as_str()).is_ok());
assert!(p.parse(format!("point{{[{}]}}", "1.").as_str()).is_err());
assert!(p.parse(format!("point{{[{}]}}", "01.1").as_str()).is_err());
assert!(p.parse(format!("inside(point{{[{}]}})", "1.").as_str()).is_err());
assert!(p.parse(format!("inside(point{{[{}]}})", "01.1").as_str()).is_err());
assert!(p
.parse(format!("point{{[{}]}}", "1.1E03").as_str())
.parse(format!("inside(point{{[{}]}})", "1.1E03").as_str())
.is_err());
assert!(p
.parse(format!("point{{[{}]}}", "1.1E0.3").as_str())
.parse(format!("inside(point{{[{}]}})", "1.1E0.3").as_str())
.is_err());
}
}

View File

@@ -11,23 +11,10 @@ pub enum LiteralTypes {
impl PartialEq for LiteralTypes {
fn eq(&self, other: &Self) -> bool {
match self {
LiteralTypes::String => match other {
LiteralTypes::String => true,
_ => false,
},
LiteralTypes::Int => match other {
LiteralTypes::Int => true,
_ => false,
},
LiteralTypes::Float => match other {
LiteralTypes::Float => true,
LiteralTypes::Int => true,
_ => false,
},
LiteralTypes::Bag(_) => match other {
LiteralTypes::Bag(_) => true,
_ => false,
},
LiteralTypes::String => matches!(other, LiteralTypes::String),
LiteralTypes::Int => matches!(other, LiteralTypes::Int),
LiteralTypes::Float => matches!(other, LiteralTypes::Float | LiteralTypes::Int),
LiteralTypes::Bag(_) => matches!(other, LiteralTypes::Bag(_)),
LiteralTypes::Vector(v) => match other {
LiteralTypes::Vector(ov) => {
let n = v.len();

View File

@@ -9,7 +9,7 @@ impl Validator for Projection {
fn validate(&self) -> ValidationResult {
match self {
Projection::Nifti(_, _, _) => Err("not yet implemented".to_string()),
Projection::JSON(_, _format, bag) => bag.validate(),
Projection::Json(_, _format, bag) => bag.validate(),
//FIXME: Add support for projections
/* match format.validate() {
Ok(_) => bag.validate(),
@@ -54,21 +54,14 @@ impl Validator for Bag {
}
match self {
Bag::ViewPort(bag) => bag.validate(),
Bag::Distinct(bag) => bag.validate(),
Bag::Filter(_, bag) => match bag {
None => Ok(LiteralPosition(vec![]).get_type()),
Some(b) => b.validate(),
},
Bag::Filter(_, bag) => bag.validate(),
Bag::Complement(bag) => bag.validate(),
Bag::Intersection(lh, rh) => compare_bag_types(lh, rh),
Bag::Union(lh, rh) => compare_bag_types(lh, rh),
Bag::Bag(bags) => {
for b in bags {
let t = b.validate();
if t.is_err() {
return t;
}
b.validate()?;
}
Ok(get_type())