Compare commits

...

20 Commits

Author SHA1 Message Date
521f40e36e Fixed most tests 2024-08-11 14:34:45 +02:00
c14c6cb91a Updated compiler and dependencies
* Upgraded to rustc 1.80
 * Updated dependencies
 * Fixed most linter warnings
2024-08-11 08:15:20 +02:00
2566cac17f Use iterators instead of materialized values 2021-03-01 08:40:02 +01:00
8a00180eb0 Update dependencies 2021-03-01 08:40:02 +01:00
8492fed85e Update the parser imports 2020-07-25 09:23:10 +02:00
69fbc9fdd8 Set the volume in the parser for Filters
When no volume is provided, create by default a volume containing the
whole space.

This simplifies handling later on, as no checks and on-the-fly
generation of that value are necessary. This also removes lifetime
issues, as the volume is always present with the same lifetime as the
rest of the Filter.
2020-07-25 08:49:36 +02:00
e4cbdf836f Rename library to mercator_parser 2020-04-01 18:14:45 +02:00
242de73053 Adding documentation 2020-04-01 17:06:52 +02:00
98b37e63b4 Silence warning in generated code 2020-03-17 17:18:50 +01:00
e2ea5c9ba4 Introduce Label for search within id
This allows to define a volume using an indexed object.
2020-03-16 13:48:01 +01:00
0dc31c65c6 Remove some .unwrap() calls.
The parser still contains three calls to `unwrap()`, but at this point
I have not yet figured out how to remove them.
2020-01-20 14:53:09 +01:00
e8d931b551 Prevent unsafe blocks for now 2020-01-20 14:53:09 +01:00
b8baee8019 Reduce dependencies. 2020-01-14 18:10:08 +01:00
f95aaa389b Updating test index to new format 2019-11-14 15:30:57 +01:00
a31ce4387f Prevent some allocations 2019-11-13 11:04:29 +01:00
926b879426 Fixes 2019-10-30 17:44:52 +01:00
e1547206e2 Updating to new Index<> API 2019-10-29 15:13:19 +01:00
8dd9250dbb Maybe do not wrap at the top-level 2019-10-18 12:27:30 +02:00
661abbc9f4 Add a new symbol, ViewPort
This is used to wrap the filters within an implicit bounding box,
which corresponds to the view port optionally provided by the users.

The intersection is automatically computed between the query results
and the ViewPort, whenever it is specified.
2019-10-18 12:27:30 +02:00
927feb0297 Actually use the smallest value possible 2019-10-18 12:27:30 +02:00
23 changed files with 859 additions and 611 deletions

BIN
10k.index

Binary file not shown.

View File

@@ -21,26 +21,32 @@ include = ["Cargo.toml", "README.md", "LICENSE", "ACKNOWLEDGEMENTS", "src/**/*.r
build = "build.rs" # LALRPOP preprocessing
[lib]
name = "parser"
name = "mercator_parser"
path = "src/lib.rs"
[[bin]]
name = "parser-driver"
path = "src/main.rs"
required-features = ["bin"]
[profile.release]
lto = true
[features]
bin = ["measure_time", "pretty_env_logger"]
[dependencies]
mercator_db = "^0.1"
mercator_db = "0.1"
lalrpop-util = "^0.17"
regex = "^1.2"
measure_time = "^0.6" # To measure parsing time, only required by binary
lalrpop-util = "0.20"
# Logging macros API
#log = { version = "^0.4", features = ["max_level_trace", "release_max_level_info"] }
log = { version = "^0.4", features = ["max_level_trace", "release_max_level_trace"] }
pretty_env_logger = "^0.3" # Logger implementation
#log = { version = "0.4", features = ["max_level_trace", "release_max_level_info"] }
log = { version = "0.4", features = ["max_level_trace", "release_max_level_trace"] }
# Used for main.rs
pretty_env_logger = { version = "0.5", optional = true } # Logger implementation
measure_time = { version = "0.8", optional = true } # To measure parsing time, only required by binary
[build-dependencies]
lalrpop = "^0.17.1"
lalrpop = "0.20"

View File

@@ -19,6 +19,7 @@ bag_expression
// Spatial Operators
| inside
| outside
//| shape
;
/**********************************************************************/
@@ -113,6 +114,11 @@ inside
: 'inside' '(' shapes ')'
;
/* Returns the set of positions inside the shape, (face included) */
shape
: 'shape' '(' shapes ')'
;
/**********************************************************************/
/* SHAPES */
/**********************************************************************/

View File

@@ -18,7 +18,7 @@ projection_operators
*
* If it is provided, it MUST resolve to a NUMBER. */
nifti_operator
: 'nifti' '(' ( STRING ',' )? ( selector ',' )? bag_expression ')'
: 'nifti' '(' ( selector ',' )? bag_expression ( ',' STRING )? ')'
;
json_operator

View File

@@ -22,40 +22,6 @@ This enables the index implementations to be agnostic from the underlying data s
* Rust: https://www.rust-lang.org
## Quick start
## Building from sources
To build this project, you will need to run the following:
```sh
cargo build --release
```
### Installation
To install the software on the system you can use:
```sh
cargo install --release
```
### Usage
Lorem ipsum dolor sit amet, consectetur adipiscing elit. Proin vehicula pretium
quam sit amet facilisis. Class aptent taciti sociosqu ad litora torquent per
conubia nostra, per inceptos himenaeos. Curabitur metus sapien, rhoncus vitae
eleifend nec, convallis vel nunc. Nulla metus mauris, porta eu porta eu,
vulputate et est. Suspendisse lacinia leo vel auctor aliquet. Maecenas non arcu
libero. Nulla ut eleifend dui. Cras bibendum pharetra facilisis. Proin mattis
libero non pharetra tristique. Nam massa nulla, ultrices pharetra quam a,
fermentum placerat dolor. Nullam mollis libero et neque lobortis, id dignissim
lectus dignissim. Maecenas ligula enim, congue in ornare vel, volutpat ut ante.
```sh
cargo run --release
```
## Documentation
For more information, please refer to the [documentation](https://epfl-dias.github.io/mercator_parser/).

1
book/.gitignore vendored Normal file
View File

@@ -0,0 +1 @@
book

6
book/book.toml Normal file
View File

@@ -0,0 +1,6 @@
[book]
authors = ["Lionel Sambuc"]
language = "en"
multilingual = false
src = "src"
title = "Mercator Parser"

5
book/src/SUMMARY.md Normal file
View File

@@ -0,0 +1,5 @@
# Summary
[Introduction](./introduction.md)
- [Filter Grammar](./filters.md)
- [Query Grammar](./queries.md)

10
book/src/filters.md Normal file
View File

@@ -0,0 +1,10 @@
# Filter Grammar
You will find below the definition of this DSL, for filtering data
from the index.
## filters.g4
```antlr
{{#include ../../Grammars/filters.g4}}
```

7
book/src/introduction.md Normal file
View File

@@ -0,0 +1,7 @@
# Introduction
To support volumetric queries for Mercator, a new domain-specific language (DSL) was created.
ANTLR was used to write and test the DSL, to check it stays simple
to parse and fast to execute. The actual [parser](https://epfl-dias.github.io/mercator_parser/) and interpreter are
defined in Rust, using [LALRPOP](https://docs.rs/lalrpop/0.18.1/lalrpop/).

9
book/src/queries.md Normal file
View File

@@ -0,0 +1,9 @@
# Query Grammar
You will find below the definition of this DSL, for queries. This builds on top of the [filters](filters.html) grammar.
## queries.g4
```antlr
{{#include ../../Grammars/queries.g4}}
```

2
rust-toolchain.toml Normal file
View File

@@ -0,0 +1,2 @@
[toolchain]
channel = "1.80.0"

View File

@@ -1,10 +1,13 @@
use mercator_db::SpaceObject;
use mercator_db::space;
use mercator_db::Properties;
use super::expressions::*;
use super::symbols::*;
impl Evaluator for Predicate {
fn eval(&self, object: &SpaceObject) -> bool {
impl<'e> Evaluator<'e> for Predicate {
type Object = (&'e String, &'e space::Position, &'e Properties);
fn eval(&self, object: Self::Object) -> bool {
match self {
Predicate::Not(predicate) => !predicate.eval(object),
Predicate::And(lh, rh) => lh.eval(object) && rh.eval(object),

View File

@@ -1,273 +1,358 @@
use std::collections::HashSet;
use std::collections::{HashMap, HashSet};
use std::rc::Rc;
use mercator_db::space;
use mercator_db::Core;
use mercator_db::CoreQueryParameters;
use mercator_db::SpaceObject;
use mercator_db::IterObjects;
use mercator_db::IterObjectsBySpaces;
use super::expressions::*;
use super::symbols::*;
impl From<&LiteralPosition> for space::Position {
fn from(literal: &LiteralPosition) -> Self {
let v: Vec<f64> = literal.into();
v.into()
fn group_by_space<'s>(
list: IterObjectsBySpaces<'s>,
) -> Box<dyn Iterator<Item = (&'s String, IterObjects<'s>)> + 's> {
// Filter per Properties, in order to regroup by it, then build
// a single SpatialObject per Properties.
let mut hashmap = HashMap::new();
for (space, objects) in list {
hashmap.entry(space).or_insert_with(Vec::new).push(objects);
}
Box::new(hashmap.into_iter().map(|(space, objects)| {
let objects: IterObjects = Box::new(objects.into_iter().flatten());
(space, objects)
}))
}
impl From<&LiteralNumber> for space::Coordinate {
fn from(literal: &LiteralNumber) -> Self {
match literal {
LiteralNumber::Float(f) => (*f).into(),
LiteralNumber::Int(i) => (*i as u64).into(),
}
}
fn distinct_helper(list: IterObjectsBySpaces) -> IterObjectsBySpaces {
// Make sure to collect all objects iterators per space, so that
// each space appears only once.
group_by_space(list)
// We would lose some objects otherwise when creating the
// HashMaps. Also this makes sure to keep the values are unique.
.map(|(space, iter)| {
let uniques: HashSet<_> = iter.collect();
let uniques: IterObjects = Box::new(uniques.into_iter());
(space, uniques)
})
.collect()
}
fn complement_helper(
core: &Core,
parameters: &CoreQueryParameters,
space_id: &str,
inside: Vec<SpaceObject>,
) -> mercator_db::ResultSet {
fn into_positions_hashset(
objects_by_spaces: IterObjectsBySpaces,
) -> HashMap<&String, Rc<HashSet<space::Position>>> {
// Make sure to collect all objects iterators per space, so that
// each space appears only once.
group_by_space(objects_by_spaces)
// We would lose some objects otherwise when creating the HashSets.
.map(|(space, iter)| {
let hash_set: HashSet<_> = iter.map(|(position, _)| position).collect();
(space, Rc::new(hash_set))
})
.collect::<HashMap<_, _>>()
}
// Strictly not inside nor on the surface.
// TODO: inside must contains the valid positions in all expected spaces
fn complement_helper<'h>(
core: &'h Core,
parameters: &'h CoreQueryParameters<'h>,
space_id: &'h str,
inside: IterObjectsBySpaces<'h>,
) -> mercator_db::ResultSet<'h> {
let (low, high) = parameters.db.space(space_id)?.bounding_box();
match core.get_by_shape(parameters, &space::Shape::BoundingBox(low, high), space_id) {
e @ Err(_) => e,
Ok(points) => Ok(points
.into_iter()
.filter(|o| !inside.contains(&o))
.collect::<Vec<_>>()),
}
let inside = into_positions_hashset(inside);
let points = core.get_by_shape(parameters, space::Shape::BoundingBox(low, high), space_id)?;
let results = points
.into_iter()
.filter_map(move |(space, v)| match inside.get(space) {
None => None, // Space not found, so no point might exist!
Some(volume) => {
let volume = volume.clone();
let iter: IterObjects = Box::new(v.filter(move |a| !volume.contains(&a.0)));
Some((space, iter))
}
})
.collect();
Ok(results)
}
fn distinct(core_id: &str, parameters: &CoreQueryParameters, bag: &Bag) -> mercator_db::ResultSet {
match bag.execute(core_id, parameters) {
e @ Err(_) => e,
Ok(mut v) => {
let set: HashSet<_> = v.drain(..).collect(); // dedup
v.extend(set.into_iter());
// Intersection based only on spatial positions!
fn intersect_helper<'h>(
smaller: IterObjectsBySpaces<'h>,
bigger: IterObjectsBySpaces<'h>,
) -> IterObjectsBySpaces<'h> {
let smaller = into_positions_hashset(smaller);
Ok(v)
}
}
bigger
.into_iter()
.filter_map(
move |(space, bigger_object_iter)| match smaller.get(space) {
None => None,
Some(volume) => {
let volume = volume.clone();
let filtered: IterObjects =
Box::new(bigger_object_iter.filter(move |a| volume.contains(&a.0)));
Some((space, filtered))
}
},
)
.collect()
}
fn filter(
core_id: &str,
parameters: &CoreQueryParameters,
predicate: &Option<Predicate>,
bag: &Bag,
) -> mercator_db::ResultSet {
match predicate {
None => bag.execute(core_id, parameters),
Some(predicate) => match bag.execute(core_id, parameters) {
e @ Err(_) => e,
Ok(results) => Ok(results
.into_iter()
.filter(|o| predicate.eval(&o))
.collect::<Vec<_>>()),
},
}
}
impl Bag {
fn distinct<'b>(
&'b self,
core_id: &'b str,
parameters: &'b CoreQueryParameters<'b>,
) -> mercator_db::ResultSet<'b> {
let results = self.execute(core_id, parameters)?;
fn complement(
core_id: &str,
parameters: &CoreQueryParameters,
core: &Core,
bag: &Bag,
) -> mercator_db::ResultSet {
match bag.execute(core_id, parameters) {
// FIXME: The complement of a set is computed within its definition space.
e @ Err(_) => e,
Ok(inside) => complement_helper(
Ok(distinct_helper(results))
}
fn complement<'b>(
&'b self,
core_id: &'b str,
parameters: &'b CoreQueryParameters<'b>,
core: &'b Core,
) -> mercator_db::ResultSet<'b> {
let inside = self.execute(core_id, parameters)?;
// FIXME: The complement of a set should be computed within its
// definition space. We don't know here so we use universe
complement_helper(
core,
parameters,
mercator_db::space::Space::universe().name(),
inside,
),
)
}
}
fn intersection(
core_id: &str,
parameters: &CoreQueryParameters,
rh: &Bag,
lh: &Bag,
) -> mercator_db::ResultSet {
let l = lh.execute(core_id, parameters);
if let Ok(l) = l {
let r = rh.execute(core_id, parameters);
if let Ok(r) = r {
let mut v = vec![];
fn intersection<'b>(
&'b self,
core_id: &'b str,
parameters: &'b CoreQueryParameters<'b>,
rh: &'b Bag,
) -> mercator_db::ResultSet<'b> {
let left = self.execute(core_id, parameters)?;
let right = rh.execute(core_id, parameters)?;
if rh.predict(parameters.db) < lh.predict(parameters.db) {
for o in r {
if l.contains(&o) {
v.push(o);
}
}
} else {
for o in l {
if r.contains(&o) {
v.push(o);
}
}
}
Ok(v)
let v = if rh.predict(parameters.db) < self.predict(parameters.db) {
intersect_helper(right, left)
} else {
r
}
} else {
l
}
}
intersect_helper(left, right)
};
fn union(
core_id: &str,
parameters: &CoreQueryParameters,
rh: &Bag,
lh: &Bag,
) -> mercator_db::ResultSet {
let l = lh.execute(core_id, parameters);
if let Ok(mut l) = l {
let r = rh.execute(core_id, parameters);
if let Ok(mut r) = r {
if rh.predict(parameters.db) < lh.predict(parameters.db) {
l.append(&mut r);
Ok(l)
} else {
r.append(&mut l);
Ok(r)
}
Ok(v)
}
fn union<'b>(
&'b self,
core_id: &'b str,
parameters: &'b CoreQueryParameters<'b>,
rh: &'b Bag,
) -> mercator_db::ResultSet<'b> {
let mut left = self.execute(core_id, parameters)?;
let mut right = rh.execute(core_id, parameters)?;
let union = if rh.predict(parameters.db) < self.predict(parameters.db) {
left.append(&mut right);
left
} else {
r
}
} else {
l
right.append(&mut left);
right
};
Ok(union)
}
fn filter<'b>(
&'b self,
predicate: &'b Predicate,
core_id: &'b str,
parameters: &'b CoreQueryParameters<'b>,
) -> mercator_db::ResultSet<'b> {
let results = self.execute(core_id, parameters)?;
Ok(results
.into_iter()
.map(move |(space, positions)| {
let positions = positions.collect::<Vec<_>>();
(
space,
Box::new(positions.into_iter().filter(move |(position, properties)| {
predicate.eval((space, position, properties))
})) as IterObjects,
)
})
.collect())
}
}
fn bag(core_id: &str, parameters: &CoreQueryParameters, bags: &[Bag]) -> mercator_db::ResultSet {
let mut v = vec![];
for bag in bags {
let b = bag.execute(core_id, parameters);
match b {
e @ Err(_) => {
return e;
}
Ok(mut b) => {
v.append(&mut b);
}
}
}
Ok(v)
}
fn inside(parameters: &CoreQueryParameters, core: &Core, shape: &Shape) -> mercator_db::ResultSet {
let db = parameters.db;
let param = match shape {
Shape::Point(space_id, position) => {
let space = db.space(space_id)?;
let position: Vec<f64> = position.into();
let position = space.encode(&position)?;
Ok((space_id, space::Shape::Point(position)))
}
Shape::HyperRectangle(space_id, bounding_box) => {
if bounding_box.len() != 2 {
Err("The number of position is different from 2, which is unsupported.".to_string())
} else {
impl Shape {
fn inside<'s>(
&'s self,
parameters: &'s CoreQueryParameters<'s>,
core: &'s Core,
) -> mercator_db::ResultSet<'s> {
let db = parameters.db;
let param = match self {
Shape::Point(space_id, position) => {
let space = db.space(space_id)?;
let low: Vec<f64> = (&bounding_box[0]).into();
let high: Vec<f64> = (&bounding_box[1]).into();
let low = space.encode(&low)?;
let high = space.encode(&high)?;
Ok((space_id, space::Shape::BoundingBox(low, high)))
let position: Vec<f64> = position.into();
let position = space.encode(&position)?;
Ok((space_id, space::Shape::Point(position)))
}
}
Shape::HyperSphere(space_id, position, radius) => {
let space = db.space(space_id)?;
let position: Vec<f64> = position.into();
let position = space.encode(&position)?;
let mut r = vec![];
for _ in 0..position.dimensions() {
r.push(radius.into());
Shape::HyperRectangle(space_id, bounding_box) => {
if bounding_box.len() != 2 {
//FIXME: Support arbitrary HyperRectangles
Err(
"The number of position is different from 2, which is unsupported."
.to_string(),
)
} else {
let space = db.space(space_id)?;
let low: Vec<f64> = (&bounding_box[0]).into();
let high: Vec<f64> = (&bounding_box[1]).into();
let low = space.encode(&low)?;
let high = space.encode(&high)?;
Ok((space_id, space::Shape::BoundingBox(low, high)))
}
}
let radius = space.encode(&r)?[0];
Shape::HyperSphere(space_id, position, radius) => {
let space = db.space(space_id)?;
let position: Vec<f64> = position.into();
let position = space.encode(&position)?;
//FIXME: RADIUS IS A LENGTH, HOW TO ENCODE IT INTO THE SPACE?
Ok((space_id, space::Shape::HyperSphere(position, radius)))
// We have to provide a position with all the dimensions
// for the encoding to work as expected.
let mut r = vec![0f64; position.dimensions()];
r[0] = radius.into();
let radius = space.encode(&r)?[0];
Ok((space_id, space::Shape::HyperSphere(position, radius)))
}
Shape::Label(_, id) => {
// Not a real shape, so short circuit and return.
return core.get_by_label(parameters, id);
}
Shape::Nifti(_space_id) => Err("Inside-Nifti: not yet implemented".to_string()),
};
match param {
Ok((space_id, shape)) => core.get_by_shape(parameters, shape, space_id),
Err(e) => Err(e),
}
Shape::Nifti(_space_id) => Err("Inside-Nifti: not yet implemented".to_string()),
};
}
match param {
Ok((space_id, shape)) => core.get_by_shape(parameters, &shape, space_id),
Err(e) => Err(e),
fn outside<'s>(
&'s self,
parameters: &'s CoreQueryParameters<'s>,
core: &'s Core,
) -> mercator_db::ResultSet<'s> {
let (space_id, inside) = match self {
Shape::Point(space_id, position) => {
let position: Vec<f64> = position.into();
let positions = vec![position.into()];
let inside = core.get_by_positions(parameters, positions, space_id)?;
Ok((space_id, inside))
}
Shape::HyperRectangle(space_id, bounding_box) => {
// We need to adapt the bounding_box to ensure the
// surface will not hit as part of the inside set, so we
// compute the biggest bounding box contained within the
// given box.
// Smallest increment possible
let mut increment = Vec::with_capacity(bounding_box[0].dimensions());
for _ in 0..bounding_box[0].dimensions() {
increment.push(f64::EPSILON);
}
// Add it to the lower bound
let mut low: space::Position = (&bounding_box[0]).into();
low += increment.clone().into();
// Substract it from the upper bound
let mut high: space::Position = (&bounding_box[1]).into();
high -= increment.into();
let inside =
core.get_by_shape(parameters, space::Shape::BoundingBox(low, high), space_id)?;
Ok((space_id, inside))
}
Shape::HyperSphere(space_id, center, radius) => {
// Smallest decrement possible, to exclude the surface
let mut radius: f64 = radius.into();
radius -= f64::EPSILON;
let center: space::Position = center.into();
let inside = core.get_by_shape(
parameters,
space::Shape::HyperSphere(center, radius.into()),
space_id,
)?;
Ok((space_id, inside))
}
Shape::Label(space_id, id) => {
let inside = core.get_by_label(parameters, id)?;
Ok((space_id, inside))
}
Shape::Nifti(_space_id) => Err("Outside-nifti: not yet implemented".to_string()),
}?;
complement_helper(core, parameters, space_id, inside)
}
}
fn outside(parameters: &CoreQueryParameters, core: &Core, shape: &Shape) -> mercator_db::ResultSet {
match shape {
Shape::Point(space_id, position) => {
let position: Vec<f64> = position.into();
match core.get_by_positions(parameters, &[position.into()], space_id) {
e @ Err(_) => e,
Ok(inside) => complement_helper(core, parameters, space_id, inside),
}
}
Shape::HyperRectangle(space_id, bounding_box) => {
// We need to adapt the bounding_box to ensure the
// surface will not hit as part of the inside set, so we
// compute the biggest bounding box contained within the
// given box.
// Smallest increment possible
let mut increment = Vec::with_capacity(bounding_box[0].dimensions());
for _ in 0..bounding_box[0].dimensions() {
increment.push(std::f64::EPSILON);
}
// Add it to the lower bound
let mut low: space::Position = (&bounding_box[0]).into();
low += increment.clone().into();
// Substract it from the upper bound
let mut high: space::Position = (&bounding_box[1]).into();
high -= increment.into();
match core.get_by_shape(parameters, &space::Shape::BoundingBox(low, high), space_id) {
e @ Err(_) => e,
Ok(inside) => complement_helper(core, parameters, space_id, inside),
}
}
Shape::HyperSphere(space_id, center, radius) => {
// Smallest decrement possible, to exclude the surface
let mut radius: f64 = radius.into();
radius -= std::f64::EPSILON;
let center: space::Position = center.into();
match core.get_by_shape(
parameters,
&space::Shape::HyperSphere(center, radius.into()),
space_id,
) {
e @ Err(_) => e,
Ok(inside) => complement_helper(core, parameters, space_id, inside),
}
}
Shape::Nifti(_space_id) => Err("Outside-nifti: not yet implemented".to_string()),
fn filter<'c>(
core_id: &'c str,
parameters: &'c CoreQueryParameters<'c>,
predicate: &'c Option<Predicate>,
bag: &'c Bag,
) -> mercator_db::ResultSet<'c> {
match predicate {
None => bag.execute(core_id, parameters),
Some(predicate) => bag.filter(predicate, core_id, parameters),
}
}
impl Executor for Projection {
type ResultSet = mercator_db::ResultSet;
fn bag<'c>(
core_id: &'c str,
parameters: &'c CoreQueryParameters<'c>,
bags: &'c [Bag],
) -> mercator_db::ResultSet<'c> {
let mut results = Vec::new();
for bag in bags {
let mut result = bag.execute(core_id, parameters)?;
results.append(&mut result);
}
fn execute(&self, core_id: &str, parameters: &CoreQueryParameters) -> Self::ResultSet {
Ok(results)
}
impl<'e> Executor<'e> for Projection {
type ResultSet = mercator_db::ResultSet<'e>;
fn execute(
&'e self,
core_id: &'e str,
parameters: &'e CoreQueryParameters<'e>,
) -> Self::ResultSet {
match self {
Projection::Nifti(_, _, _bag) => Err("Proj-Nifti: not yet implemented".to_string()),
Projection::JSON(_, _format, bag) => {
Projection::Json(_, _format, bag) => {
bag.execute(core_id, parameters)
// FIXME: Add projections here
}
@@ -275,25 +360,29 @@ impl Executor for Projection {
}
}
impl Executor for Bag {
type ResultSet = mercator_db::ResultSet;
impl<'e> Executor<'e> for Bag {
type ResultSet = mercator_db::ResultSet<'e>;
fn execute(&self, core_id: &str, parameters: &CoreQueryParameters) -> Self::ResultSet {
fn execute(
&'e self,
core_id: &'e str,
parameters: &'e CoreQueryParameters<'e>,
) -> Self::ResultSet {
let core = parameters.db.core(core_id)?;
match self {
Bag::Distinct(bag) => distinct(core_id, parameters, bag),
Bag::Distinct(bag) => bag.distinct(core_id, parameters),
Bag::Filter(predicate, bag) => filter(core_id, parameters, predicate, bag),
Bag::Complement(bag) => complement(core_id, parameters, core, bag),
Bag::Intersection(lh, rh) => intersection(core_id, parameters, rh, lh),
Bag::Union(lh, rh) => union(core_id, parameters, rh, lh),
Bag::Complement(bag) => bag.complement(core_id, parameters, core),
Bag::Intersection(lh, rh) => lh.intersection(core_id, parameters, rh),
Bag::Union(lh, rh) => lh.union(core_id, parameters, rh),
Bag::Bag(list) => bag(core_id, parameters, list),
Bag::Inside(shape) => inside(parameters, core, shape),
Bag::Inside(shape) => shape.inside(parameters, core),
Bag::Outside(shape) => {
//FIXME: This is currently computed as the complement of the values within the shape, except its surface.
// Should this be instead a list of positions within the shape?
//FIXME: Should we use the Shape's Space to get the maximum bounds or the output Space requested?
outside(parameters, core, shape)
shape.outside(parameters, core)
}
}
}

View File

@@ -1,6 +1,5 @@
use mercator_db::CoreQueryParameters;
use mercator_db::DataBase;
use mercator_db::SpaceObject;
pub trait Validator {
type ValidationResult;
@@ -12,12 +11,18 @@ pub trait Predictor {
fn predict(&self, db: &DataBase) -> Result<f64, String>;
}
pub trait Executor {
pub trait Executor<'e> {
type ResultSet;
fn execute(&self, core_id: &str, parameters: &CoreQueryParameters) -> Self::ResultSet;
fn execute(
&'e self,
core_id: &'e str,
parameters: &'e CoreQueryParameters<'e>,
) -> Self::ResultSet;
}
pub trait Evaluator {
fn eval(&self, object: &SpaceObject) -> bool;
pub trait Evaluator<'e> {
type Object;
fn eval(&self, object: Self::Object) -> bool;
}

View File

@@ -1,15 +1,57 @@
#![forbid(unsafe_code)]
//! # Mercator Parser
//!
//! Query parser for Mercator.
//!
//! ## Mercator: Spatial Index
//!
//! **Mercator** is a spatial *volumetric* index for the
//! [Human Brain Project]. It is a component of the [Knowledge Graph]
//! service, which provides the spatial anchoring for the metadata
//! registered as well as processes the volumetric queries.
//!
//! It is build on top of the Iron Sea database toolkit.
//!
//! ## Iron Sea: Database Toolkit
//! **Iron Sea** provides a set of database engine bricks, which can be
//! combined and applied on arbitrary data structures.
//!
//! Unlike a traditional database, it does not assume a specific
//! physical structure for the tables nor the records, but relies on the
//! developer to provide a set of extractor functions which are used by
//! the specific indices provided.
//!
//! This enables the index implementations to be agnostic from the
//! underlying data structure, and re-used.
//!
//! [Human Brain Project]: http://www.humanbrainproject.eu
//! [Knowledge Graph]: http://www.humanbrainproject.eu/en/explore-the-brain/search/
#[macro_use]
extern crate lalrpop_util;
lalrpop_mod!(#[allow(clippy::all)] pub queries); // synthesized by LALRPOP
lalrpop_mod!(#[allow(clippy::all,unused_parens)] pub queries); // synthesized by LALRPOP
// Note: We do not enable for the whole library deny(missing_docs), as
// it requires the automatically generated parser to be documented
// as well.
// Instead we enable it per modules below, except for the tests.
//#[warn(missing_docs)]
mod evaluators;
//#[warn(missing_docs)]
mod executors;
//#[warn(missing_docs)]
mod expressions;
//#[warn(missing_docs)]
mod predictors;
//#[warn(missing_docs)]
mod validators;
//#[warn(missing_docs)]
mod symbols;
//#[warn(missing_docs)]
mod types;
pub use expressions::Executor;
@@ -17,6 +59,8 @@ pub use expressions::Predictor;
pub use expressions::Validator;
pub use queries::FiltersParser;
pub use queries::QueryParser;
pub use symbols::Bag;
pub use symbols::Projection;
pub use validators::ValidationResult;
#[cfg(test)]

View File

@@ -1,3 +1,5 @@
#![forbid(unsafe_code)]
#[macro_use]
extern crate measure_time;
@@ -5,11 +7,11 @@ use std::io;
use mercator_db::CoreQueryParameters;
use mercator_db::DataBase;
use parser::Executor;
use parser::FiltersParser;
use parser::Predictor;
use parser::QueryParser;
use parser::Validator;
use mercator_parser::Executor;
use mercator_parser::FiltersParser;
use mercator_parser::Predictor;
use mercator_parser::QueryParser;
use mercator_parser::Validator;
fn main() {
// If RUST_LOG is unset, set it to INFO, otherwise keep it as-is.
@@ -24,14 +26,16 @@ fn main() {
let db;
{
info_time!("Loading database index");
db = DataBase::load(&[&format!("{}.index", core)]).unwrap();
db = DataBase::load(&[&format!("{}.index", core)])
.unwrap_or_else(|e| panic!("Unable to load database '{}': {}", core, e));
}
let parameters = CoreQueryParameters {
db: &db,
output_space: None,
threshold_volume: None,
resolution: None,
view_port: &None,
resolution: &Some(vec![0]),
};
let parser = QueryParser::new();
let parser = FiltersParser::new();
@@ -51,7 +55,7 @@ fn main() {
}
info_time!("Interpretation");
let mut parse;
let parse;
{
info_time!("Parsing");
parse = parser.parse(&input);
@@ -89,12 +93,17 @@ fn main() {
execute = t.execute(core, &parameters);
}
if let Ok(r) = execute {
//let r = model::to_spatial_objects(&db, r);
info!("Execution: \n{:#?}", r);
info!("NB results: {:?}", r.len());
} else {
info!("Execution: \n{:?}", execute);
match execute {
Ok(r) => {
let r = r
.into_iter()
.map(|(space, objects)| (space, objects.collect::<Vec<_>>()))
.collect::<Vec<_>>();
info!("Execution: \n{:#?}", r);
info!("NB results: {:?}", r[0].1.len());
}
Err(e) => info!("Execution: \n{:?}", e),
}
}
}

View File

@@ -7,7 +7,7 @@ impl Predictor for Projection {
fn predict(&self, db: &DataBase) -> Result<f64, String> {
match self {
Projection::Nifti(_, _, bag) => bag.predict(db),
Projection::JSON(_, _, bag) => bag.predict(db),
Projection::Json(_, _, bag) => bag.predict(db),
}
}
}

View File

@@ -51,7 +51,7 @@ JsonOperator: symbols::Projection = {
None => Space::universe().name().clone(),
};
symbols::Projection::JSON(space_id, f, b)
symbols::Projection::Json(space_id, f, b)
}
};
@@ -135,7 +135,9 @@ Aggregations: symbols::Aggregation = {
//*********************************************************************/
// SELECTING / FILTERING DATA */
//*********************************************************************/
pub Filters = { Bags };
pub Filters: symbols::Bag = {
<Bags>
};
// All these expressions generate bags.
Bags: symbols::Bag = {
@@ -149,6 +151,8 @@ Bags: symbols::Bag = {
// Spatial Operators
Inside,
Outside,
// returns the positions or volume of the shape, instead of the data points in or outside it.
//Shape,
};
//*********************************************************************/
@@ -184,8 +188,24 @@ Filter: symbols::Bag = {
// "filter" "(" <p:Predicates> "," <b:Bags> ")" =>
"filter" "(" <b:Bags> ")" =>
symbols::Bag::Filter(None, Box::new(b)),
"filter" "(" <p:Predicates> <b:("," <Bags> )?> ")" =>
symbols::get_filter(p, b)
"filter" "(" <p:Predicates> <b:("," <Bags> )?> ")" => {
match b {
None => {
let (low, high) = Space::universe().bounding_box();
let low: Vec<_> = low.into();
let high: Vec<_> = high.into();
let shape = symbols::Shape::HyperRectangle(
Space::universe().name().clone(),
vec![
symbols::LiteralPosition(low.into_iter().map(symbols::LiteralNumber::Float).collect()),
symbols::LiteralPosition(high.into_iter().map(symbols::LiteralNumber::Float).collect()),
],
);
symbols::Bag::Filter(Some(p), Box::new(symbols::Bag::Inside(shape)))
}
Some(b) => symbols::Bag::Filter(Some(p), Box::new(b)),
}
},
};
Predicates: symbols::Predicate = {
@@ -268,6 +288,12 @@ Inside: symbols::Bag = {
symbols::Bag::Inside(<>)
};
//FIXME: ADD A SHAPE VARIANT WHICH JUST RETURNS ALL THE POSITIONS OF THAT SHAPE
//Shape: symbols::Bag = {
// <Shapes> =>
// symbols::Bag::Shape(<>)
//}
//*********************************************************************/
// SHAPES */
//*********************************************************************/
@@ -278,6 +304,7 @@ Shapes: symbols::Shape = {
Point,
HyperRectangle,
HyperSphere,
Label,
Nifti
};
@@ -329,6 +356,21 @@ Point: symbols::Shape = {
}
};
// Filter by Label, a.k.a use an ID to define a volume, and use that volume to
// select data points.
Label: symbols::Shape = {
"label" "{"
<id:String>
<rs:( "," <String> )?>
"}" => {
let space_id = match rs {
Some(id) => id,
None => Space::universe().name().clone(),
};
symbols::Shape::Label(space_id, id)
}
};
// Define a shape as the non-zero values in a NIfTI object, defined by
// nifti{
// spaceId: string,

View File

@@ -1,7 +1,7 @@
use std::cmp::Ordering;
use mercator_db::space;
use mercator_db::SpaceObject;
use mercator_db::Properties;
pub use super::types::*;
@@ -11,14 +11,14 @@ pub use super::types::*;
#[derive(Clone, Debug)]
pub enum Projection {
Nifti(String, LiteralSelector, Bag),
JSON(String, JsonValue, Bag),
Json(String, JsonValue, Bag),
}
impl Projection {
pub fn space(&self) -> &String {
match self {
Projection::Nifti(space, _, _) => &space,
Projection::JSON(space, _, _) => &space,
Projection::Nifti(space, _, _) => space,
Projection::Json(space, _, _) => space,
}
}
}
@@ -57,6 +57,7 @@ struct Transform {
/**********************************************************************/
#[derive(Clone, Debug)]
pub enum Bag {
// Bags
Distinct(Box<Bag>),
Filter(Option<Predicate>, Box<Bag>),
Complement(Box<Bag>),
@@ -120,6 +121,7 @@ pub enum Shape {
Point(String, LiteralPosition),
HyperRectangle(String, Vec<LiteralPosition>),
HyperSphere(String, LiteralPosition, LiteralNumber),
Label(String, String),
Nifti(String),
}
@@ -129,13 +131,14 @@ impl Shape {
Shape::Point(space, _) => space,
Shape::HyperRectangle(space, _) => space,
Shape::HyperSphere(space, _, _) => space,
Shape::Label(space, _) => space,
Shape::Nifti(space) => space,
}
}
pub fn volume(&self) -> f64 {
match self {
Shape::Point(_, _) => 1.0, // This is the smallest non-zero volume possible //TODO DOUBLE CHECK IT IS TRUE
Shape::Point(_, _) => f64::EPSILON, // The smallest non-zero volume possible
Shape::HyperRectangle(_space, pos) => {
//TODO: At this time, only aligned to the axes, defined by two points, hyperrectangles are supported.
assert_eq!(pos.len(), 2);
@@ -197,9 +200,17 @@ impl Shape {
a * radius.powi(i as i32)
}
Shape::Nifti(_) => unimplemented!(),
Shape::Label(_, _) => {
// FIXME: Needs to find a way to figure out the approximate volume of this specific ID, or return MAX or MIN..
f64::EPSILON
}
Shape::Nifti(_) => unimplemented!("Nifti"),
}
}
/// Materialise the shape into the set of positions it covers.
///
/// # Panics
///
/// Not implemented yet: calling this panics unconditionally with the
/// message `"rasterize"`.
pub fn rasterize<'e>(&self) -> mercator_db::ResultSet<'e> {
    unimplemented!("rasterize")
}
}
/**********************************************************************/
@@ -213,7 +224,10 @@ pub enum Position {
}
impl Position {
pub fn value(&self, object: &SpaceObject) -> LiteralPosition {
pub fn value<'e>(
&self,
object: (&'e String, &'e space::Position, &'e Properties),
) -> LiteralPosition {
match self {
Position::Literal(literal) => literal.clone(),
Position::Selector(selector) => selector.position(object),
@@ -223,7 +237,9 @@ impl Position {
Ordering::Greater => 1,
Ordering::Less => -1,
};
LiteralPosition(vec![LiteralNumber::Int(x)])
let v = vec![LiteralNumber::Int(x)];
LiteralPosition(v)
}
}
}
@@ -242,14 +258,33 @@ pub enum LiteralNumber {
Float(f64),
}
impl From<&LiteralNumber> for Vec<f64> {
impl From<&LiteralNumber> for f64 {
fn from(l: &LiteralNumber) -> Self {
let r = match l {
match l {
LiteralNumber::Int(x) => (*x) as f64,
LiteralNumber::Float(x) => *x,
};
}
}
}
vec![r]
impl From<LiteralNumber> for f64 {
    /// By-value conversion; simply delegates to the by-reference
    /// implementation so the logic lives in one place.
    fn from(l: LiteralNumber) -> Self {
        f64::from(&l)
    }
}
impl From<&LiteralNumber> for space::Coordinate {
    /// Convert a parsed numeric literal into a space coordinate.
    fn from(literal: &LiteralNumber) -> Self {
        // Both payloads are Copy, so we can match on the dereferenced value.
        match *literal {
            LiteralNumber::Float(f) => f.into(),
            // NOTE(review): a negative integer wraps around when cast to
            // u64 — confirm integer literals reaching here are guaranteed
            // non-negative.
            LiteralNumber::Int(i) => (i as u64).into(),
        }
    }
}
impl From<LiteralNumber> for space::Coordinate {
    /// By-value conversion; simply delegates to the by-reference
    /// implementation so the logic lives in one place.
    fn from(literal: LiteralNumber) -> Self {
        Self::from(&literal)
    }
}
@@ -274,7 +309,7 @@ pub struct LiteralPosition(pub Vec<LiteralNumber>);
impl LiteralPosition {
pub fn get_type(&self) -> LiteralTypes {
let Self(v) = self;
let mut t = Vec::new();
let mut t = Vec::with_capacity(v.len());
for n in v {
t.push(match n {
@@ -307,29 +342,73 @@ impl LiteralPosition {
}
}
impl From<&LiteralNumber> for f64 {
fn from(l: &LiteralNumber) -> Self {
match l {
LiteralNumber::Int(x) => (*x) as f64,
LiteralNumber::Float(x) => *x,
impl From<&LiteralPosition> for Vec<f64> {
    /// Flatten a literal position into its raw `f64` coordinates.
    fn from(l: &LiteralPosition) -> Self {
        let LiteralPosition(v) = l;
        // `map` preserves the iterator's exact size hint, so `collect()`
        // pre-sizes the vector and allocates once — identical in both
        // speed and space to the manual `with_capacity` + push loop this
        // replaces, but without the boilerplate.
        v.iter().map(|literal| literal.into()).collect()
    }
}
impl From<&LiteralPosition> for Vec<f64> {
fn from(l: &LiteralPosition) -> Self {
let LiteralPosition(v) = l;
let mut r = Vec::with_capacity(v.len());
impl From<LiteralPosition> for Vec<f64> {
fn from(l: LiteralPosition) -> Self {
(&l).into()
}
}
for x in v {
let x = match x {
LiteralNumber::Int(x) => (*x) as f64,
LiteralNumber::Float(x) => *x,
};
r.push(x);
impl From<&Vec<f64>> for LiteralPosition {
fn from(v: &Vec<f64>) -> Self {
// Speed-wise this should be the same, the downside is the newly
// allocated vector might be suboptimal in terms of space.
//LiteralPosition(v.iter().map(|value| LiteralNumber::Float(*value)).collect())
let mut lv = Vec::with_capacity(v.len());
for value in v {
lv.push(LiteralNumber::Float(*value));
}
r
LiteralPosition(lv)
}
}
impl From<Vec<f64>> for LiteralPosition {
    /// By-value conversion; simply delegates to the by-reference
    /// implementation so the logic lives in one place.
    fn from(v: Vec<f64>) -> Self {
        Self::from(&v)
    }
}
impl From<&space::Position> for LiteralPosition {
    /// Convert a space position into a literal position by going through
    /// its raw `f64` coordinates.
    fn from(position: &space::Position) -> Self {
        Vec::<f64>::from(position).into()
    }
}
impl From<space::Position> for LiteralPosition {
    /// By-value conversion; simply delegates to the by-reference
    /// implementation so the logic lives in one place.
    fn from(position: space::Position) -> Self {
        Self::from(&position)
    }
}
impl From<&LiteralPosition> for space::Position {
    /// Convert a literal position into a space position by going through
    /// its raw `f64` coordinates.
    fn from(position: &LiteralPosition) -> Self {
        Vec::<f64>::from(position).into()
    }
}
impl From<LiteralPosition> for space::Position {
    /// By-value conversion; simply delegates to the by-reference
    /// implementation so the logic lives in one place.
    fn from(position: LiteralPosition) -> Self {
        Self::from(&position)
    }
}
@@ -379,59 +458,30 @@ impl LiteralSelector {
}
// FIXME: THIS IS SOOO WRONG
pub fn position(&self, object: &SpaceObject) -> LiteralPosition {
pub fn position<'e>(
&self,
object: (&'e String, &'e space::Position, &'e Properties),
) -> LiteralPosition {
println!("LiteralSelector.position(): {:?}", self);
let v: Vec<f64> = object.position.clone().into();
LiteralPosition(v.into_iter().map(LiteralNumber::Float).collect::<Vec<_>>())
object.1.into()
}
// FIXME: THIS IS SOOO WRONG
pub fn str(&self, object: &SpaceObject) -> String {
pub fn str<'e>(&self, object: (&'e String, &'e space::Position, &'e Properties)) -> &'e str {
let LiteralSelector(v) = self;
let last = v.last();
if let Some(Field(name, _)) = last {
if name == "id" {
return object.value.id().into();
return object.2.id();
} else if name == "type" {
return object.value.type_name().into();
return object.2.type_name();
} else if name == "reference_space" {
return object.space_id.clone();
return object.0;
}
}
println!("LiteralSelector.str(): {:?}", self);
unimplemented!();
}
}
// The logic was getting a bit too complex to be embedded directly into the
// grammar definition.
pub fn get_filter(p: Predicate, b: Option<Bag>) -> Bag {
match b {
Some(b) => Bag::Filter(Some(p), Box::new(b)),
None => {
let (low, high) = space::Space::universe().bounding_box();
let low: Vec<_> = low.into();
let high: Vec<_> = high.into();
let bb = Shape::HyperRectangle(
space::Space::universe().name().clone(),
vec![
LiteralPosition(
low.into_iter()
.map(LiteralNumber::Float)
.collect::<Vec<_>>(),
),
LiteralPosition(
high.into_iter()
.map(LiteralNumber::Float)
.collect::<Vec<_>>(),
),
],
);
Bag::Filter(Some(p), Box::new(Bag::Inside(bb)))
}
unimplemented!("Unknown Field");
}
}

View File

@@ -16,7 +16,7 @@ mod parsing {
fn query() {
let p = query_parser();
let nifti = "nifti(point{[0]})";
let nifti = "nifti(inside(point{[0]}))";
// Option is Empty
assert!(p.parse("").is_ok());
@@ -47,14 +47,14 @@ mod parsing {
let p = query_parser();
// Check allowed forms of the operator
assert!(p.parse("nifti(point{[0]})").is_ok());
assert!(p.parse("nifti(.properties.id, point{[0]})").is_ok());
assert!(p.parse("nifti(inside(point{[0]}))").is_ok());
assert!(p.parse("nifti(.properties.id, inside(point{[0]}))").is_ok());
unimplemented!(); // TO REMEMBER SOME WORK IS DUE HERE.
//FIXME: THIS SHOULD BE ALLOWED
assert!(p.parse("nifti(2, point{[0]})").is_ok());
assert!(p.parse("nifti(2.23, point{[0]})").is_ok());
assert!(p.parse("nifti(2, inside(point{[0]}))").is_ok());
assert!(p.parse("nifti(2.23, inside(point{[0]}))").is_ok());
//FIXME: SYNTAX OK, TYPE NOT
assert!(p.parse("nifti(point{[0], \"space\"})").is_err());
@@ -64,16 +64,16 @@ mod parsing {
fn json_operator() {
let p = query_parser();
assert!(p.parse("json(true, point{[0]})").is_ok());
assert!(p.parse("json(23, point{[0]})").is_ok());
assert!(p.parse("json([23, 24], point{[0]})").is_ok());
assert!(p.parse("json([23, count(.)], point{[0]})").is_ok());
assert!(p.parse("json(true, inside(point{[0]}))").is_ok());
assert!(p.parse("json(23, inside(point{[0]}))").is_ok());
assert!(p.parse("json([23, 24], inside(point{[0]}))").is_ok());
assert!(p.parse("json([23, count(.)], inside(point{[0]}))").is_ok());
assert!(p.parse("json(true)").is_err());
assert!(p.parse("json(true,)").is_err());
assert!(p.parse("json(, point{[0]})").is_err());
assert!(p.parse("json(point{[0]})").is_err());
assert!(p.parse("json(, inside(point{[0]}))").is_err());
assert!(p.parse("json(inside(point{[0]}))").is_err());
assert!(p.parse("json(true, point)").is_err());
}
@@ -83,24 +83,24 @@ mod parsing {
let p = query_parser();
assert!(p
.parse(format!("json({}, point{{[0]}})", "true").as_str())
.parse(format!("json({}, inside(point{{[0]}}))", "true").as_str())
.is_ok());
assert!(p
.parse(format!("json({}, point{{[0]}})", "false").as_str())
.parse(format!("json({}, inside(point{{[0]}}))", "false").as_str())
.is_ok());
assert!(p
.parse(format!("json({}, point{{[0]}})", "null").as_str())
.parse(format!("json({}, inside(point{{[0]}}))", "null").as_str())
.is_ok());
// Incorrect capitalisation
assert!(p
.parse(format!("json({}, point{{[0]}})", "True").as_str())
.parse(format!("json({}, inside(point{{[0]}}))", "True").as_str())
.is_err());
assert!(p
.parse(format!("json({}, point{{[0]}})", "False").as_str())
.parse(format!("json({}, inside(point{{[0]}}))", "False").as_str())
.is_err());
assert!(p
.parse(format!("json({}, point{{[0]}})", "Null").as_str())
.parse(format!("json({}, inside(point{{[0]}}))", "Null").as_str())
.is_err());
}
@@ -109,24 +109,24 @@ mod parsing {
let p = query_parser();
assert!(p
.parse(format!("json({}, point{{[0]}})", "{}").as_str())
.parse(format!("json({}, inside(point{{[0]}}))", "{}").as_str())
.is_ok());
assert!(p
.parse(format!("json({}, point{{[0]}})", "{\"field\": 0}").as_str())
.parse(format!("json({}, inside(point{{[0]}}))", "{\"field\": 0}").as_str())
.is_ok());
assert!(p
.parse(format!("json({}, point{{[0]}})", "{\"field\": 0, \"field1\": 1}").as_str())
.parse(format!("json({}, inside(point{{[0]}}))", "{\"field\": 0, \"field1\": 1}").as_str())
.is_ok());
assert!(p
.parse(format!("json({}, point{{[0]}})", "{\"field\": [0, 1]}").as_str())
.parse(format!("json({}, inside(point{{[0]}}))", "{\"field\": [0, 1]}").as_str())
.is_ok());
assert!(p
.parse(format!("json({}, point{{[0]}})", "{\"field\": {\"field1\": 0}}").as_str())
.parse(format!("json({}, inside(point{{[0]}}))", "{\"field\": {\"field1\": 0}}").as_str())
.is_ok());
assert!(p
.parse(
format!(
"json({}, point{{[0]}})",
"json({}, inside(point{{[0]}}))",
"{\"field\": [{\"field1\": 0}, {\"field1\": 1}]}"
)
.as_str()
@@ -139,25 +139,25 @@ mod parsing {
let p = query_parser();
assert!(p
.parse(format!("json({}, point{{[0]}})", "{:}").as_str())
.parse(format!("json({}, inside(point{{[0]}}))", "{:}").as_str())
.is_err());
assert!(p
.parse(format!("json({}, point{{[0]}})", "{field: 0}").as_str())
.parse(format!("json({}, inside(point{{[0]}}))", "{field: 0}").as_str())
.is_err());
assert!(p
.parse(format!("json({}, point{{[0]}})", "{0: 0}").as_str())
.parse(format!("json({}, inside(point{{[0]}}))", "{0: 0}").as_str())
.is_err());
assert!(p
.parse(format!("json({}, point{{[0]}})", "{\"0\": }").as_str())
.parse(format!("json({}, inside(point{{[0]}}))", "{\"0\": }").as_str())
.is_err());
assert!(p
.parse(format!("json({}, point{{[0]}})", "{\"0\": 0 }").as_str())
.parse(format!("json({}, inside(point{{[0]}}))", "{\"0\": 0 }").as_str())
.is_ok());
assert!(p
.parse(format!("json({}, point{{[0]}})", "{\"field\": 0 }").as_str())
.parse(format!("json({}, inside(point{{[0]}}))", "{\"field\": 0 }").as_str())
.is_ok());
assert!(p
.parse(format!("json({}, point{{[0]}})", "{\"field\": \"0\" }").as_str())
.parse(format!("json({}, inside(point{{[0]}}))", "{\"field\": \"0\" }").as_str())
.is_ok());
}
@@ -166,20 +166,20 @@ mod parsing {
let p = query_parser();
assert!(p
.parse(format!("json({}, point{{[0]}})", "[, 0]").as_str())
.parse(format!("json({}, inside(point{{[0]}}))", "[, 0]").as_str())
.is_err());
assert!(p
.parse(format!("json({}, point{{[0]}})", "[]").as_str())
.parse(format!("json({}, inside(point{{[0]}}))", "[]").as_str())
.is_ok());
assert!(p
.parse(format!("json({}, point{{[0]}})", "[0]").as_str())
.parse(format!("json({}, inside(point{{[0]}}))", "[0]").as_str())
.is_ok());
assert!(p
.parse(format!("json({}, point{{[0]}})", "[0, 1]").as_str())
.parse(format!("json({}, inside(point{{[0]}}))", "[0, 1]").as_str())
.is_ok());
assert!(p
.parse(
format!("json({}, point{{[0]}})", "[{\"field\": 0}, {\"field\": 1}]").as_str()
format!("json({}, inside(point{{[0]}}))", "[{\"field\": 0}, {\"field\": 1}]").as_str()
)
.is_ok());
}
@@ -190,40 +190,40 @@ mod parsing {
// count ()
assert!(p
.parse(format!("json({}, point{{[0]}})", "count()").as_str())
.parse(format!("json({}, inside(point{{[0]}}))", "count()").as_str())
.is_err());
assert!(p
.parse(format!("json({}, point{{[0]}})", "count(distinct)").as_str())
.parse(format!("json({}, inside(point{{[0]}}))", "count(distinct)").as_str())
.is_err());
assert!(p
.parse(format!("json({}, point{{[0]}})", "count(.)").as_str())
.parse(format!("json({}, inside(point{{[0]}}))", "count(.)").as_str())
.is_ok());
assert!(p
.parse(format!("json({}, point{{[0]}})", "count(distinct .)").as_str())
.parse(format!("json({}, inside(point{{[0]}}))", "count(distinct .)").as_str())
.is_ok());
// sum ()
assert!(p
.parse(format!("json({}, point{{[0]}})", "sum()").as_str())
.parse(format!("json({}, inside(point{{[0]}}))", "sum()").as_str())
.is_err());
assert!(p
.parse(format!("json({}, point{{[0]}})", "sum(.)").as_str())
.parse(format!("json({}, inside(point{{[0]}}))", "sum(.)").as_str())
.is_ok());
// min ()
assert!(p
.parse(format!("json({}, point{{[0]}})", "min()").as_str())
.parse(format!("json({}, inside(point{{[0]}}))", "min()").as_str())
.is_err());
assert!(p
.parse(format!("json({}, point{{[0]}})", "min(.)").as_str())
.parse(format!("json({}, inside(point{{[0]}}))", "min(.)").as_str())
.is_ok());
// max ()
assert!(p
.parse(format!("json({}, point{{[0]}})", "max()").as_str())
.parse(format!("json({}, inside(point{{[0]}}))", "max()").as_str())
.is_err());
assert!(p
.parse(format!("json({}, point{{[0]}})", "max(.)").as_str())
.parse(format!("json({}, inside(point{{[0]}}))", "max(.)").as_str())
.is_ok());
}
@@ -233,42 +233,42 @@ mod parsing {
// Integers
assert!(p
.parse(format!("json({}, point{{[0]}})", "0").as_str())
.parse(format!("json({}, inside(point{{[0]}}))", "0").as_str())
.is_ok());
assert!(p
.parse(format!("json({}, point{{[0]}})", "+0").as_str())
.parse(format!("json({}, inside(point{{[0]}}))", "+0").as_str())
.is_err());
assert!(p
.parse(format!("json({}, point{{[0]}})", "-0").as_str())
.parse(format!("json({}, inside(point{{[0]}}))", "-0").as_str())
.is_ok());
assert!(p
.parse(format!("json({}, point{{[0]}})", "1").as_str())
.parse(format!("json({}, inside(point{{[0]}}))", "1").as_str())
.is_ok());
assert!(p
.parse(format!("json({}, point{{[0]}})", "+1").as_str())
.parse(format!("json({}, inside(point{{[0]}}))", "+1").as_str())
.is_err());
assert!(p
.parse(format!("json({}, point{{[0]}})", "-1").as_str())
.parse(format!("json({}, inside(point{{[0]}}))", "-1").as_str())
.is_ok());
// Floating point values
assert!(p
.parse(format!("json({}, point{{[0]}})", "0.0").as_str())
.parse(format!("json({}, inside(point{{[0]}}))", "0.0").as_str())
.is_ok());
assert!(p
.parse(format!("json({}, point{{[0]}})", "+0.0").as_str())
.parse(format!("json({}, inside(point{{[0]}}))", "+0.0").as_str())
.is_err());
assert!(p
.parse(format!("json({}, point{{[0]}})", "-0.0").as_str())
.parse(format!("json({}, inside(point{{[0]}}))", "-0.0").as_str())
.is_ok());
assert!(p
.parse(format!("json({}, point{{[0]}})", "0.1").as_str())
.parse(format!("json({}, inside(point{{[0]}}))", "0.1").as_str())
.is_ok());
assert!(p
.parse(format!("json({}, point{{[0]}})", "+0.01").as_str())
.parse(format!("json({}, inside(point{{[0]}}))", "+0.01").as_str())
.is_err());
assert!(p
.parse(format!("json({}, point{{[0]}})", "-0.01").as_str())
.parse(format!("json({}, inside(point{{[0]}}))", "-0.01").as_str())
.is_ok());
}
}
@@ -290,7 +290,7 @@ mod parsing {
assert!(p.parse("").is_err());
assert!(p.parse("point{[0]}").is_ok());
assert!(p.parse("inside(point{[0]})").is_ok());
}
/* Not useful to test this rule
@@ -305,7 +305,7 @@ mod parsing {
assert!(p.parse("distinct()").is_err());
assert!(p.parse("distinct(point{[0]})").is_ok());
assert!(p.parse("distinct(inside(point{[0]}))").is_ok());
}
#[test]
@@ -314,7 +314,7 @@ mod parsing {
assert!(p.parse("complement()").is_err());
assert!(p.parse("complement(point{[0]})").is_ok());
assert!(p.parse("complement(inside(point{[0]}))").is_ok());
}
#[test]
@@ -322,12 +322,12 @@ mod parsing {
let p = filters_parser();
assert!(p.parse("intersection()").is_err());
assert!(p.parse("intersection(point{[0]})").is_err());
assert!(p.parse("intersection(inside(point{[0]}))").is_err());
assert!(p
.parse("intersection(point{[0]}, point{[0]}, point{[0]})")
.parse("intersection(inside(point{[0]}), inside(point{[0]}), inside(point{[0]}))")
.is_err());
assert!(p.parse("intersection(point{[0]}, point{[0]})").is_ok());
assert!(p.parse("intersection(inside(point{[0]}), inside(point{[0]}))").is_ok());
}
#[test]
@@ -335,12 +335,12 @@ mod parsing {
let p = filters_parser();
assert!(p.parse("union()").is_err());
assert!(p.parse("union(point{[0]})").is_err());
assert!(p.parse("union(inside(point{[0]}))").is_err());
assert!(p
.parse("union(point{[0]}, point{[0]}, point{[0]})")
.parse("union(inside(point{[0]}), inside(point{[0]}), inside(point{[0]}))")
.is_err());
assert!(p.parse("union(point{[0]}, point{[0]})").is_ok());
assert!(p.parse("union(inside(point{[0]}), inside(point{[0]}))").is_ok());
}
#[test]
@@ -348,10 +348,10 @@ mod parsing {
let p = filters_parser();
assert!(p.parse("filter()").is_err());
assert!(p.parse("filter(point{[0]})").is_ok());
assert!(p.parse("filter(inside(point{[0]}))").is_ok());
assert!(p.parse("filter(=(., [0]))").is_ok());
assert!(p.parse("filter(=(., [0]), point{[0]})").is_ok());
assert!(p.parse("filter(=(., [0]), inside(point{[0]}))").is_ok());
}
/* Not useful to test this rule
@@ -365,17 +365,17 @@ mod parsing {
let p = filters_parser();
assert!(p
.parse(format!("filter({}, point{{[0]}})", "<(., [0])").as_str())
.parse(format!("filter({}, inside(point{{[0]}}))", "<(., [0])").as_str())
.is_ok());
assert!(p
.parse(format!("filter({}, point{{[0]}})", "<(, [0])").as_str())
.parse(format!("filter({}, inside(point{{[0]}}))", "<(, [0])").as_str())
.is_err());
assert!(p
.parse(format!("filter({}, point{{[0]}})", "<(.)").as_str())
.parse(format!("filter({}, inside(point{{[0]}}))", "<(.)").as_str())
.is_err());
assert!(p
.parse(format!("filter({}, point{{[0]}})", "<()").as_str())
.parse(format!("filter({}, inside(point{{[0]}}))", "<()").as_str())
.is_err());
}
@@ -384,17 +384,17 @@ mod parsing {
let p = filters_parser();
assert!(p
.parse(format!("filter({}, point{{[0]}})", ">(., [0])").as_str())
.parse(format!("filter({}, inside(point{{[0]}}))", ">(., [0])").as_str())
.is_ok());
assert!(p
.parse(format!("filter({}, point{{[0]}})", ">(, [0])").as_str())
.parse(format!("filter({}, inside(point{{[0]}}))", ">(, [0])").as_str())
.is_err());
assert!(p
.parse(format!("filter({}, point{{[0]}})", ">(.)").as_str())
.parse(format!("filter({}, inside(point{{[0]}}))", ">(.)").as_str())
.is_err());
assert!(p
.parse(format!("filter({}, point{{[0]}})", ">()").as_str())
.parse(format!("filter({}, inside(point{{[0]}}))", ">()").as_str())
.is_err());
}
@@ -403,17 +403,17 @@ mod parsing {
let p = filters_parser();
assert!(p
.parse(format!("filter({}, point{{[0]}})", "=(., [0])").as_str())
.parse(format!("filter({}, inside(point{{[0]}}))", "=(., [0])").as_str())
.is_ok());
assert!(p
.parse(format!("filter({}, point{{[0]}})", "=(, [0])").as_str())
.parse(format!("filter({}, inside(point{{[0]}}))", "=(, [0])").as_str())
.is_err());
assert!(p
.parse(format!("filter({}, point{{[0]}})", "=(.)").as_str())
.parse(format!("filter({}, inside(point{{[0]}}))", "=(.)").as_str())
.is_err());
assert!(p
.parse(format!("filter({}, point{{[0]}})", "=()").as_str())
.parse(format!("filter({}, inside(point{{[0]}}))", "=()").as_str())
.is_err());
}
@@ -422,11 +422,11 @@ mod parsing {
let p = filters_parser();
assert!(p
.parse(format!("filter({}, point{{[0]}})", "!(=(., [0]))").as_str())
.parse(format!("filter({}, inside(point{{[0]}}))", "!(=(., [0]))").as_str())
.is_ok());
assert!(p
.parse(format!("filter({}, point{{[0]}})", "!()").as_str())
.parse(format!("filter({}, inside(point{{[0]}}))", "!()").as_str())
.is_err());
}
@@ -435,17 +435,17 @@ mod parsing {
let p = filters_parser();
assert!(p
.parse(format!("filter({}, point{{[0]}})", "&(=(., [0]), =(., [0]))").as_str())
.parse(format!("filter({}, inside(point{{[0]}}))", "&(=(., [0]), =(., [0]))").as_str())
.is_ok());
assert!(p
.parse(format!("filter({}, point{{[0]}})", "&(, =(., [0]))").as_str())
.parse(format!("filter({}, inside(point{{[0]}}))", "&(, =(., [0]))").as_str())
.is_err());
assert!(p
.parse(format!("filter({}, point{{[0]}})", "&(|(=(., [0])))").as_str())
.parse(format!("filter({}, inside(point{{[0]}}))", "&(|(=(., [0])))").as_str())
.is_err());
assert!(p
.parse(format!("filter({}, point{{[0]}})", "&()").as_str())
.parse(format!("filter({}, inside(point{{[0]}}))", "&()").as_str())
.is_err());
}
@@ -454,17 +454,17 @@ mod parsing {
let p = filters_parser();
assert!(p
.parse(format!("filter({}, point{{[0]}})", "|(=(., [0]), =(., [0]))").as_str())
.parse(format!("filter({}, inside(point{{[0]}}))", "|(=(., [0]), =(., [0]))").as_str())
.is_ok());
assert!(p
.parse(format!("filter({}, point{{[0]}})", "|(, =(., [0]))").as_str())
.parse(format!("filter({}, inside(point{{[0]}}))", "|(, =(., [0]))").as_str())
.is_err());
assert!(p
.parse(format!("filter({}, point{{[0]}})", "|(|(=(., [0])))").as_str())
.parse(format!("filter({}, inside(point{{[0]}}))", "|(|(=(., [0])))").as_str())
.is_err());
assert!(p
.parse(format!("filter({}, point{{[0]}})", "|()").as_str())
.parse(format!("filter({}, inside(point{{[0]}}))", "|()").as_str())
.is_err());
}
@@ -474,11 +474,11 @@ mod parsing {
assert!(p.parse("bag{}").is_err());
assert!(p.parse("bag{point{[0]}}").is_ok());
assert!(p.parse("bag{point{[0]}, point{[0]}}").is_ok());
assert!(p.parse("bag{point{[0]}, point{[0]}, point{[0]}}").is_ok());
assert!(p.parse("bag{inside(point{[0]})}").is_ok());
assert!(p.parse("bag{inside(point{[0]}), inside(point{[0]})}").is_ok());
assert!(p.parse("bag{inside(point{[0]}), inside(point{[0]}), inside(point{[0]})}").is_ok());
assert!(p
.parse("bag{point{[0]}, hypersphere{[0], 1}, hyperrectangle{[0], [1]}}")
.parse("bag{inside(point{[0]}), inside(hypersphere{[0], 1}), inside(hyperrectangle{[0], [1]})}")
.is_ok());
}
@@ -518,21 +518,21 @@ mod parsing {
// At least two positions when it is aligned with the axis, otherwise an even number
// of positions, as the number of vertices follows the rule 2**k, where k is the number
// of dimensions of the space containing the hyperrectangle.
assert!(p.parse("hyperrectangle{}").is_err());
assert!(p.parse("hyperrectangle{[]}").is_err());
assert!(p.parse("hyperrectangle{[0]}").is_err());
assert!(p.parse("hyperrectangle{[0], [1], [2]}").is_err());
assert!(p.parse("hyperrectangle{[0], [1], [2], [3], [4]}").is_err());
assert!(p.parse("inside(hyperrectangle{})").is_err());
assert!(p.parse("inside(hyperrectangle{[]})").is_err());
assert!(p.parse("inside(hyperrectangle{[0]})").is_err());
assert!(p.parse("inside(hyperrectangle{[0], [1], [2]})").is_err());
assert!(p.parse("inside(hyperrectangle{[0], [1], [2], [3], [4]})").is_err());
assert!(p.parse("hyperrectangle{[0], [1]}").is_ok());
assert!(p.parse("hyperrectangle{[0], [1], \"space\"}").is_ok());
assert!(p.parse("hyperrectangle{[0], [1], [2], [3]}").is_ok());
assert!(p.parse("hyperrectangle{[0], [1], [2], [3]}").is_ok());
assert!(p.parse("inside(hyperrectangle{[0], [1]})").is_ok());
assert!(p.parse("inside(hyperrectangle{[0], [1], \"space\"})").is_ok());
assert!(p.parse("inside(hyperrectangle{[0], [1], [2], [3]})").is_ok());
assert!(p.parse("inside(hyperrectangle{[0], [1], [2], [3]})").is_ok());
assert!(p
.parse("hyperrectangle{[0], [1], [2], [3], [4], [5]}")
.parse("inside(hyperrectangle{[0], [1], [2], [3], [4], [5]})")
.is_ok());
assert!(p
.parse("hyperrectangle{[0], [1], [2], [3], [4], [5], \"space\"}")
.parse("inside(hyperrectangle{[0], [1], [2], [3], [4], [5], \"space\"})")
.is_ok());
}
@@ -540,23 +540,23 @@ mod parsing {
fn hyperrsphere() {
let p = filters_parser();
assert!(p.parse("hypersphere{}").is_err());
assert!(p.parse("hypersphere{[]}").is_err());
assert!(p.parse("hypersphere{[0]}").is_err());
assert!(p.parse("inside(hypersphere{}").is_err());
assert!(p.parse("inside(hypersphere{[]})").is_err());
assert!(p.parse("inside(hypersphere{[0]})").is_err());
assert!(p.parse("hypersphere{[0], 23}").is_ok());
assert!(p.parse("hypersphere{[0], 23, \"space\"}").is_ok());
assert!(p.parse("inside(hypersphere{[0], 23})").is_ok());
assert!(p.parse("inside(hypersphere{[0], 23, \"space\"})").is_ok());
}
#[test]
fn point() {
let p = filters_parser();
assert!(p.parse("point{}").is_err());
assert!(p.parse("point{[]}").is_err());
assert!(p.parse("inside(point{})").is_err());
assert!(p.parse("inside(point{[]})").is_err());
assert!(p.parse("point{[0]}").is_ok());
assert!(p.parse("point{[0], \"space\"}").is_ok());
assert!(p.parse("inside(point{[0]})").is_ok());
assert!(p.parse("inside(point{[0], \"space\"})").is_ok());
}
#[test]
@@ -579,30 +579,30 @@ mod parsing {
assert!(p
.parse(
format!(
"filter(=({}, [1]), point{{[0]}})",
"filter(=({}, [1]), inside(point{{[0]}}))",
"str_cmp_ignore_case(.field, \"\")"
)
.as_str()
)
.is_ok());
assert!(p
.parse(format!("filter(=({}, [1]), point{{[0]}})", "str_cmp(.field, \"\")").as_str())
.parse(format!("filter(=({}, [1]), inside(point{{[0]}}))", "str_cmp(.field, \"\")").as_str())
.is_ok());
assert!(p
.parse(format!("filter(=({}, [1]), point{{[0]}})", ".field").as_str())
.parse(format!("filter(=({}, [1]), inside(point{{[0]}}))", ".field").as_str())
.is_ok());
assert!(p
.parse(format!("filter(=({}, [1]), point{{[0]}})", "[0]").as_str())
.parse(format!("filter(=({}, [1]), inside(point{{[0]}}))", "[0]").as_str())
.is_ok());
assert!(p
.parse(format!("filter(=({}, [1]), point{{[0]}})", "point{[0]}").as_str())
.parse(format!("filter(=({}, [1]), inside(point{{[0]}}))", "inside(point{[0]})").as_str())
.is_err());
assert!(p
.parse(format!("filter(=({}, [1]), point{{[0]}})", "{0}").as_str())
.parse(format!("filter(=({}, [1]), inside(point{{[0]}}))", "{0}").as_str())
.is_err());
assert!(p
.parse(format!("filter(=({}, [1]), point{{[0]}})", "").as_str())
.parse(format!("filter(=({}, [1]), inside(point{{[0]}}))", "").as_str())
.is_err());
}*/
@@ -612,15 +612,15 @@ mod parsing {
assert!(p
.parse(
format!("filter(=({}, [1]), point{{[0]}})", "str_cmp(.field, \"\")").as_str()
format!("filter(=({}, [1]), inside(point{{[0]}}))", "str_cmp(.field, \"\")").as_str()
)
.is_ok());
assert!(p
.parse(format!("filter(=({}, [1]), point{{[0]}})", "str_cmp(.field)").as_str())
.parse(format!("filter(=({}, [1]), inside(point{{[0]}}))", "str_cmp(.field)").as_str())
.is_err());
assert!(p
.parse(format!("filter(=({}, [1]), point{{[0]}})", "str_cmp(\"\")").as_str())
.parse(format!("filter(=({}, [1]), inside(point{{[0]}}))", "str_cmp(\"\")").as_str())
.is_err());
}
@@ -631,7 +631,7 @@ mod parsing {
assert!(p
.parse(
format!(
"filter(=({}, [1]), point{{[0]}})",
"filter(=({}, [1]), inside(point{{[0]}}))",
"str_cmp_ignore_case(.field, \"\")"
)
.as_str()
@@ -641,7 +641,7 @@ mod parsing {
assert!(p
.parse(
format!(
"filter(=({}, [1]), point{{[0]}})",
"filter(=({}, [1]), inside(point{{[0]}}))",
"str_cmp_ignore_case(.field)"
)
.as_str()
@@ -650,7 +650,7 @@ mod parsing {
assert!(p
.parse(
format!(
"filter(=({}, [1]), point{{[0]}})",
"filter(=({}, [1]), inside(point{{[0]}}))",
"str_cmp_ignore_case(\"\")"
)
.as_str()
@@ -663,19 +663,19 @@ mod parsing {
let p = filters_parser();
assert!(p
.parse(format!("filter(=({}, [1]), point{{[0]}})", ".").as_str())
.parse(format!("filter(=({}, [1]), inside(point{{[0]}}))", ".").as_str())
.is_ok());
assert!(p
.parse(format!("filter(=({}, [1]), point{{[0]}})", ".field").as_str())
.parse(format!("filter(=({}, [1]), inside(point{{[0]}}))", ".field").as_str())
.is_ok());
assert!(p
.parse(format!("filter(=({}, [1]), point{{[0]}})", ".field.field").as_str())
.parse(format!("filter(=({}, [1]), inside(point{{[0]}}))", ".field.field").as_str())
.is_ok());
assert!(p
.parse(format!("filter(=({}, [1]), point{{[0]}})", ".field[1].field").as_str())
.parse(format!("filter(=({}, [1]), inside(point{{[0]}}))", ".field[1].field").as_str())
.is_ok());
assert!(p
.parse(format!("filter(=({}, [1]), point{{[0]}})", ".field.field[1]").as_str())
.parse(format!("filter(=({}, [1]), inside(point{{[0]}}))", ".field.field[1]").as_str())
.is_ok());
}
@@ -684,26 +684,26 @@ mod parsing {
let p = filters_parser();
// Empty
assert!(p.parse(format!("point{{{}}}", "[]").as_str()).is_err());
assert!(p.parse(format!("inside(point{{{}}})", "[]").as_str()).is_err());
// Non-numerical coordinate:
assert!(p.parse(format!("point{{{}}}", "[aa]").as_str()).is_err());
assert!(p.parse(format!("inside(point{{{}}})", "[aa]").as_str()).is_err());
assert!(p
.parse(format!("point{{{}}}", "[\"aa\"]").as_str())
.parse(format!("inside(point{{{}}})", "[\"aa\"]").as_str())
.is_err());
// One or more coordinates
assert!(p.parse(format!("point{{{}}}", "[0]").as_str()).is_ok());
assert!(p.parse(format!("point{{{}}}", "[0, 0]").as_str()).is_ok());
assert!(p.parse(format!("inside(point{{{}}})", "[0]").as_str()).is_ok());
assert!(p.parse(format!("inside(point{{{}}})", "[0, 0]").as_str()).is_ok());
assert!(p
.parse(format!("point{{{}}}", "[0, 0, 0]").as_str())
.parse(format!("inside(point{{{}}})", "[0, 0, 0]").as_str())
.is_ok());
assert!(p
.parse(format!("point{{{}}}", "[0, 0, 0, 0]").as_str())
.parse(format!("inside(point{{{}}})", "[0, 0, 0, 0]").as_str())
.is_ok());
assert!(p
.parse(format!("point{{{}}}", "[0,0,0,0]").as_str())
.parse(format!("inside(point{{{}}})", "[0,0,0,0]").as_str())
.is_ok());
}
@@ -713,66 +713,66 @@ mod parsing {
// Single dot
assert!(p
.parse(format!("filter(<({}, [1]), point{{[0]}})", ".").as_str())
.parse(format!("filter(<({}, [1]), inside(point{{[0]}}))", ".").as_str())
.is_ok());
// Check first character is within allowed characters
assert!(p
.parse(format!("filter(<({}, [1]), point{{[0]}})", ".a").as_str())
.parse(format!("filter(<({}, [1]), inside(point{{[0]}}))", ".a").as_str())
.is_ok());
assert!(p
.parse(format!("filter(<({}, [1]), point{{[0]}})", "._").as_str())
.parse(format!("filter(<({}, [1]), inside(point{{[0]}}))", "._").as_str())
.is_ok());
assert!(p
.parse(format!("filter(<({}, [1]), point{{[0]}})", ".2").as_str())
.parse(format!("filter(<({}, [1]), inside(point{{[0]}}))", ".2").as_str())
.is_err());
// Check second character is within allowed characters
assert!(p
.parse(format!("filter(<({}, [1]), point{{[0]}})", ".fa").as_str())
.parse(format!("filter(<({}, [1]), inside(point{{[0]}}))", ".fa").as_str())
.is_ok());
assert!(p
.parse(format!("filter(<({}, [1]), point{{[0]}})", ".f2").as_str())
.parse(format!("filter(<({}, [1]), inside(point{{[0]}}))", ".f2").as_str())
.is_ok());
assert!(p
.parse(format!("filter(<({}, [1]), point{{[0]}})", ".f_").as_str())
.parse(format!("filter(<({}, [1]), inside(point{{[0]}}))", ".f_").as_str())
.is_ok());
assert!(p
.parse(format!("filter(<({}, [1]), point{{[0]}})", ".f2").as_str())
.parse(format!("filter(<({}, [1]), inside(point{{[0]}}))", ".f2").as_str())
.is_ok());
// Check we can add subscript
assert!(p
.parse(format!("filter(<({}, [1]), point{{[0]}})", ".[23]").as_str())
.parse(format!("filter(<({}, [1]), inside(point{{[0]}}))", ".[23]").as_str())
.is_ok());
assert!(p
.parse(format!("filter(<({}, [1]), point{{[0]}})", ".f[0]").as_str())
.parse(format!("filter(<({}, [1]), inside(point{{[0]}}))", ".f[0]").as_str())
.is_ok());
assert!(p
.parse(format!("filter(<({}, [1]), point{{[0]}})", ".f[2]").as_str())
.parse(format!("filter(<({}, [1]), inside(point{{[0]}}))", ".f[2]").as_str())
.is_ok());
assert!(p
.parse(format!("filter(<({}, [1]), point{{[0]}})", ".f[23]").as_str())
.parse(format!("filter(<({}, [1]), inside(point{{[0]}}))", ".f[23]").as_str())
.is_ok());
// Invalid index values
assert!(p
.parse(format!("filter(<({}, [1]), point{{[0]}})", ".f[2.3]").as_str())
.parse(format!("filter(<({}, [1]), inside(point{{[0]}}))", ".f[2.3]").as_str())
.is_err());
assert!(p
.parse(format!("filter(<({}, [1]), point{{[0]}})", ".f[02]").as_str())
.parse(format!("filter(<({}, [1]), inside(point{{[0]}}))", ".f[02]").as_str())
.is_err());
assert!(p
.parse(format!("filter(<({}, [1]), point{{[0]}})", ".f[-2]").as_str())
.parse(format!("filter(<({}, [1]), inside(point{{[0]}}))", ".f[-2]").as_str())
.is_err());
assert!(p
.parse(format!("filter(<({}, [1]), point{{[0]}})", ".f[2e2]").as_str())
.parse(format!("filter(<({}, [1]), inside(point{{[0]}}))", ".f[2e2]").as_str())
.is_err());
assert!(p
.parse(format!("filter(<({}, [1]), point{{[0]}})", ".f[2E2]").as_str())
.parse(format!("filter(<({}, [1]), inside(point{{[0]}}))", ".f[2E2]").as_str())
.is_err());
assert!(p
.parse(format!("filter(<({}, [1]), point{{[0]}})", ".f[+2]").as_str())
.parse(format!("filter(<({}, [1]), inside(point{{[0]}}))", ".f[+2]").as_str())
.is_err());
}
@@ -836,42 +836,42 @@ mod parsing {
// Integers
assert!(p
.parse(format!("hypersphere{{[0],{}}}", "0").as_str())
.parse(format!("inside(hypersphere{{[0],{}}})", "0").as_str())
.is_ok());
assert!(p
.parse(format!("hypersphere{{[0],{}}}", "+0").as_str())
.parse(format!("inside(hypersphere{{[0],{}}})", "+0").as_str())
.is_ok());
assert!(p
.parse(format!("hypersphere{{[0],{}}}", "-0").as_str())
.parse(format!("inside(hypersphere{{[0],{}}})", "-0").as_str())
.is_err());
assert!(p
.parse(format!("hypersphere{{[0],{}}}", "1").as_str())
.parse(format!("inside(hypersphere{{[0],{}}})", "1").as_str())
.is_ok());
assert!(p
.parse(format!("hypersphere{{[0],{}}}", "+1").as_str())
.parse(format!("inside(hypersphere{{[0],{}}})", "+1").as_str())
.is_ok());
assert!(p
.parse(format!("hypersphere{{[0],{}}}", "-1").as_str())
.parse(format!("inside(hypersphere{{[0],{}}})", "-1").as_str())
.is_err());
// Floating point values
assert!(p
.parse(format!("hypersphere{{[0],{}}}", "0.0").as_str())
.parse(format!("inside(hypersphere{{[0],{}}})", "0.0").as_str())
.is_ok());
assert!(p
.parse(format!("hypersphere{{[0],{}}}", "+0.0").as_str())
.parse(format!("inside(hypersphere{{[0],{}}})", "+0.0").as_str())
.is_ok());
assert!(p
.parse(format!("hypersphere{{[0],{}}}", "-0.0").as_str())
.parse(format!("inside(hypersphere{{[0],{}}})", "-0.0").as_str())
.is_err());
assert!(p
.parse(format!("hypersphere{{[0],{}}}", "0.1").as_str())
.parse(format!("inside(hypersphere{{[0],{}}})", "0.1").as_str())
.is_ok());
assert!(p
.parse(format!("hypersphere{{[0],{}}}", "+0.01").as_str())
.parse(format!("inside(hypersphere{{[0],{}}})", "+0.01").as_str())
.is_ok());
assert!(p
.parse(format!("hypersphere{{[0],{}}}", "-0.01").as_str())
.parse(format!("inside(hypersphere{{[0],{}}})", "-0.01").as_str())
.is_err());
}
@@ -880,20 +880,20 @@ mod parsing {
let p = filters_parser();
// Integers
assert!(p.parse(format!("point{{[{}]}}", "0").as_str()).is_ok());
assert!(p.parse(format!("point{{[{}]}}", "+0").as_str()).is_ok());
assert!(p.parse(format!("point{{[{}]}}", "-0").as_str()).is_ok());
assert!(p.parse(format!("point{{[{}]}}", "1").as_str()).is_ok());
assert!(p.parse(format!("point{{[{}]}}", "+1").as_str()).is_ok());
assert!(p.parse(format!("point{{[{}]}}", "-1").as_str()).is_ok());
assert!(p.parse(format!("inside(point{{[{}]}})", "0").as_str()).is_ok());
assert!(p.parse(format!("inside(point{{[{}]}})", "+0").as_str()).is_ok());
assert!(p.parse(format!("inside(point{{[{}]}})", "-0").as_str()).is_ok());
assert!(p.parse(format!("inside(point{{[{}]}})", "1").as_str()).is_ok());
assert!(p.parse(format!("inside(point{{[{}]}})", "+1").as_str()).is_ok());
assert!(p.parse(format!("inside(point{{[{}]}})", "-1").as_str()).is_ok());
// Floating point values
assert!(p.parse(format!("point{{[{}]}}", "0.0").as_str()).is_ok());
assert!(p.parse(format!("point{{[{}]}}", "+0.0").as_str()).is_ok());
assert!(p.parse(format!("point{{[{}]}}", "-0.0").as_str()).is_ok());
assert!(p.parse(format!("point{{[{}]}}", "0.1").as_str()).is_ok());
assert!(p.parse(format!("point{{[{}]}}", "+0.01").as_str()).is_ok());
assert!(p.parse(format!("point{{[{}]}}", "-0.01").as_str()).is_ok());
assert!(p.parse(format!("inside(point{{[{}]}})", "0.0").as_str()).is_ok());
assert!(p.parse(format!("inside(point{{[{}]}})", "+0.0").as_str()).is_ok());
assert!(p.parse(format!("inside(point{{[{}]}})", "-0.0").as_str()).is_ok());
assert!(p.parse(format!("inside(point{{[{}]}})", "0.1").as_str()).is_ok());
assert!(p.parse(format!("inside(point{{[{}]}})", "+0.01").as_str()).is_ok());
assert!(p.parse(format!("inside(point{{[{}]}})", "-0.01").as_str()).is_ok());
}
#[test]
@@ -901,54 +901,54 @@ mod parsing {
let p = filters_parser();
// Integers
assert!(p.parse(format!("point{{[{}]}}", "0").as_str()).is_ok());
assert!(p.parse(format!("point{{[{}]}}", "1").as_str()).is_ok());
assert!(p.parse(format!("point{{[{}]}}", "1e2").as_str()).is_ok());
assert!(p.parse(format!("point{{[{}]}}", "1e+2").as_str()).is_ok());
assert!(p.parse(format!("point{{[{}]}}", "1e-2").as_str()).is_ok());
assert!(p.parse(format!("point{{[{}]}}", "1E2").as_str()).is_ok());
assert!(p.parse(format!("point{{[{}]}}", "100").as_str()).is_ok());
assert!(p.parse(format!("inside(point{{[{}]}})", "0").as_str()).is_ok());
assert!(p.parse(format!("inside(point{{[{}]}})", "1").as_str()).is_ok());
assert!(p.parse(format!("inside(point{{[{}]}})", "1e2").as_str()).is_ok());
assert!(p.parse(format!("inside(point{{[{}]}})", "1e+2").as_str()).is_ok());
assert!(p.parse(format!("inside(point{{[{}]}})", "1e-2").as_str()).is_ok());
assert!(p.parse(format!("inside(point{{[{}]}})", "1E2").as_str()).is_ok());
assert!(p.parse(format!("inside(point{{[{}]}})", "100").as_str()).is_ok());
assert!(p.parse(format!("point{{[{}]}}", "010").as_str()).is_err());
assert!(p.parse(format!("inside(point{{[{}]}})", "010").as_str()).is_err());
// Floating point values (normalized)
assert!(p.parse(format!("point{{[{}]}}", "0.0").as_str()).is_ok());
assert!(p.parse(format!("point{{[{}]}}", "0.1").as_str()).is_ok());
assert!(p.parse(format!("point{{[{}]}}", "0.1e0").as_str()).is_ok());
assert!(p.parse(format!("point{{[{}]}}", "0.1e2").as_str()).is_ok());
assert!(p.parse(format!("point{{[{}]}}", "0.1e+2").as_str()).is_ok());
assert!(p.parse(format!("point{{[{}]}}", "0.1e-2").as_str()).is_ok());
assert!(p.parse(format!("point{{[{}]}}", "0.1E2").as_str()).is_ok());
assert!(p.parse(format!("point{{[{}]}}", "0.1E23").as_str()).is_ok());
assert!(p.parse(format!("point{{[{}]}}", "0.01").as_str()).is_ok());
assert!(p.parse(format!("inside(point{{[{}]}})", "0.0").as_str()).is_ok());
assert!(p.parse(format!("inside(point{{[{}]}})", "0.1").as_str()).is_ok());
assert!(p.parse(format!("inside(point{{[{}]}})", "0.1e0").as_str()).is_ok());
assert!(p.parse(format!("inside(point{{[{}]}})", "0.1e2").as_str()).is_ok());
assert!(p.parse(format!("inside(point{{[{}]}})", "0.1e+2").as_str()).is_ok());
assert!(p.parse(format!("inside(point{{[{}]}})", "0.1e-2").as_str()).is_ok());
assert!(p.parse(format!("inside(point{{[{}]}})", "0.1E2").as_str()).is_ok());
assert!(p.parse(format!("inside(point{{[{}]}})", "0.1E23").as_str()).is_ok());
assert!(p.parse(format!("inside(point{{[{}]}})", "0.01").as_str()).is_ok());
assert!(p.parse(format!("point{{[{}]}}", "0.").as_str()).is_err());
assert!(p.parse(format!("inside(point{{[{}]}})", "0.").as_str()).is_err());
assert!(p
.parse(format!("point{{[{}]}}", "0.1E03").as_str())
.parse(format!("inside(point{{[{}]}})", "0.1E03").as_str())
.is_err());
assert!(p
.parse(format!("point{{[{}]}}", "0.1E0.3").as_str())
.parse(format!("inside(point{{[{}]}})", "0.1E0.3").as_str())
.is_err());
// Floating point values (denormalized)
assert!(p.parse(format!("point{{[{}]}}", "1.0").as_str()).is_ok());
assert!(p.parse(format!("point{{[{}]}}", "1.1").as_str()).is_ok());
assert!(p.parse(format!("point{{[{}]}}", "1.1e0").as_str()).is_ok());
assert!(p.parse(format!("point{{[{}]}}", "1.1e2").as_str()).is_ok());
assert!(p.parse(format!("point{{[{}]}}", "1.1e+2").as_str()).is_ok());
assert!(p.parse(format!("point{{[{}]}}", "1.1e-2").as_str()).is_ok());
assert!(p.parse(format!("point{{[{}]}}", "1.1E2").as_str()).is_ok());
assert!(p.parse(format!("point{{[{}]}}", "1.1E23").as_str()).is_ok());
assert!(p.parse(format!("point{{[{}]}}", "1.01").as_str()).is_ok());
assert!(p.parse(format!("point{{[{}]}}", "10.1").as_str()).is_ok());
assert!(p.parse(format!("inside(point{{[{}]}})", "1.0").as_str()).is_ok());
assert!(p.parse(format!("inside(point{{[{}]}})", "1.1").as_str()).is_ok());
assert!(p.parse(format!("inside(point{{[{}]}})", "1.1e0").as_str()).is_ok());
assert!(p.parse(format!("inside(point{{[{}]}})", "1.1e2").as_str()).is_ok());
assert!(p.parse(format!("inside(point{{[{}]}})", "1.1e+2").as_str()).is_ok());
assert!(p.parse(format!("inside(point{{[{}]}})", "1.1e-2").as_str()).is_ok());
assert!(p.parse(format!("inside(point{{[{}]}})", "1.1E2").as_str()).is_ok());
assert!(p.parse(format!("inside(point{{[{}]}})", "1.1E23").as_str()).is_ok());
assert!(p.parse(format!("inside(point{{[{}]}})", "1.01").as_str()).is_ok());
assert!(p.parse(format!("inside(point{{[{}]}})", "10.1").as_str()).is_ok());
assert!(p.parse(format!("point{{[{}]}}", "1.").as_str()).is_err());
assert!(p.parse(format!("point{{[{}]}}", "01.1").as_str()).is_err());
assert!(p.parse(format!("inside(point{{[{}]}})", "1.").as_str()).is_err());
assert!(p.parse(format!("inside(point{{[{}]}})", "01.1").as_str()).is_err());
assert!(p
.parse(format!("point{{[{}]}}", "1.1E03").as_str())
.parse(format!("inside(point{{[{}]}})", "1.1E03").as_str())
.is_err());
assert!(p
.parse(format!("point{{[{}]}}", "1.1E0.3").as_str())
.parse(format!("inside(point{{[{}]}})", "1.1E0.3").as_str())
.is_err());
}
}

View File

@@ -11,23 +11,10 @@ pub enum LiteralTypes {
impl PartialEq for LiteralTypes {
fn eq(&self, other: &Self) -> bool {
match self {
LiteralTypes::String => match other {
LiteralTypes::String => true,
_ => false,
},
LiteralTypes::Int => match other {
LiteralTypes::Int => true,
_ => false,
},
LiteralTypes::Float => match other {
LiteralTypes::Float => true,
LiteralTypes::Int => true,
_ => false,
},
LiteralTypes::Bag(_) => match other {
LiteralTypes::Bag(_) => true,
_ => false,
},
LiteralTypes::String => matches!(other, LiteralTypes::String),
LiteralTypes::Int => matches!(other, LiteralTypes::Int),
LiteralTypes::Float => matches!(other, LiteralTypes::Float | LiteralTypes::Int),
LiteralTypes::Bag(_) => matches!(other, LiteralTypes::Bag(_)),
LiteralTypes::Vector(v) => match other {
LiteralTypes::Vector(ov) => {
let n = v.len();

View File

@@ -9,7 +9,7 @@ impl Validator for Projection {
fn validate(&self) -> ValidationResult {
match self {
Projection::Nifti(_, _, _) => Err("not yet implemented".to_string()),
Projection::JSON(_, _format, bag) => bag.validate(),
Projection::Json(_, _format, bag) => bag.validate(),
//FIXME: Add support for projections
/* match format.validate() {
Ok(_) => bag.validate(),
@@ -61,10 +61,7 @@ impl Validator for Bag {
Bag::Union(lh, rh) => compare_bag_types(lh, rh),
Bag::Bag(bags) => {
for b in bags {
let t = b.validate();
if t.is_err() {
return t;
}
b.validate()?;
}
Ok(get_type())
@@ -148,6 +145,10 @@ impl Validator for Shape {
}
}
Shape::HyperSphere(_, pos, _) => pos.validate(),
Shape::Label(_, _) => {
// FIXME: Quick Hack, we need to fix this and return the effective type of the object Id.
Ok(LiteralPosition(vec![]).get_type())
}
Shape::Nifti(_) => Err("not yet implemented".to_string()),
}
}