Compare commits

...

5 Commits

Author SHA1 Message Date
40786112d0 Updated compiler and dependencies
* Upgraded to rustc 1.80
* Updated dependencies
* Fixed most linter warnings
2024-08-09 19:15:37 +02:00
f68ec7af14 Fix compilation with rustc 1.4.2
More adaptations are required to use the most recent version of Rust;
therefore, fix the dependencies and the compiler version.
2024-08-08 12:14:21 +02:00
e4cbdf836f Rename library to mercator_parser 2020-04-01 18:14:45 +02:00
242de73053 Adding documentation 2020-04-01 17:06:52 +02:00
98b37e63b4 Silence warning in generated code 2020-03-17 17:18:50 +01:00
17 changed files with 115 additions and 84 deletions

View File

@@ -21,7 +21,7 @@ include = ["Cargo.toml", "README.md", "LICENSE", "ACKNOWLEDGEMENTS", "src/**/*.r
build = "build.rs" # LALRPOP preprocessing
[lib]
name = "parser"
name = "mercator_parser"
path = "src/lib.rs"
[[bin]]
@@ -33,19 +33,19 @@ required-features = ["bin"]
bin = ["measure_time", "pretty_env_logger"]
[dependencies]
mercator_db = "^0.1"
mercator_db = "0.1"
lalrpop-util = "^0.17"
regex = "^1.2"
lalrpop-util = "0.20"
regex = "1.10"
# Logging macros API
#log = { version = "^0.4", features = ["max_level_trace", "release_max_level_info"] }
log = { version = "^0.4", features = ["max_level_trace", "release_max_level_trace"] }
#log = { version = "0.4", features = ["max_level_trace", "release_max_level_info"] }
log = { version = "0.4", features = ["max_level_trace", "release_max_level_trace"] }
# Used for main.rs
pretty_env_logger = { version = "^0.3", optional = true } # Logger implementation
measure_time = { version = "^0.6", optional = true } # To measure parsing time, only required by binary
pretty_env_logger = { version = "0.5", optional = true } # Logger implementation
measure_time = { version = "0.8", optional = true } # To measure parsing time, only required by binary
[build-dependencies]
lalrpop = "^0.17.1"
lalrpop = "0.20"
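
The `max_level_trace` / `release_max_level_trace` features on `log` above set the crate's compile-time level filter; the commented-out line is the alternative that would drop trace/debug call sites from release builds. A minimal sketch of the difference, assuming the active feature set shown above:

```rust
use log::{trace, LevelFilter, STATIC_MAX_LEVEL};

fn main() {
    // With `release_max_level_trace` enabled, the statically allowed maximum
    // stays at Trace even in --release builds, so trace!/debug! call sites
    // are not compiled out of optimized binaries.
    assert_eq!(STATIC_MAX_LEVEL, LevelFilter::Trace);

    // Whether this line is actually printed still depends on the logger
    // implementation's runtime filter (e.g. RUST_LOG for pretty_env_logger).
    trace!("trace call sites survive release builds");
}
```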

View File

@@ -22,40 +22,6 @@ This enables the index implementations to be agnostic from the underlying data s
* Rust: https://www.rust-lang.org
## Quick start
## Building from sources
To build this project, you will need to run the following:
```sh
cargo build --release
```
### Installation
To install the software on the system you can use:
```sh
cargo install --release
```
### Usage
Lorem ipsum dolor sit amet, consectetur adipiscing elit. Proin vehicula pretium
quam sit amet facilisis. Class aptent taciti sociosqu ad litora torquent per
conubia nostra, per inceptos himenaeos. Curabitur metus sapien, rhoncus vitae
eleifend nec, convallis vel nunc. Nulla metus mauris, porta eu porta eu,
vulputate et est. Suspendisse lacinia leo vel auctor aliquet. Maecenas non arcu
libero. Nulla ut eleifend dui. Cras bibendum pharetra facilisis. Proin mattis
libero non pharetra tristique. Nam massa nulla, ultrices pharetra quam a,
fermentum placerat dolor. Nullam mollis libero et neque lobortis, id dignissim
lectus dignissim. Maecenas ligula enim, congue in ornare vel, volutpat ut ante.
```sh
cargo run --release
```
## Documentation
For more information, please refer to the [documentation](https://epfl-dias.github.io/mercator_parser/).

1
book/.gitignore vendored Normal file

@@ -0,0 +1 @@
book

6
book/book.toml Normal file

@@ -0,0 +1,6 @@
[book]
authors = ["Lionel Sambuc"]
language = "en"
multilingual = false
src = "src"
title = "Mercator Parser"

5
book/src/SUMMARY.md Normal file

@@ -0,0 +1,5 @@
# Summary
[Introduction](./introduction.md)
- [Filter Grammar](./filters.md)
- [Query Grammar](./queries.md)

10
book/src/filters.md Normal file

@@ -0,0 +1,10 @@
# Filter Grammar
Below you will find the definition of this DSL, used for filtering data
from the index.
## filters.g4
```antlr
{{#include ../../Grammars/filters.g4}}
```

7
book/src/introduction.md Normal file

@@ -0,0 +1,7 @@
# Introduction
To support volumetric queries for Mercator, a new domain-specific language (DSL) was created.
ANTLR was used to write and test the DSL, to check that it stays simple
to parse and fast to execute. The actual [parser](https://epfl-dias.github.io/mercator_parser/) and interpreter are
defined in Rust, using [LALRPOP](https://docs.rs/lalrpop/0.18.1/lalrpop/).
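
The ANTLR grammars serve as the reference definition; the Rust parser is generated by LALRPOP at build time, via the `build.rs` hook declared in `Cargo.toml` and the `lalrpop_mod!` call in `lib.rs`, both visible elsewhere in this change set. A minimal sketch of that wiring, with nothing project-specific beyond the `queries` module name:

```rust
// build.rs — sketch of the LALRPOP preprocessing step: compile the
// *.lalrpop grammar sources in the crate into Rust parser modules
// before the crate itself is built.
fn main() {
    lalrpop::process_root().unwrap();
}
```

The generated module is then pulled into the library with `lalrpop_mod!(pub queries);`, as shown in the `lib.rs` hunk of this comparison.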

9
book/src/queries.md Normal file

@@ -0,0 +1,9 @@
# Query Grammar
Below you will find the definition of this DSL, used for queries. This builds on top of the [filters](filters.html) grammar.
## queries.g4
```antlr
{{#include ../../Grammars/queries.g4}}
```

2
rust-toolchain.toml Normal file

@@ -0,0 +1,2 @@
[toolchain]
channel = "1.80.0"

View File

@@ -38,12 +38,10 @@ fn complement_helper<'c>(
Ok(points
.into_iter()
.filter_map(|(space, v)| match hashmap.get(space) {
None => None,
Some(list) => {
Some((space, v.into_iter().filter(|t| !list.contains(t)).collect()))
}
})
.filter_map(|(space, v)|
    hashmap.get(space).map(|list|
        (space, v.into_iter().filter(|t| !list.contains(t)).collect())))
.collect::<Vec<_>>())
}
}
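
The `complement_helper` change above replaces a `match` over `hashmap.get(space)` that maps `None => None` and `Some(list) => Some(...)` with the equivalent `Option::map`. A standalone sketch of the same pattern, using hypothetical integer point data instead of the crate's real space and position types:

```rust
use std::collections::HashMap;

// A `match` whose arms are `None => None` / `Some(x) => Some(f(x))` is
// exactly `Option::map`, so the closure body collapses to a single call.
fn complement(
    points: Vec<(&str, Vec<u32>)>,
    hashmap: &HashMap<&str, Vec<u32>>,
) -> Vec<(&str, Vec<u32>)> {
    points
        .into_iter()
        .filter_map(|(space, v)| {
            hashmap.get(space).map(|list| {
                // Keep only the values not present in the looked-up list.
                (space, v.into_iter().filter(|t| !list.contains(t)).collect())
            })
        })
        .collect()
}

fn main() {
    let mut seen = HashMap::new();
    seen.insert("space_a", vec![1, 2]);
    let points = vec![("space_a", vec![1, 2, 3]), ("space_b", vec![4])];
    // "space_b" has no entry, so it is dropped; 1 and 2 are filtered out.
    assert_eq!(complement(points, &seen), vec![("space_a", vec![3])]);
}
```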
@@ -74,7 +72,7 @@ fn distinct<'c>(
e @ Err(_) => e,
Ok(mut v) => {
let set: HashSet<_> = v.drain(..).collect(); // dedup
v.extend(set.into_iter());
v.extend(set);
Ok(v)
}
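
The `distinct` tweak above drops a redundant `.into_iter()`: `Vec::extend` accepts any `IntoIterator`, and `HashSet` already is one. A minimal sketch of the drain-into-set dedup idiom (note that it does not preserve element order):

```rust
use std::collections::HashSet;

// Drain the vector into a HashSet to drop duplicates, then refill the
// (now empty) vector from the set. `extend(set)` works directly because
// HashSet implements IntoIterator.
fn dedup_unordered(mut v: Vec<i32>) -> Vec<i32> {
    let set: HashSet<_> = v.drain(..).collect();
    v.extend(set);
    v
}

fn main() {
    let mut out = dedup_unordered(vec![3, 1, 3, 2, 1]);
    out.sort();
    assert_eq!(out, vec![1, 2, 3]);
}
```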
@@ -318,7 +316,7 @@ fn outside<'c>(
// Smallest increment possible
let mut increment = Vec::with_capacity(bounding_box[0].dimensions());
for _ in 0..bounding_box[0].dimensions() {
increment.push(std::f64::EPSILON);
increment.push(f64::EPSILON);
}
// Add it to the lower bound
@@ -337,7 +335,7 @@ fn outside<'c>(
Shape::HyperSphere(space_id, center, radius) => {
// Smallest decrement possible, to exclude the surface
let mut radius: f64 = radius.into();
radius -= std::f64::EPSILON;
radius -= f64::EPSILON;
let center: space::Position = center.into();
match core.get_by_shape(
@@ -364,7 +362,7 @@ impl<'e> Executor<'e> for Projection {
) -> Self::ResultSet {
match self {
Projection::Nifti(_, _, _bag) => Err("Proj-Nifti: not yet implemented".to_string()),
Projection::JSON(_, _format, bag) => {
Projection::Json(_, _format, bag) => {
bag.execute(core_id, parameters)
// FIXME: Add projections here
}

View File

@@ -1,17 +1,57 @@
#![forbid(unsafe_code)]
//! # Mercator Parser
//!
//! Query parser for Mercator.
//!
//! ## Mercator: Spatial Index
//!
//! **Mercator** is a spatial *volumetric* index for the
//! [Human Brain Project]. It is a component of the [Knowledge Graph]
//! service, which provides the spatial anchoring for the metadata
//! registered as well as processes the volumetric queries.
//!
//! It is built on top of the Iron Sea database toolkit.
//!
//! ## Iron Sea: Database Toolkit
//! **Iron Sea** provides a set of database engine bricks, which can be
//! combined and applied on arbitrary data structures.
//!
//! Unlike a traditional database, it does not assume a specific
//! physical structure for the tables or the records, but relies on the
//! developer to provide a set of extractor functions which are used by
//! the specific indices provided.
//!
//! This enables the index implementations to be agnostic to the
//! underlying data structure, and re-used.
//!
//! [Human Brain Project]: http://www.humanbrainproject.eu
//! [Knowledge Graph]: http://www.humanbrainproject.eu/en/explore-the-brain/search/
#[macro_use]
extern crate lalrpop_util;
lalrpop_mod!(#[allow(clippy::all)] pub queries); // synthesized by LALRPOP
lalrpop_mod!(#[allow(clippy::all,unused_parens)] pub queries); // synthesized by LALRPOP
// Note: We do not enable deny(missing_docs) for the whole library, as
// that would require the automatically generated parser to be
// documented as well.
// Instead, we enable it per module below, except for the tests.
//#[warn(missing_docs)]
mod evaluators;
//#[warn(missing_docs)]
mod executors;
//#[warn(missing_docs)]
mod expressions;
//#[warn(missing_docs)]
mod predictors;
//#[warn(missing_docs)]
mod validators;
//#[warn(missing_docs)]
mod symbols;
//#[warn(missing_docs)]
mod types;
pub use expressions::Executor;

View File

@@ -7,11 +7,11 @@ use std::io;
use mercator_db::CoreQueryParameters;
use mercator_db::DataBase;
use parser::Executor;
use parser::FiltersParser;
use parser::Predictor;
use parser::QueryParser;
use parser::Validator;
use mercator_parser::Executor;
use mercator_parser::FiltersParser;
use mercator_parser::Predictor;
use mercator_parser::QueryParser;
use mercator_parser::Validator;
fn main() {
// If RUST_LOG is unset, set it to INFO, otherwise keep it as-is.
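
A minimal sketch of what the comment above describes, assuming the optional `pretty_env_logger` dependency from `Cargo.toml` is enabled; this illustrates the usual pattern rather than the crate's exact code:

```rust
fn init_logging() {
    // If RUST_LOG is unset, default it to "info"; otherwise respect the
    // caller's setting, then hand off to the logger implementation.
    if std::env::var("RUST_LOG").is_err() {
        std::env::set_var("RUST_LOG", "info");
    }
    pretty_env_logger::init();
}
```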

View File

@@ -8,7 +8,7 @@ impl Predictor for Projection {
fn predict(&self, db: &DataBase) -> Result<f64, String> {
match self {
Projection::Nifti(_, _, bag) => bag.predict(db),
Projection::JSON(_, _, bag) => bag.predict(db),
Projection::Json(_, _, bag) => bag.predict(db),
}
}
}

View File

@@ -51,7 +51,7 @@ JsonOperator: symbols::Projection = {
None => Space::universe().name().clone(),
};
symbols::Projection::JSON(space_id, f, b)
symbols::Projection::Json(space_id, f, b)
}
};

View File

@@ -11,14 +11,14 @@ pub use super::types::*;
#[derive(Clone, Debug)]
pub enum Projection {
Nifti(String, LiteralSelector, Bag),
JSON(String, JsonValue, Bag),
Json(String, JsonValue, Bag),
}
impl Projection {
pub fn space(&self) -> &String {
match self {
Projection::Nifti(space, _, _) => &space,
Projection::JSON(space, _, _) => &space,
Projection::Nifti(space, _, _) => space,
Projection::Json(space, _, _) => space,
}
}
}
@@ -144,7 +144,7 @@ impl Shape {
pub fn volume(&self) -> f64 {
match self {
Shape::Point(_, _) => std::f64::EPSILON, // The smallest non-zero volume possible
Shape::Point(_, _) => f64::EPSILON, // The smallest non-zero volume possible
Shape::HyperRectangle(_space, pos) => {
//TODO: At this time, only axis-aligned hyperrectangles defined by two points are supported.
assert_eq!(pos.len(), 2);
@@ -208,7 +208,7 @@ impl Shape {
}
Shape::Label(_, _) => {
// FIXME: Need to find a way to figure out the approximate volume of this specific ID, or return MAX or MIN.
std::f64::EPSILON
f64::EPSILON
}
Shape::Nifti(_) => unimplemented!("Nifti"),
}
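
For the hyperrectangle arm mentioned in the TODO above, the volume of an axis-aligned box given two opposite corners is the product of the per-dimension side lengths. A rough sketch, with an illustrative function name that is not part of the crate:

```rust
// Volume of an axis-aligned hyperrectangle defined by two opposite corners.
fn aabb_volume(low: &[f64], high: &[f64]) -> f64 {
    assert_eq!(low.len(), high.len());
    low.iter()
        .zip(high.iter())
        .map(|(l, h)| (h - l).abs())
        .product()
}

fn main() {
    // A 2 x 3 x 4 box has volume 24.
    assert_eq!(aabb_volume(&[0.0, 0.0, 0.0], &[2.0, 3.0, 4.0]), 24.0);
}
```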

View File

@@ -11,23 +11,10 @@ pub enum LiteralTypes {
impl PartialEq for LiteralTypes {
fn eq(&self, other: &Self) -> bool {
match self {
LiteralTypes::String => match other {
LiteralTypes::String => true,
_ => false,
},
LiteralTypes::Int => match other {
LiteralTypes::Int => true,
_ => false,
},
LiteralTypes::Float => match other {
LiteralTypes::Float => true,
LiteralTypes::Int => true,
_ => false,
},
LiteralTypes::Bag(_) => match other {
LiteralTypes::Bag(_) => true,
_ => false,
},
LiteralTypes::String => matches!(other, LiteralTypes::String),
LiteralTypes::Int => matches!(other, LiteralTypes::Int),
LiteralTypes::Float => matches!(other, LiteralTypes::Float | LiteralTypes::Int),
LiteralTypes::Bag(_) => matches!(other, LiteralTypes::Bag(_)),
LiteralTypes::Vector(v) => match other {
LiteralTypes::Vector(ov) => {
let n = v.len();

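The rewrite above collapses each nested `match other { Pattern => true, _ => false }` into `matches!(other, Pattern)`, including the or-pattern for `Float`. A self-contained sketch of the idiom with a hypothetical two-variant enum, which also makes the deliberate `Float == Int` asymmetry visible:

```rust
enum Ty {
    Int,
    Float,
}

impl PartialEq for Ty {
    fn eq(&self, other: &Self) -> bool {
        match self {
            Ty::Int => matches!(other, Ty::Int),
            // An Int literal is acceptable wherever a Float is expected.
            Ty::Float => matches!(other, Ty::Float | Ty::Int),
        }
    }
}

fn main() {
    assert!(Ty::Float == Ty::Int); // Float accepts Int...
    assert!(Ty::Int != Ty::Float); // ...but not the other way around.
}
```
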
View File

@@ -9,7 +9,7 @@ impl Validator for Projection {
fn validate(&self) -> ValidationResult {
match self {
Projection::Nifti(_, _, _) => Err("not yet implemented".to_string()),
Projection::JSON(_, _format, bag) => bag.validate(),
Projection::Json(_, _format, bag) => bag.validate(),
//FIXME: Add support for projections
/* match format.validate() {
Ok(_) => bag.validate(),