Start writing rust backend

parent 97c5abc38b
commit 959dac332d
@@ -8,7 +8,8 @@ repository = "https://github.com/ecmwf/qubed"
 # rsfdb = {git = "https://github.com/ecmwf/rsfdb", branch = "develop"}
 serde = { version = "1.0", features = ["derive"] }
 serde_json = "1.0"
-pyo3 = "0.23"
+pyo3 = "0.25"
+lasso = "0.7.3"
 
 [package.metadata.maturin]
 version-from-git = true
File diff suppressed because one or more lines are too long
File diff suppressed because it is too large

 18996  config/language/paramids.yaml  (new file, diff suppressed because it is too large)
@@ -21,6 +21,7 @@ from .metadata import from_nodes
 from .protobuf.adapters import proto_to_qube, qube_to_proto
 from .tree_formatters import (
     HTML,
+    _display,
     node_tree_to_html,
     node_tree_to_string,
 )
@@ -76,14 +77,7 @@ class Qube:
     )
     children: tuple[Qube, ...] = ()
     is_root: bool = False
+    is_leaf: bool = False
-
-    def replace(self, **kwargs) -> Qube:
-        return dataclasses.replace(self, **kwargs)
-
-    def summary(self) -> str:
-        if self.is_root:
-            return self.key
-        return f"{self.key}={self.values.summary()}" if self.key != "root" else "root"
 
     @classmethod
     def make_node(
@@ -93,7 +87,9 @@ class Qube:
         children: Iterable[Qube],
         metadata: dict[str, np.ndarray] = {},
         is_root: bool = False,
+        is_leaf: bool | None = None,
     ) -> Qube:
+        children = tuple(sorted(children, key=lambda n: ((n.key, n.values.min()))))
         if isinstance(values, ValueGroup):
             values = values
         else:
@@ -102,9 +98,10 @@ class Qube:
         return cls(
             key,
             values=values,
-            children=tuple(sorted(children, key=lambda n: ((n.key, n.values.min())))),
+            children=children,
             metadata=frozendict(metadata),
             is_root=is_root,
+            is_leaf=(not len(children)) if is_leaf is None else is_leaf,
         )
 
     @classmethod
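Taken together with the two hunks above, leaf-ness is now an explicit field rather than something inferred from an empty children tuple. A rough sketch of the new default behaviour (hypothetical values, keyword names taken from the signature above):

    from qubed import Qube

    # With no children and is_leaf left unset, the node defaults to is_leaf=True ...
    leaf = Qube.make_node(key="param", values=["1"], children=[])
    # ... while is_leaf=False records "this node has children we have not listed".
    stub = Qube.make_node(key="param", values=["1"], children=[], is_leaf=False)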
@@ -117,6 +114,14 @@ class Qube:
             is_root=True,
         )
 
+    def replace(self, **kwargs) -> Qube:
+        return dataclasses.replace(self, **kwargs)
+
+    def summary(self) -> str:
+        if self.is_root:
+            return self.key
+        return f"{self.key}={self.values.summary()}" if self.key != "root" else "root"
+
     @classmethod
     def load(cls, path: str | Path) -> Qube:
         with open(path, "r") as f:
@@ -174,6 +179,17 @@ class Qube:
             for k, children in d.items():
                 key, values = k.split("=")
                 values = values.split("/")
+
+                # children == {"..." : {}}
+                # is a special case to represent trees with leaves we don't know about
+                if frozendict(children) == frozendict({"...": {}}):
+                    yield Qube.make_node(
+                        key=key,
+                        values=values,
+                        children={},
+                        is_leaf=False,
+                    )
+
+                # Special case for Wildcard values
                 if values == ["*"]:
                     values = WildcardGroup()
                 else:
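The "..." convention gives the dict loader a way to represent trees whose lower levels were never enumerated. A small sketch of a dict that uses it (the key=value dict format matches the tests elsewhere in this diff):

    from qubed import Qube

    # {"...": {}} marks children that exist but are not listed
    q = Qube.from_dict({"class=od": {"expver=0001": {"...": {}}}})
    # the expver=0001 node is kept with no children and is_leaf=False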
@@ -473,7 +489,6 @@ class Qube:
         selection: dict[str, str | list[str] | Callable[[Any], bool]],
         mode: Literal["strict", "relaxed"] = "relaxed",
         consume=False,
-        require_match=False,
     ) -> Qube:
         # Find any bare str values and replace them with [str]
         _selection: dict[str, list[str] | Callable[[Any], bool]] = {}
@@ -506,11 +521,11 @@ class Qube:
             if mode == "strict":
                 return None
 
-            # If this node doesn't exist in the
             elif mode == "next_level":
                 return node.replace(
                     children=(),
-                    metadata=self.metadata | {"is_leaf": not bool(self.children)},
+                    metadata=self.metadata
+                    | {"is_leaf": np.array([not bool(node.children)])},
                 )
 
             elif mode == "relaxed":
@@ -539,23 +554,22 @@ class Qube:
             if consume:
                 selection = {k: v for k, v in selection.items() if k != node.key}
 
-            # prune branches with no matches
-            if require_match and not node.children and not matched:
-                return None
-
             # Prune nodes that had had all their children pruned
             new_children = not_none(
                 select(c, selection, matched) for c in node.children
             )
-            # if node.key == "dataset": print(prune, [(c.key, c.values.values) for c in node.children], [c.key for c in new_children])
 
             if node.children and not new_children:
                 return None
 
+            metadata = dict(node.metadata)
+
+            if mode == "next_level":
+                metadata["is_leaf"] = np.array([not bool(node.children)])
+
             return node.replace(
                 children=new_children,
-                metadata=dict(self.metadata)
-                | ({"is_leaf": not bool(new_children)} if mode == "next_level" else {}),
+                metadata=metadata,
             )
 
         return self.replace(
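In "next_level" mode every surviving node now carries an explicit is_leaf marker in its metadata instead of the old on-the-fly dict union. A rough usage sketch (the tree string and leaf_nodes() iteration are borrowed from other files in this diff):

    from qubed import Qube

    q = Qube.from_tree("root, class=od, expver=0001/0002, param=1/2")
    s = q.select({"class": "od"}, mode="next_level", consume=False)
    for request, node in s.leaf_nodes():
        # is_leaf is stored as a one-element numpy array by this change
        print(node.key, node.metadata.get("is_leaf"))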
@@ -659,3 +673,6 @@ class Qube:
             return node.replace(metadata=frozendict({}))
 
         return self.transform(strip)
+
+    def display(self):
+        _display(self)
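With Qube.display() delegating to the new _display helper, a qube can render itself as rich HTML inside IPython and fall back to plain text elsewhere. A minimal sketch:

    from qubed import Qube

    q = Qube.from_tree("root, class=od, expver=0001/0002, param=1/2")
    q.display()  # HTML tree in a notebook, falls back to print(q) otherwise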
@@ -82,7 +82,7 @@ def qube_to_proto(q: Qube) -> bytes:
 def _proto_to_qube(cls: type, msg: qube_pb2.Qube) -> Qube:
     """protobuf Qube message → frozen Qube dataclass (new object)."""
 
-    return cls(
+    return cls.make_node(
         key=msg.key,
         values=_valuegroup_to_py(msg.values),
         metadata=frozendict(
@@ -193,7 +193,9 @@ def operation(
 
 
 def get_indices(metadata: dict[str, np.ndarray], indices: list[int] | slice):
-    return {k: v[..., indices] for k, v in metadata.items()}
+    return {
+        k: v[..., indices] for k, v in metadata.items() if isinstance(v, np.ndarray)
+    }
 
 
 def _operation(
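get_indices now only slices metadata values that are numpy arrays and silently drops everything else. A standalone sketch of that behaviour, re-implemented here purely for illustration:

    import numpy as np

    def get_indices(metadata, indices):
        # mirror of the changed function: non-array values are skipped
        return {k: v[..., indices] for k, v in metadata.items() if isinstance(v, np.ndarray)}

    meta = {"date": np.array([20240101, 20240102, 20240103]), "note": "not an array"}
    print(get_indices(meta, [0, 2]))  # {'date': array([20240101, 20240103])}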
@@ -4,6 +4,10 @@ import random
 from dataclasses import dataclass
 from typing import TYPE_CHECKING, Callable, Iterable
 
+try:
+    from IPython.display import display
+except ImportError:
+    display = None
 
 if TYPE_CHECKING:
     from .Qube import Qube
@@ -29,8 +33,7 @@ def summarize_node(
 
     while True:
         summary = node.summary(**kwargs)
-        if "is_leaf" in node.metadata and node.metadata["is_leaf"]:
-            summary += " 🌿"
         paths.append(summary)
         if len(summary) > max_summary_length:
             summary = summary[:max_summary_length] + "..."
@@ -43,6 +46,10 @@ def summarize_node(
             break
         node = node.children[0]
 
+    # Add a "..." to represent nodes that we don't know about
+    if (not node.children) and (not node.is_leaf):
+        summaries.append("...")
+
     return ", ".join(summaries), ",".join(paths), node
 
 
|
||||||
@ -111,6 +118,12 @@ def summarize_node_html(
|
|||||||
break
|
break
|
||||||
node = node.children[0]
|
node = node.children[0]
|
||||||
|
|
||||||
|
if (not node.children) and (not node.is_leaf):
|
||||||
|
summary = (
|
||||||
|
'<span class="qubed-node" data-path="" title="Truncated Nodes">...</span>'
|
||||||
|
)
|
||||||
|
summaries.append(summary)
|
||||||
|
|
||||||
return ", ".join(summaries), node
|
return ", ".join(summaries), node
|
||||||
|
|
||||||
|
|
||||||
@@ -239,3 +252,20 @@ def node_tree_to_html(
     """.replace("CSS_ID", css_id)
     nodes = "".join(_node_tree_to_html(node=node, depth=depth, info=info, **kwargs))
     return f"{js if include_js else ''}{css if include_css else ''}<pre class='qubed-tree' id='{css_id}'>{nodes}</pre>"
+
+
+def _display(qube: Qube, **kwargs):
+    if display is None:
+        print(qube)
+    else:
+
+        def info(node: Qube):
+            return f"""\
+structural_hash = {node.structural_hash}
+metadata = {dict(node.metadata)}
+is_root = {node.is_root}
+is_leaf = {node.is_leaf}
+"""
+
+        kwargs = {"info": info} | kwargs
+        display(qube.html(**kwargs))
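_display builds a default info callback and forwards any extra keyword arguments to qube.html(...). Assuming html() passes them through to node_tree_to_html (which takes an info parameter above), the tooltip text can be customised; a hypothetical sketch with an assumed module path:

    from qubed import Qube
    from qubed.tree_formatters import _display  # import path assumed from this diff

    def my_info(node):
        return f"key = {node.key}\nsummary = {node.summary()}"

    q = Qube.from_tree("root, class=od, expver=0001/0002, param=1/2")
    _display(q, info=my_info)  # user-supplied info overrides the default tooltip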
 76  src/rust/fdb.rs  (new file)
@@ -0,0 +1,76 @@
+use rsfdb::listiterator::KeyValueLevel;
+use rsfdb::request::Request;
+use rsfdb::FDB;
+
+use serde_json::{json, Value};
+use std::time::Instant;
+
+
+use std::collections::HashMap;
+
+pub mod tree;
+use std::sync::Arc;
+use std::sync::Mutex;
+use tree::TreeNode;
+
+#[pyclass(unsendable)]
+pub struct PyFDB {
+    pub fdb: FDB,
+}
+
+#[pymethods]
+impl PyFDB {
+    #[new]
+    #[pyo3(signature = (fdb_config=None))]
+    pub fn new(fdb_config: Option<&str>) -> PyResult<Self> {
+        let fdb = FDB::new(fdb_config)
+            .map_err(|e| PyErr::new::<pyo3::exceptions::PyRuntimeError, _>(e.to_string()))?;
+        Ok(PyFDB { fdb })
+    }
+
+    /// Traverse the FDB with the given request.
+    pub fn traverse_fdb(
+        &self,
+        py: Python<'_>,
+        request: HashMap<String, Vec<String>>,
+    ) -> PyResult<PyObject> {
+        let start_time = Instant::now();
+
+        let list_request = Request::from_json(json!(request))
+            .map_err(|e| PyErr::new::<pyo3::exceptions::PyValueError, _>(e.to_string()))?;
+
+        // Use `fdb_guard` instead of `self.fdb`
+        let list = self
+            .fdb
+            .list(&list_request, true, true)
+            .map_err(|e| PyErr::new::<pyo3::exceptions::PyRuntimeError, _>(e.to_string()))?;
+
+        let mut root = TreeNode::new(KeyValueLevel {
+            key: "root".to_string(),
+            value: "root".to_string(),
+            level: 0,
+        });
+
+        for item in list {
+            py.check_signals()?;
+
+            if let Some(request) = &item.request {
+                root.insert(&request);
+            }
+        }
+
+        let duration = start_time.elapsed();
+        println!("Total runtime: {:?}", duration);
+
+        let py_dict = root.to_py_dict(py)?;
+        Ok(py_dict)
+    }
+}
+
+use pyo3::prelude::*;
+
+#[pymodule]
+fn rust(m: &Bound<'_, PyModule>) -> PyResult<()> {
+    m.add_class::<PyFDB>()?;
+    Ok(())
+}
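src/rust/fdb.rs moves the previously commented-out PyFDB binding into its own module. A sketch of the intended Python-side call, assuming the class eventually gets registered with the compiled extension (the lib.rs in this commit does not add it yet) and that a working FDB installation is configured:

    from qubed.rust import PyFDB  # hypothetical import path once PyFDB is exported

    fdb = PyFDB(fdb_config=None)  # None lets rsfdb pick up the default FDB config
    tree = fdb.traverse_fdb({"class": ["od"], "expver": ["0001"]})
    print(tree)  # nested dict keyed by "key=value" strings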
 51  src/rust/json.rs  (new file)
@@ -0,0 +1,51 @@
+use pyo3::prelude::*;
+use serde::{Deserialize, Serialize};
+use serde_json::{Result, Value};
+use std::collections::HashMap;
+
+use crate::qube::{Node, NodeId, Qube};
+
+#[derive(Serialize, Deserialize, Debug)]
+#[serde(untagged)]
+enum Values {
+    Wildcard(String),
+    Enum(Vec<String>),
+}
+
+#[derive(Serialize, Deserialize, Debug)]
+struct JSONQube {
+    key: String,
+    values: Values,
+    metadata: HashMap<String, String>,
+    children: Vec<JSONQube>,
+}
+
+fn add_nodes(qube: &mut Qube, parent: NodeId, nodes: &[JSONQube]) -> Vec<NodeId> {
+    nodes
+        .iter()
+        .map(|json_node| {
+            let values = match &json_node.values {
+                Values::Wildcard(_) => &vec!["*"],
+                Values::Enum(strings) => &strings.iter().map(|s| s.as_str()).collect(),
+            };
+            let node_id = qube.add_node(parent, &json_node.key, values);
+
+            //
+            add_nodes(qube, node_id, &json_node.children);
+            node_id
+        })
+        .collect()
+}
+
+#[pyfunction]
+pub fn parse_qube() -> PyResult<Qube> {
+    let data = r#"{"key": "root", "values": ["root"], "metadata": {}, "children": [{"key": "frequency", "values": "*", "metadata": {}, "children": [{"key": "levtype", "values": "*", "metadata": {}, "children": [{"key": "param", "values": "*", "metadata": {}, "children": [{"key": "levelist", "values": "*", "metadata": {}, "children": [{"key": "domain", "values": ["a", "b", "c", "d"], "metadata": {}, "children": []}]}]}]}]}]}"#;
+
+    // Parse the string of data into serde_json::Value.
+    let json_qube: JSONQube = serde_json::from_str(data).expect("JSON parsing failed");
+
+    let mut qube = Qube::new();
+    let root = qube.root;
+    add_nodes(&mut qube, root, &json_qube.children);
+    Ok(qube)
+}
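Because Values is an untagged serde enum, the values field of each JSON node may be either the wildcard string "*" or a list of strings. The two accepted shapes, written as Python literals for illustration:

    wildcard_node = {"key": "param", "values": "*", "metadata": {}, "children": []}
    enum_node = {"key": "domain", "values": ["a", "b"], "metadata": {}, "children": []}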
 134  src/rust/lib.rs
@@ -1,140 +1,16 @@
 #![allow(unused_imports)]
-// #![allow(dead_code)]
-// #![allow(unused_variables)]
-
-use std::collections::HashMap;
-
 use pyo3::prelude::*;
 use pyo3::wrap_pyfunction;
 use pyo3::types::{PyDict, PyInt, PyList, PyString};
 
-#[pyfunction]
-fn hello(_py: Python, name: &str) -> PyResult<String> {
-    Ok(format!("Hello, {}!", name))
-}
+mod qube;
+mod json;
 
 #[pymodule]
 fn rust(m: &Bound<'_, PyModule>) -> PyResult<()> {
-    m.add_function(wrap_pyfunction!(hello, m)?).unwrap();
+    m.add_class::<qube::Qube>()?;
+    m.add_function(wrap_pyfunction!(json::parse_qube, m)?);
     Ok(())
 }
-
-#[derive(Debug, Copy, Clone, PartialEq, PartialOrd, Ord, Eq, Hash)]
-struct NodeId(usize);
-
-#[derive(Debug, Copy, Clone, PartialEq, PartialOrd, Ord, Eq, Hash)]
-struct StringId(usize);
-
-struct Node {
-    key: StringId,
-    metadata: HashMap<StringId, Vec<String>>,
-    parent: NodeId,
-    values: Vec<String>,
-    children: HashMap<StringId, Vec<NodeId>>,
-}
-
-
-struct Qube {
-    root: NodeId,
-    nodes: Vec<Node>,
-    strings: Vec<String>,
-}
-
-use std::ops;
-
-impl ops::Index<StringId> for Qube {
-    type Output = str;
-
-    fn index(&self, index: StringId) -> &str {
-        &self.strings[index.0]
-    }
-
-}
-
-impl ops::Index<NodeId> for Qube {
-    type Output = Node;
-
-    fn index(&self, index: NodeId) -> &Node {
-        &self.nodes[index.0]
-    }
-
-}
-
-// use rsfdb::listiterator::KeyValueLevel;
-// use rsfdb::request::Request;
-// use rsfdb::FDB;
-
-// use serde_json::{json, Value};
-// use std::time::Instant;
-
-
-// use std::collections::HashMap;
-
-// pub mod tree;
-// use std::sync::Arc;
-// use std::sync::Mutex;
-// use tree::TreeNode;
-
-// #[pyclass(unsendable)]
-// pub struct PyFDB {
-//     pub fdb: FDB,
-// }
-
-// #[pymethods]
-// impl PyFDB {
-//     #[new]
-//     #[pyo3(signature = (fdb_config=None))]
-//     pub fn new(fdb_config: Option<&str>) -> PyResult<Self> {
-//         let fdb = FDB::new(fdb_config)
-//             .map_err(|e| PyErr::new::<pyo3::exceptions::PyRuntimeError, _>(e.to_string()))?;
-//         Ok(PyFDB { fdb })
-//     }
-
-//     /// Traverse the FDB with the given request.
-//     pub fn traverse_fdb(
-//         &self,
-//         py: Python<'_>,
-//         request: HashMap<String, Vec<String>>,
-//     ) -> PyResult<PyObject> {
-//         let start_time = Instant::now();
-
-//         let list_request = Request::from_json(json!(request))
-//             .map_err(|e| PyErr::new::<pyo3::exceptions::PyValueError, _>(e.to_string()))?;
-
-//         // Use `fdb_guard` instead of `self.fdb`
-//         let list = self
-//             .fdb
-//             .list(&list_request, true, true)
-//             .map_err(|e| PyErr::new::<pyo3::exceptions::PyRuntimeError, _>(e.to_string()))?;
-
-//         let mut root = TreeNode::new(KeyValueLevel {
-//             key: "root".to_string(),
-//             value: "root".to_string(),
-//             level: 0,
-//         });
-
-//         for item in list {
-//             py.check_signals()?;
-
-//             if let Some(request) = &item.request {
-//                 root.insert(&request);
-//             }
-//         }
-
-//         let duration = start_time.elapsed();
-//         println!("Total runtime: {:?}", duration);
-
-//         let py_dict = root.to_py_dict(py)?;
-//         Ok(py_dict)
-//     }
-// }
-
-// use pyo3::prelude::*;
-
-// #[pymodule]
-// fn rust(m: &Bound<'_, PyModule>) -> PyResult<()> {
-//     m.add_class::<PyFDB>()?;
-//     Ok(())
-// }
 205  src/rust/qube.rs  (new file)
@@ -0,0 +1,205 @@
+use std::collections::HashMap;
+use std::hash::Hash;
+
+use lasso::{Rodeo, Spur};
+use pyo3::prelude::*;
+use pyo3::types::PyList;
+use std::num::NonZero;
+use std::ops;
+use std::sync::Arc;
+
+// This data structure uses the Newtype Index Pattern
+// See https://matklad.github.io/2018/06/04/newtype-index-pattern.html
+// See also https://github.com/nrc/r4cppp/blob/master/graphs/README.md#rcrefcellnode for a discussion of other approaches to trees and graphs in rust.
+// https://smallcultfollowing.com/babysteps/blog/2015/04/06/modeling-graphs-in-rust-using-vector-indices/
+
+// Index types use struct Id(NonZero<usize>)
+// This reserves 0 as a special value which allows Option<Id(NonZero<usize>)> to be the same size as usize.
+
+#[derive(Debug, Copy, Clone, PartialEq, PartialOrd, Ord, Eq, Hash)]
+pub(crate) struct NodeId(NonZero<usize>);
+
+// Allow node indices to index directly into Qubes:
+impl ops::Index<NodeId> for Qube {
+    type Output = Node;
+
+    fn index(&self, index: NodeId) -> &Node {
+        &self.nodes[index.0.get() - 1]
+    }
+}
+
+impl ops::IndexMut<NodeId> for Qube {
+    fn index_mut(&mut self, index: NodeId) -> &mut Node {
+        &mut self.nodes[index.0.get() - 1]
+    }
+}
+
+impl NodeId {
+    pub fn new_infallible(value: NonZero<usize>) -> NodeId {
+        NodeId(value)
+    }
+    pub fn new(value: usize) -> Option<NodeId> {
+        NonZero::new(value).map(NodeId)
+    }
+}
+
+#[derive(Debug, Copy, Clone, PartialEq, PartialOrd, Ord, Eq, Hash)]
+struct StringId(lasso::Spur);
+
+impl ops::Index<StringId> for lasso::Rodeo {
+    type Output = str;
+
+    fn index(&self, index: StringId) -> &str {
+        &self[index.0]
+    }
+}
+
+#[derive(Debug)]
+pub(crate) struct Node {
+    key: StringId,
+    metadata: HashMap<StringId, Vec<String>>,
+    parent: Option<NodeId>, // If not present, it's the root node
+    values: Vec<StringId>,
+    children: HashMap<StringId, Vec<NodeId>>,
+}
+
+#[pyclass]
+pub struct NodeRef {
+    id: NodeId,
+    qube: Py<Qube>,
+}
+
+#[pymethods]
+impl NodeRef {
+    fn __repr__(&self, py: Python) -> PyResult<String> {
+        let qube = self.qube.bind(py).borrow();
+        let node = &qube[self.id];
+        let key = &qube.strings[node.key];
+        let children = self
+            .get_children(py)
+            .iter()
+            .map(|child| child.__repr__(py))
+            .collect::<Result<Vec<_>, _>>()?
+            .join(", ");
+
+        Ok(format!("Node({}, {})", key, children))
+    }
+
+    fn __str__(&self, py: Python) -> String {
+        let qube = self.qube.bind(py).borrow();
+        let node = &qube[self.id];
+        let key = &qube.strings[node.key];
+        format!("Node({})", key)
+    }
+
+    #[getter]
+    pub fn get_children(&self, py: Python) -> Vec<NodeRef> {
+        let qube = self.qube.bind(py).borrow();
+        let node = &qube[self.id];
+        node.children
+            .values()
+            .flatten()
+            .map(|child_id| NodeRef {
+                id: *child_id,
+                qube: self.qube.clone_ref(py),
+            })
+            .collect()
+    }
+}
+
+impl Node {
+    fn new_root(q: &mut Qube) -> Node {
+        Node {
+            key: q.get_or_intern("root"),
+            metadata: HashMap::new(),
+            parent: None,
+            values: vec![],
+            children: HashMap::new(),
+        }
+    }
+
+    fn children(&self) -> impl Iterator<Item = &NodeId> {
+        self.children.values().flatten()
+    }
+}
+
+#[derive(Debug)]
+#[pyclass]
+pub struct Qube {
+    pub root: NodeId,
+    nodes: Vec<Node>,
+    strings: Rodeo,
+}
+
+impl Qube {
+    fn get_or_intern(&mut self, val: &str) -> StringId {
+        StringId(self.strings.get_or_intern(val))
+    }
+
+    pub fn add_node(&mut self, parent: NodeId, key: &str, values: &[&str]) -> NodeId {
+        let key_id = self.get_or_intern(key);
+        let values = values.iter().map(|val| self.get_or_intern(val)).collect();
+
+        // Create the node object
+        let node = Node {
+            key: key_id,
+            metadata: HashMap::new(),
+            values: values,
+            parent: Some(parent),
+            children: HashMap::new(),
+        };
+
+        // Insert it into the Qube arena and determine its id
+        self.nodes.push(node);
+        let node_id = NodeId::new(self.nodes.len()).unwrap();
+
+        // Add a reference to this node's id to the parents list of children.
+        let parent_node = &mut self[parent];
+        let key_group = parent_node.children.entry(key_id).or_insert(Vec::new());
+        key_group.push(node_id);
+
+        node_id
+    }
+}
+
+#[pymethods]
+impl Qube {
+    #[new]
+    pub fn new() -> Self {
+        let mut q = Qube {
+            root: NodeId::new(1).unwrap(),
+            nodes: Vec::new(),
+            strings: Rodeo::default(),
+        };
+
+        let root = Node::new_root(&mut q);
+        q.nodes.push(root);
+        q
+    }
+
+    #[getter]
+    fn get_root<'py>(slf: PyRef<'py, Self>, py: Python<'py>) -> PyResult<NodeRef> {
+        Ok(NodeRef {
+            id: slf.root,
+            qube: slf.into(),
+        })
+    }
+
+    fn __repr__(&self) -> String {
+        format!("{:?}", &self)
+    }
+
+    fn __str__<'py>(slf: PyRef<'py, Self>, py: Python<'py>) -> String {
+        format!("Qube()")
+    }
+
+    #[getter]
+    pub fn get_children<'py>(slf: PyRef<'py, Self>, py: Python<'py>) -> PyResult<Vec<NodeRef>> {
+        let root = NodeRef {
+            id: slf.root,
+            // `into_py` clones the existing Python handle; no new Qube object is allocated.
+            qube: slf.into(),
+        };
+        Ok(root.get_children(py))
+    }
+}
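The comments at the top of qube.rs describe the newtype-index / arena approach: nodes live in one flat Vec and refer to each other by 1-based NodeIds, with 0 reserved so that Option<NodeId> stays the size of a usize. A rough Python analogue of the same arena layout, purely illustrative and not part of the package:

    class Arena:
        def __init__(self):
            # id n (1-based) lives at self.nodes[n - 1]; 0 means "no node"
            self.nodes = []

        def add(self, node) -> int:
            self.nodes.append(node)
            return len(self.nodes)  # first id is 1, so 0 stays free as a sentinel

        def get(self, node_id: int):
            return self.nodes[node_id - 1]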
@@ -1,82 +0,0 @@
-// use pyo3::prelude::*;
-// use pyo3::types::PyDict;
-// use rsfdb::listiterator::KeyValueLevel;
-// use serde_json::Value;
-
-// #[derive(Debug)]
-// pub struct TreeNode {
-//     pub key: KeyValueLevel,
-//     pub children: Vec<TreeNode>,
-// }
-
-// impl TreeNode {
-//     pub fn new(key: KeyValueLevel) -> Self {
-//         TreeNode {
-//             key,
-//             children: Vec::new(),
-//         }
-//     }
-
-//     pub fn insert(&mut self, path: &[KeyValueLevel]) {
-//         if path.is_empty() {
-//             return;
-//         }
-
-//         let kvl = &path[0];
-
-//         // Check if a child with the same key and value exists
-//         if let Some(child) = self.children.iter_mut().find(|child| child.key == *kvl) {
-//             // Insert the remaining path into the existing child
-//             child.insert(&path[1..]);
-//         } else {
-//             // Create a new child node
-//             let mut new_child = TreeNode::new(kvl.clone());
-//             new_child.insert(&path[1..]);
-//             self.children.push(new_child);
-//         }
-//     }
-
-//     pub fn traverse<F>(&self, level: usize, callback: &F)
-//     where
-//         F: Fn(&TreeNode, usize),
-//     {
-//         callback(self, level);
-//         for child in &self.children {
-//             child.traverse(level + 1, callback);
-//         }
-//     }
-
-//     pub fn to_json(&self) -> Value {
-//         let formatted_key = format!("{}={}", self.key.key, self.key.value);
-
-//         let children_json: Value = if self.children.is_empty() {
-//             Value::Object(serde_json::Map::new())
-//         } else {
-//             Value::Object(
-//                 self.children
-//                     .iter()
-//                     .map(|child| {
-//                         (
-//                             format!("{}={}", child.key.key, child.key.value),
-//                             child.to_json(),
-//                         )
-//                     })
-//                     .collect(),
-//             )
-//         };
-
-//         // Combine the formatted key with children
-//         serde_json::json!({ formatted_key: children_json })
-//     }
-
-//     pub fn to_py_dict(&self, py: Python) -> PyResult<PyObject> {
-//         let py_dict = PyDict::new(py);
-
-//         for child in &self.children {
-//             let child_key = format!("{}={}", child.key.key, child.key.value);
-//             py_dict.set_item(child_key, child.to_py_dict(py)?)?;
-//         }
-
-//         Ok(py_dict.to_object(py))
-//     }
-// }
@@ -58,8 +58,14 @@ if "LOCAL_CACHE" in os.environ:
    with open("../tests/example_qubes/extremes_dt.json") as f:
        qubes["climate-dt"] = qubes["climate-dt"] | Qube.from_json(json.load(f))
 
-    with open("../config/climate-dt/language.yaml", "r") as f:
+    with open("../tests/example_qubes/od.json") as f:
+        qubes["climate-dt"] = qubes["climate-dt"] | Qube.from_json(json.load(f))
+
+    with open("../config/language/language.yaml", "r") as f:
        mars_language = yaml.safe_load(f)["_field"]
+
+    with open("../config/language/paramids.yaml", "r") as f:
+        params = yaml.safe_load(f)
 else:
    print("Getting climate and extremes dt data from github")
    qubes["climate-dt"] = Qube.from_json(
@@ -172,11 +178,11 @@ async def union(
 
 
 def follow_query(request: dict[str, str | list[str]], qube: Qube):
-    s = qube.select(request, mode="next_level", prune=True, consume=False)
+    s = qube.select(request, mode="next_level", consume=False)
     by_path = defaultdict(lambda: {"paths": set(), "values": set()})
 
     for request, node in s.leaf_nodes():
-        if not node.metadata["is_leaf"]:
+        if not node.metadata.get("is_leaf", True):
            by_path[node.key]["values"].update(node.values.values)
            by_path[node.key]["paths"].add(frozendict(request))
 
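The switch to metadata.get("is_leaf", True) makes nodes that carry no marker count as leaves instead of raising KeyError. In plain-Python terms (scalar bools for brevity; the real values are one-element numpy arrays):

    meta_with_marker = {"is_leaf": False}  # written by select(..., mode="next_level")
    meta_without = {}                      # node that never got a marker
    print(not meta_with_marker.get("is_leaf", True))  # True: offer deeper levels
    print(not meta_without.get("is_leaf", True))      # False: treat it as a leaf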
@@ -282,18 +288,32 @@ async def get_STAC(
    def make_link(key_name, paths, values):
        """Take a MARS Key and information about which paths matched up to this point and use it to make a STAC Link"""
        href_template = f"/stac?{request_params}{'&' if request_params else ''}{key_name}={{{key_name}}}"
-        values_from_mars_language = mars_language.get(key_name, {}).get("values", [])
 
-        if all(isinstance(v, list) for v in values_from_mars_language):
-            value_descriptions_dict = {
-                k: v[-1]
-                for v in values_from_mars_language
-                if len(v) > 1
-                for k in v[:-1]
-            }
-            value_descriptions = [value_descriptions_dict.get(v, "") for v in values]
-            if not any(value_descriptions):
-                value_descriptions = None
+        print(f"{key_name = }")
+        if key_name == "param":
+            print(params)
+            values_from_mars_language = params
+            value_descriptions = [
+                max(params.get(int(v), [""]), key=len) for v in values
+            ]
+            print(value_descriptions)
+        else:
+            values_from_mars_language = mars_language.get(key_name, {}).get(
+                "values", []
+            )
+
+            if all(isinstance(v, list) for v in values_from_mars_language):
+                value_descriptions_dict = {
+                    k: v[-1]
+                    for v in values_from_mars_language
+                    if len(v) > 1
+                    for k in v[:-1]
+                }
+                value_descriptions = [
+                    value_descriptions_dict.get(v, "") for v in values
+                ]
+                if not any(value_descriptions):
+                    value_descriptions = None
 
        return {
            "title": key_name,
@@ -9,7 +9,7 @@ pre#qube {
    margin-left: 0;
 }
 
-.qubed-node a {
+.qubed-level a {
    margin-left: 10px;
    text-decoration: none;
 }
@@ -23,7 +23,7 @@ pre#qube {
    display: block;
 }
 
-summary:hover,span.leaf:hover {
+span.qubed-node:hover {
    background-color: #f0f0f0;
 }
 
@@ -35,7 +35,7 @@ pre#qube {
    content: " ▼";
 }
 
-.leaf {
+.qubed-level {
    text-overflow: ellipsis;
    overflow: hidden;
    text-wrap: nowrap;
 13  test_scripts/rust.py  (new file)
@@ -0,0 +1,13 @@
+from qubed.rust import Qube, parse_qube
+
+q = Qube()
+print(q)
+
+print(f"repr: {q.root!r} str: {q.root}")
+
+q = parse_qube()
+print(repr(q))
+
+r = q.root
+
+print(f"{q.root = }, {q.children = }")
@@ -1,5 +0,0 @@
-from qubed.rust import hello
-
-
-def test_hello():
-    assert hello("World") == "Hello, World!"
@@ -1,41 +1,27 @@
 from qubed import Qube
 
-q = Qube.from_dict(
-    {
-        "class=od": {
-            "expver=0001": {"param=1": {}, "param=2": {}},
-            "expver=0002": {"param=1": {}, "param=2": {}},
-        },
-        "class=rd": {"param=1": {}, "param=2": {}, "param=3": {}},
-    }
-)
+q = Qube.from_tree("""
+root
+├── class=od, expver=0001/0002, param=1/2
+└── class=rd, param=1/2/3
+""")
 
 
 def test_consumption():
-    assert q.select({"expver": "0001"}, consume=True) == Qube.from_dict(
-        {"class=od": {"expver=0001": {"param=1": {}, "param=2": {}}}}
+    assert q.select({"expver": "0001"}, consume=True) == Qube.from_tree(
+        "root, class=od, expver=0001, param=1/2"
     )
 
 
 def test_consumption_off():
-    expected = Qube.from_dict(
-        {
-            "class=od": {"expver=0001": {"param=1": {}, "param=2": {}}},
-            "class=rd": {"param=1": {}, "param=2": {}, "param=3": {}},
-        }
-    )
+    expected = Qube.from_tree("""
+root
+├── class=od, expver=0001, param=1/2
+└── class=rd, param=1/2/3
+""")
     assert q.select({"expver": "0001"}, consume=False) == expected
 
 
-def test_require_match():
-    expected = Qube.from_dict(
-        {
-            "class=od": {"expver=0001": {"param=1": {}, "param=2": {}}},
-        }
-    )
-    assert q.select({"expver": "0001"}, require_match=True) == expected
-
-
 def test_function_input_to_select():
     q = Qube.from_tree("""
 root, frequency=6:00:00