| author | Daniel Baumann <daniel.baumann@progress-linux.org> | 2023-12-19 11:01:55 +0000 |
|---|---|---|
| committer | Daniel Baumann <daniel.baumann@progress-linux.org> | 2023-12-19 11:01:55 +0000 |
| commit | f1c2dbe3b17a0d5edffbb65b85b642d0bb2756c5 (patch) | |
| tree | 5dce0fe2a11381761496eb973c20750f44db56d5 /sqlglotrs/src/lib.rs | |
| parent | Releasing debian version 20.1.0-1. (diff) | |
| download | sqlglot-f1c2dbe3b17a0d5edffbb65b85b642d0bb2756c5.tar.xz, sqlglot-f1c2dbe3b17a0d5edffbb65b85b642d0bb2756c5.zip | |
Merging upstream version 20.3.0.
Signed-off-by: Daniel Baumann <daniel.baumann@progress-linux.org>
Diffstat (limited to 'sqlglotrs/src/lib.rs')
-rw-r--r-- | sqlglotrs/src/lib.rs | 86
1 file changed, 86 insertions, 0 deletions
```diff
diff --git a/sqlglotrs/src/lib.rs b/sqlglotrs/src/lib.rs
new file mode 100644
index 0000000..c962887
--- /dev/null
+++ b/sqlglotrs/src/lib.rs
@@ -0,0 +1,86 @@
+use pyo3::prelude::*;
+use pyo3::types::{PyList, PyNone, PyString};
+
+mod settings;
+mod tokenizer;
+mod trie;
+
+pub use self::settings::{
+    TokenType, TokenTypeSettings, TokenizerDialectSettings, TokenizerSettings,
+};
+pub use self::tokenizer::Tokenizer;
+
+#[derive(Debug)]
+#[pyclass]
+pub struct Token {
+    #[pyo3(get, name = "token_type_index")]
+    pub token_type: TokenType,
+    #[pyo3(get, set, name = "token_type")]
+    pub token_type_py: PyObject,
+    #[pyo3(get)]
+    pub text: Py<PyString>,
+    #[pyo3(get)]
+    pub line: usize,
+    #[pyo3(get)]
+    pub col: usize,
+    #[pyo3(get)]
+    pub start: usize,
+    #[pyo3(get)]
+    pub end: usize,
+    #[pyo3(get)]
+    pub comments: Py<PyList>,
+}
+
+impl Token {
+    pub fn new(
+        token_type: TokenType,
+        text: String,
+        line: usize,
+        col: usize,
+        start: usize,
+        end: usize,
+        comments: Vec<String>,
+    ) -> Token {
+        Python::with_gil(|py| Token {
+            token_type,
+            token_type_py: PyNone::get(py).into(),
+            text: PyString::new(py, &text).into(),
+            line,
+            col,
+            start,
+            end,
+            comments: PyList::new(py, &comments).into(),
+        })
+    }
+
+    pub fn append_comments(&self, comments: &mut Vec<String>) {
+        Python::with_gil(|py| {
+            let pylist = self.comments.as_ref(py);
+            for comment in comments.iter() {
+                if let Err(_) = pylist.append(comment) {
+                    panic!("Failed to append comments to the Python list");
+                }
+            }
+        });
+        // Simulate `Vec::append`.
+        let _ = std::mem::replace(comments, Vec::new());
+    }
+}
+
+#[pymethods]
+impl Token {
+    #[pyo3(name = "__repr__")]
+    fn python_repr(&self) -> PyResult<String> {
+        Ok(format!("{:?}", self))
+    }
+}
+
+#[pymodule]
+fn sqlglotrs(_py: Python<'_>, m: &PyModule) -> PyResult<()> {
+    m.add_class::<Token>()?;
+    m.add_class::<TokenTypeSettings>()?;
+    m.add_class::<TokenizerSettings>()?;
+    m.add_class::<TokenizerDialectSettings>()?;
+    m.add_class::<Tokenizer>()?;
+    Ok(())
+}
```
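For context only (not part of the diff): a minimal sketch of how the compiled extension could be consumed from Python. It assumes the `sqlglotrs` crate has already been built into the current environment, for example with `maturin develop` run from the `sqlglotrs/` directory; that build step is an assumption about the tooling, not something shown in this commit. The attribute names are taken from the `#[pymodule]` registration above.

```python
# Minimal sketch; assumes the sqlglotrs extension module has been compiled and
# installed into the active Python environment (e.g. via `maturin develop`).
import sqlglotrs

# These classes are registered by the #[pymodule] function in lib.rs.
print(sqlglotrs.Token)
print(sqlglotrs.TokenTypeSettings)
print(sqlglotrs.TokenizerSettings)
print(sqlglotrs.TokenizerDialectSettings)
print(sqlglotrs.Tokenizer)
```

Note that, per the `#[pyo3(...)]` attributes on the struct, the Rust `token_type` field is exposed to Python as `token_type_index`, while the Python-visible `token_type` attribute is the settable `token_type_py` object.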