initial commit

Jonas Maier 2023-03-11 13:55:34 +01:00
commit 12d131ded0
12 changed files with 335 additions and 0 deletions

.gitignore vendored Normal file

@@ -0,0 +1,14 @@
# Generated by Cargo
# will have compiled files and executables
debug/
target/
# Remove Cargo.lock from gitignore if creating an executable, leave it for libraries
# More information here https://doc.rust-lang.org/cargo/guide/cargo-toml-vs-cargo-lock.html
Cargo.lock
# These are backup files generated by rustfmt
**/*.rs.bk
# MSVC Windows builds of rustc generate these, which store debugging information
*.pdb

inline-postgres-impl/Cargo.toml Normal file

@@ -0,0 +1,9 @@
[package]
name = "inline-postgres-impl"
version = "0.1.0"
edition = "2021"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]
postgres = "0.19"

inline-postgres-impl/src/lib.rs Normal file

@@ -0,0 +1,42 @@
use core::marker::PhantomData;
pub use postgres::*;
pub mod prelude {
pub use super::{Fetch, Exec};
}
pub struct Query<'a, O> {
pub code: &'static str,
pub vals: &'a [&'a (dyn types::ToSql + Sync)],
pub _phantom: PhantomData<O>,
}
impl<O> Query<'_, O> {
pub fn execute_on(self, client: &mut Client) -> Result<u64, Error> {
client.execute(self.code, self.vals)
}
}
pub trait Fetch {
fn fetch<'a, O: From<row::Row>>(&mut self, query: Query<'a, O>) -> Result<Vec<O>, Error>;
}
impl Fetch for Client {
fn fetch<'a, O: From<row::Row>>(&mut self, query: Query<'a, O>) -> Result<Vec<O>, Error> {
let res = self
.query(query.code, query.vals)?
.into_iter()
.map(Into::into)
.collect();
Ok(res)
}
}
pub trait Exec {
fn exec<'a, O: From<row::Row>>(&mut self, query: Query<'a, O>) -> Result<(), Error>;
}
impl Exec for Client {
fn exec<'a, O: From<row::Row>>(&mut self, query: Query<'a, O>) -> Result<(), Error> {
self.query(query.code, query.vals)?;
Ok(())
}
}
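The `pg!` macro (added below in inline-postgres-macros) generates exactly this kind of plumbing. As a minimal hand-written sketch of the runtime API, assuming a connected `Client`; the `Out` struct and `run` function are illustrative only, not part of the crate:

use core::marker::PhantomData;

use inline_postgres_impl::prelude::*;
use inline_postgres_impl::{row::Row, Client, Error, Query};

// Illustrative output type; `pg!` later derives an equivalent struct for you.
#[derive(Debug)]
struct Out {
    a: i32,
}

impl From<Row> for Out {
    fn from(record: Row) -> Self {
        Out { a: record.get::<&'static str, i32>("a") }
    }
}

fn run(client: &mut Client, x: i32) -> Result<Vec<Out>, Error> {
    // `$1` is bound to `x`, mirroring the `$N` markers the macro emits for `{x}`.
    let query = Query {
        code: "select $1::int4 as a",
        vals: &[&x],
        _phantom: PhantomData::<Out>,
    };
    client.fetch(query)
}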

inline-postgres-macros/Cargo.toml Normal file

@@ -0,0 +1,11 @@
[package]
name = "inline-postgres-macros"
version = "0.1.0"
edition = "2021"
[lib]
proc-macro = true
[dependencies]
quote = "1.0"
proc-macro2 = "1.0"

inline-postgres-macros/src/dim.rs Normal file

@@ -0,0 +1,32 @@
use proc_macro::{TokenStream, Span, TokenTree};
#[derive(Clone, Debug)]
pub struct Dimensions {
pub first_line: usize,
pub last_line: usize,
pub indent: usize,
}
impl Dimensions {
pub fn from_tokens(tokens: TokenStream) -> Self {
let mut dim = Self {
first_line: usize::MAX,
last_line: usize::MIN,
indent: usize::MAX,
};
dim.visit_tokens(tokens);
dim
}
fn adjust(&mut self, span: Span) {
self.first_line = self.first_line.min(span.start().line);
self.last_line = self.last_line.max(span.end().line);
self.indent = self.indent.min(span.start().column);
}
fn visit_tokens(&mut self, tokens: TokenStream) {
for token in tokens {
self.adjust(token.span());
if let TokenTree::Group(g) = token {
self.visit_tokens(g.stream());
}
}
}
}

inline-postgres-macros/src/lib.rs Normal file

@@ -0,0 +1,73 @@
#![feature(proc_macro_span)]
use proc_macro::{TokenStream, TokenTree};
use quote::quote;
use proc_macro2::TokenStream as TokenStream2;
mod dim;
mod visit;
#[proc_macro]
pub fn pg(tokens: TokenStream) -> TokenStream {
let dimensions = dim::Dimensions::from_tokens(tokens.clone());
let query_data = visit::Visitor::process_sql(dimensions, tokens);
let values: TokenStream2 = query_data
.captured_values()
.iter()
.cloned()
.map(|tt| {
let tt: TokenStream2 = tt.into();
quote!(&(#tt),)
})
.fold(quote!{}, |a, b| quote!{#a #b});
let values = quote!{&[#values]};
let struct_content_def: TokenStream2 = query_data
.returned_values()
.iter()
.cloned()
.map(|o| {
let ident: TokenStream2 = Some(TokenTree::Ident(o.ident)).into_iter().collect::<TokenStream>().into();
let typ: TokenStream2 = o.typ.into();
quote!(
#[allow(unused)]
#ident : #typ,
)
})
.fold(quote!{}, |a, b| quote!{#a #b});
let struct_content_use: TokenStream2 = query_data
.returned_values()
.iter()
.cloned()
.map(|o| {
let ident: TokenStream2 = Some(TokenTree::Ident(o.ident)).into_iter().collect::<TokenStream>().into();
let typ: TokenStream2 = o.typ.into();
let ident_str: TokenStream2 = format!("\"{}\"", ident).parse().unwrap();
quote!(#ident : record.get::<&'static str, #typ>(#ident_str),)
})
.fold(quote!{}, |a, b| quote!{#a #b});
let query = query_data.code();
quote!{{
#[derive(Debug)]
struct r#struct {
#struct_content_def
}
impl From<::inline_postgres::row::Row> for r#struct {
fn from(record: ::inline_postgres::row::Row) -> Self {
r#struct {
#struct_content_use
}
}
}
::inline_postgres::Query {
code: #query,
vals: #values,
_phantom: ::core::marker::PhantomData::<r#struct>,
}
}}.into()
}
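To make the generated code concrete: an invocation such as `client.fetch(pg! { select {x} as [n: i32] })` expands to roughly the block below. The exact whitespace inside the SQL string literal differs, because the visitor reproduces the source layout, but the shape is what the quote! above emits:

{
    #[derive(Debug)]
    struct r#struct {
        #[allow(unused)]
        n: i32,
    }
    impl From<::inline_postgres::row::Row> for r#struct {
        fn from(record: ::inline_postgres::row::Row) -> Self {
            r#struct {
                n: record.get::<&'static str, i32>("n"),
            }
        }
    }
    ::inline_postgres::Query {
        code: "select $1 as n",
        vals: &[&(x),],
        _phantom: ::core::marker::PhantomData::<r#struct>,
    }
}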

inline-postgres-macros/src/visit.rs Normal file

@@ -0,0 +1,94 @@
use proc_macro::{Delimiter, Ident, Span, TokenStream, TokenTree};
use crate::dim::Dimensions;
#[derive(Clone)]
pub struct Output {
pub ident: Ident,
pub typ: TokenStream,
}
pub struct Visitor {
current_line: usize,
current_column: usize,
dims: Dimensions,
buffer: String,
values: Vec<TokenStream>,
outs: Vec<Output>,
}
impl Visitor {
pub fn code(&self) -> &str {
&self.buffer
}
pub fn captured_values(&self) -> &[TokenStream] {
&self.values
}
pub fn returned_values(&self) -> &[Output] {
&self.outs
}
pub fn process_sql(dims: Dimensions, tokens: TokenStream) -> Self {
let mut visitor = Visitor {
current_line: dims.first_line,
current_column: dims.indent,
dims,
buffer: String::new(),
values: Vec::new(),
outs: Vec::new(),
};
visitor.visit(tokens);
visitor
}
fn print(&mut self, object: &str, span: Span) {
while self.current_line < span.start().line {
self.buffer += "\n";
self.current_line += 1;
self.current_column = self.dims.indent;
}
while self.current_column < span.start().column {
self.buffer += " ";
self.current_column += 1;
}
self.buffer += object;
self.current_line = span.end().line;
self.current_column = span.end().column;
}
fn visit(&mut self, tokens: TokenStream) {
for token in tokens {
if let TokenTree::Group(group) = token {
if group.delimiter() == Delimiter::Brace {
self.values.push(group.stream());
let marker = format!("${}", self.values.len());
self.print(&marker, group.span_open());
} else if group.delimiter() == Delimiter::Bracket {
let tokens = group.stream().into_iter().collect::<Vec<TokenTree>>();
assert!(tokens.len() >= 3);
let ident = match &tokens[0] {
TokenTree::Ident(i) => i.clone(),
_ => panic!("we need an identifier here"),
};
match &tokens[1] {
TokenTree::Punct(p) => {
assert_eq!(':', p.as_char());
}
_ => panic!("expected a colon to separate the variable name and the type"),
}
let typ = tokens.into_iter().skip(2).collect::<TokenStream>();
self.print(&ident.to_string(), group.span());
self.outs.push(Output { ident, typ });
} else {
let (open, close) = match group.delimiter() {
Delimiter::Parenthesis => ("(", ")"),
Delimiter::None => ("", ""),
Delimiter::Bracket | Delimiter::Brace => unreachable!(),
};
self.print(open, group.span_open());
self.visit(group.stream());
self.print(close, group.span_close());
}
} else {
self.print(&token.to_string(), token.span());
}
}
}
}
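A by-hand trace of `process_sql` (spacing approximate, since `print` pads to the original columns): for the input `select 1 as [a: i32], generate_series(1, {x}) as [b: i32]`, the visitor yields roughly:

// code():            "select 1 as a, generate_series(1, $1) as b"
// captured_values(): one TokenStream per `{...}` group (here just `x`, bound to $1)
// returned_values(): one Output { ident, typ } per `[name: Type]` group (here a: i32 and b: i32)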

inline-postgres/Cargo.toml Normal file

@@ -0,0 +1,10 @@
[package]
name = "inline-postgres"
version = "0.1.0"
edition = "2021"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]
inline-postgres-impl = { path = "../inline-postgres-impl" }
inline-postgres-macros = { path = "../inline-postgres-macros" }

@@ -0,0 +1,18 @@
use inline_postgres as pg;
use inline_postgres::prelude::*;
fn main() -> Result<(), pg::Error> {
let mut client = pg::Client::connect("host=localhost user=postgres", pg::NoTls)?;
let x = 5;
let rows = client.fetch(pg! {
select 1 as [a:i32], generate_series(1, {x}) as [b: i32]
})?;
for row in rows {
println!("{row:?}");
}
Ok(())
}

@@ -0,0 +1,14 @@
use inline_postgres as pg;
use inline_postgres::prelude::*;
fn main() -> Result<(), pg::Error> {
let mut client = pg::Client::connect("host=localhost user=postgres", pg::NoTls)?;
let x = 10;
client.exec(pg! {
select 1, generate_series(1, {x}) as number
})?;
Ok(())
}

@@ -0,0 +1,12 @@
use inline_postgres as pg;
use inline_postgres::prelude::*;
fn main() -> Result<(), pg::Error> {
let mut client = pg::Client::connect("host=localhost user=postgres", pg::NoTls)?;
client.exec(pg! {
select 1, generate_series(1, 10) as x
})?;
Ok(())
}

inline-postgres/src/lib.rs Normal file

@@ -0,0 +1,6 @@
pub use inline_postgres_impl::*;
pub mod prelude {
pub use inline_postgres_impl::prelude::*;
pub use inline_postgres_macros::*;
}