initial commit

Author: Jonas Maier, 2023-02-17 20:29:30 +01:00
Commit: f3292d32a4
7 changed files with 215 additions and 0 deletions

.gitignore (new file)
@@ -0,0 +1 @@
target/

example/Cargo.lock (new file, generated)
@@ -0,0 +1,14 @@
# This file is automatically @generated by Cargo.
# It is not intended for manual editing.
version = 3

[[package]]
name = "example"
version = "0.1.0"
dependencies = [
 "inline-sql",
]

[[package]]
name = "inline-sql"
version = "0.1.0"

example/Cargo.toml (new file)
@@ -0,0 +1,9 @@
[package]
name = "example"
version = "0.1.0"
edition = "2021"

# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html

[dependencies]
inline-sql = { path = "../inline-sql" }

example/src/main.rs (new file)
@@ -0,0 +1,12 @@
extern crate inline_sql;
use inline_sql::sql;

sql! {
    CREATE TABLE MY_TABLE (
        X INTEGER,
        Y INTEGER,
        PRIMARY KEY (Y)
    )
}

fn main() {}

inline-sql/Cargo.lock (new file, generated)
@@ -0,0 +1,7 @@
# This file is automatically @generated by Cargo.
# It is not intended for manual editing.
version = 3

[[package]]
name = "inline-sql"
version = "0.1.0"

inline-sql/Cargo.toml (new file)
@@ -0,0 +1,9 @@
[package]
name = "inline-sql"
version = "0.1.0"
edition = "2021"

[lib]
proc-macro = true

[dependencies]
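
The library crate below gates on the unstable proc_macro_span and proc_macro_span_shrink features, so building either crate requires a nightly toolchain. A minimal sketch of a rust-toolchain.toml that would pin one (a hypothetical file, not part of this commit):

[toolchain]
# Hypothetical: pins the nightly channel needed by the feature gates in inline-sql/src/lib.rs.
channel = "nightly"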

inline-sql/src/lib.rs (new file)
@@ -0,0 +1,163 @@
#![feature(proc_macro_span, proc_macro_span_shrink)]
extern crate proc_macro;

use std::{collections::HashMap, ops::Range};
use proc_macro::{Delimiter, Span, TokenStream, TokenTree};

/// For every source line of the macro input, the column range covered by tokens on that line.
#[derive(Debug, Default, Clone)]
struct Spacing {
    sizes: HashMap<usize, Range<usize>>,
}
/// The bounding box of the macro input: its leftmost column plus its first and last line.
struct Location {
    indent: usize,
    line_start: usize,
    line_end: usize,
}

impl Location {
    fn new(line: usize, start: usize) -> Self {
        Self {
            indent: start,
            line_start: line,
            line_end: line,
        }
    }

    /// Grow this bounding box so that it also covers `other`.
    fn combine(self, other: Self) -> Self {
        Self {
            indent: self.indent.min(other.indent),
            line_start: self.line_start.min(other.line_start),
            line_end: self.line_end.max(other.line_end),
        }
    }

    fn lines(&self) -> usize {
        self.line_end - self.line_start + 1
    }

    fn indent(&self) -> usize {
        self.indent
    }
}
impl From<Spacing> for Location {
    fn from(spacing: Spacing) -> Location {
        // Fold every registered line into one bounding box.
        // Panics if the macro input was empty.
        let first = spacing.sizes.iter().next().unwrap();
        let mut loc = Location::new(*first.0, first.1.start);
        for s in spacing.sizes.iter() {
            loc = loc.combine(Location::new(*s.0, s.1.start));
        }
        loc
    }
}
/// A character grid covering the macro input's bounding box; tokens are
/// drawn into it at their original line/column positions.
struct Canvas {
    location: Location,
    buffers: Vec<Vec<char>>,
}

impl Canvas {
    fn new(location: Location, spacing: Spacing) -> Self {
        // One buffer per source line, pre-filled with spaces up to the
        // rightmost column used on that line (relative to the indent).
        let mut buffers = vec![vec![]; location.lines()];
        for &line in spacing.sizes.keys() {
            let line_length = spacing.sizes[&line].end - location.indent();
            let idx = line - location.line_start;
            buffers[idx] = vec![' '; line_length];
        }
        Self { location, buffers }
    }

    /// Write `value` into the grid at the position of `span`.
    fn render_at(&mut self, span: Span, value: &str) {
        let idx = span.start().line - self.location.line_start;
        let offset = span.start().column - self.location.indent();
        for (i, c) in value.chars().enumerate() {
            self.buffers[idx][offset + i] = c;
        }
    }

    fn render_tk(&mut self, token: TokenTree) {
        self.render_at(token.span(), &token.to_string());
    }

    /// Render a token stream, recursing into groups so that each delimiter is
    /// drawn at its own span instead of the group being flattened by `to_string`.
    fn render(&mut self, tokens: TokenStream) {
        for token in tokens {
            if let proc_macro::TokenTree::Group(group) = token {
                self.render(group.stream());
                let (open, close) = match group.delimiter() {
                    Delimiter::Parenthesis => ("(", ")"),
                    Delimiter::Brace => ("{", "}"),
                    Delimiter::Bracket => ("[", "]"),
                    Delimiter::None => todo!(),
                };
                self.render_at(group.span_open(), open);
                self.render_at(group.span_close(), close);
            } else {
                self.render_tk(token);
            }
        }
    }
}
impl ToString for Canvas {
    fn to_string(&self) -> String {
        let mut buf = String::new();
        for line in self.buffers.iter() {
            for &ch in line.iter() {
                buf.push(ch);
            }
            buf += "\n";
        }
        buf
    }
}
impl Spacing {
    /// Record that `line` has tokens covering columns `start..end`
    /// (begin is inclusive, end is exclusive).
    fn register(&mut self, line: usize, start: usize, end: usize) {
        let range = self.sizes.entry(line).or_insert(start..end);
        range.start = range.start.min(start);
        range.end = range.end.max(end);
    }

    fn register_span(&mut self, span: Span) {
        for line in span.start().line..=span.end().line {
            let start = span.start().column.min(span.end().column);
            let end = span.start().column.max(span.end().column);
            self.register(line, start, end);
        }
    }

    /// Walk the token stream and register the span of every token,
    /// descending into groups so their delimiters are covered too.
    fn visit(&mut self, tokens: TokenStream) {
        for token in tokens {
            match token {
                TokenTree::Group(e) => {
                    self.register_span(e.span_open());
                    self.register_span(e.span_close());
                    self.visit(e.stream());
                }
                _ => self.register_span(token.span()),
            }
        }
    }

    /// Debug helper: draw one row per registered line, with `-` for the
    /// margin before the covered range and `X` for the covered columns.
    fn print(&self) {
        let mut lines = self
            .sizes
            .iter()
            .map(|e| (*e.0, e.1.clone()))
            .collect::<Vec<(usize, Range<usize>)>>();
        lines.sort_by(|a, b| a.0.cmp(&b.0));
        for (_, range) in lines {
            for _ in 1..(range.start) {
                print!("-");
            }
            for _ in range {
                print!("X");
            }
            println!();
        }
    }
}
#[proc_macro]
pub fn sql(tokens: TokenStream) -> TokenStream {
    // Measure which lines and columns the input covers...
    let mut spacing = Spacing::default();
    spacing.visit(tokens.clone());
    let loc = spacing.clone().into();
    // ...then redraw the tokens on a canvas and print the reconstructed
    // two-dimensional layout at compile time.
    let mut canvas = Canvas::new(loc, spacing.clone());
    canvas.render(tokens);
    print!("{}", canvas.to_string());
    // The macro currently expands to nothing.
    "".parse().unwrap()
}
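
For the example crate above, the intended compile-time effect (a sketch; the exact spacing depends on the nightly span API and on the indentation used in main.rs) is that cargo build prints the statement reconstructed relative to its leftmost token, roughly:

CREATE TABLE MY_TABLE (
    X INTEGER,
    Y INTEGER,
    PRIMARY KEY (Y)
)

Because sql! returns an empty token stream, the macro otherwise expands to nothing.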