#![recursion_limit = "128"]
use proc_macro2::*;
use quote::*;
use regex::Regex;
use std::iter::once;
use html5ever::tendril::*;
use html5ever::tokenizer::BufferQueue;
use html5ever::tokenizer::{
CharacterTokens, CommentToken, EndTag, NullCharacterToken, StartTag, TagToken,
};
use html5ever::tokenizer::{
EOFToken, ParseError, Token, TokenSink, TokenSinkResult, Tokenizer, TokenizerOpts,
};
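/// Attribute macro that derives `cuach::Render` for a struct or enum from
/// an HTML template looked up under `templates/` in the crate root.
///
/// `path = "..."` names the template file; `regex = "..."` optionally
/// overrides the interpolation pattern. A minimal sketch of the intended
/// use (`index.html` and its contents are hypothetical):
///
/// ```ignore
/// #[template(path = "index.html")]
/// struct Index {
///     title: String,
/// }
///
/// // templates/index.html might contain: <h1>{ self.title }</h1>
/// let mut out = String::new();
/// Index { title: "hello".into() }.render_into(&mut out)?;
/// ```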
#[proc_macro_attribute]
pub fn template(
attr: proc_macro::TokenStream,
item: proc_macro::TokenStream,
) -> proc_macro::TokenStream {
let attr = proc_macro2::TokenStream::from(attr);
let item = proc_macro2::TokenStream::from(item);
let mut file_ = String::new();
let mut attr = attr.into_iter();
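    // Default interpolation pattern: `{{` and `}}` escape literal braces,
    // while `{ ... }` (no `}` or newline inside) captures a Rust expression.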
let mut re = regex::Regex::new(r"(\{\{)|(\}\})|(\{([^\}\n]+)\})").expect("regex");
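    // Parse the attribute arguments: `path = "file"` selects the template,
    // `regex = "..."` replaces the interpolation pattern.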
while let Some(a) = attr.next() {
if format!("{}", a) == "path" {
if let (Some(a), Some(b)) = (attr.next(), attr.next()) {
if format!("{}", a) == "=" {
file_ = format!("{}", b)
}
}
} else if format!("{}", a) == "regex" {
if let (Some(a), Some(b)) = (attr.next(), attr.next()) {
if format!("{}", a) == "=" {
re = regex::Regex::new(&format!("{}", b)).expect("regex");
}
}
} else {
println!("unknown attribute {:?}", a);
}
}
let cargo_dir = std::env::var("CARGO_MANIFEST_DIR").unwrap();
let mut file = std::path::Path::new(&cargo_dir).join("templates");
file.push(file_.trim_matches('"'));
    let template = std::fs::read_to_string(&file)
        .unwrap_or_else(|e| panic!("failed to read template {:?}: {}", file, e));
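    // Escape bare `&` as `&amp;` before tokenizing, leaving existing
    // entities (`&name;`) intact so they survive the round trip.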
let re_amp = regex::Regex::new(r"(&[a-zA-Z]+;)|&").unwrap();
    let template = re_amp.replace_all(&template, |cap: &regex::Captures| {
if let Some(c) = cap.get(1) {
c.as_str().to_string()
} else {
"&".to_string()
}
});
let mut name = None;
let mut item = item.into_iter();
let mut item_ = Vec::new();
let mut spec = proc_macro2::TokenStream::new();
let mut spec2 = proc_macro2::TokenStream::new();
let mut is_name = true;
let mut last_was_name = false;
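    // Walk the item's tokens to find the struct/enum name and its generic
    // parameters: `spec` keeps the full parameter list (bounds included)
    // for the `impl` header, while `spec2` keeps only the parameter names
    // for the type being implemented.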
loop {
match item.next() {
Some(TokenTree::Ident(id)) => {
if id.to_string() == "struct" || id.to_string() == "enum" {
let it = item.next().unwrap();
name = Some(syn::Ident::new(&format!("{}", it), it.span()));
item_.push(TokenTree::Ident(id));
item_.push(it);
last_was_name = true;
} else {
item_.push(TokenTree::Ident(id));
}
}
None => break,
Some(TokenTree::Punct(p)) => {
if last_was_name {
if p.to_string() == "<" {
let mut level = 1;
spec.extend(once(TokenTree::Punct(p.clone())));
spec2.extend(once(TokenTree::Punct(p.clone())));
item_.push(TokenTree::Punct(p));
loop {
match item.next() {
Some(TokenTree::Punct(p)) => {
let pp = p.to_string();
spec.extend(once(TokenTree::Punct(p.clone())));
item_.push(TokenTree::Punct(p.clone()));
if pp == ">" {
level -= 1;
if level <= 0 {
spec2.extend(once(TokenTree::Punct(p.clone())));
break;
}
} else if pp == "<" {
level += 1;
} else if pp == ":" {
is_name = false;
} else if pp == "," && level == 1 {
spec2.extend(once(TokenTree::Punct(p.clone())));
is_name = true;
} else if is_name {
spec2.extend(once(TokenTree::Punct(p.clone())));
}
}
Some(it) => {
spec.extend(once(it.clone()));
if is_name {
spec2.extend(once(it.clone()));
}
item_.push(it)
}
None => break,
}
}
} else {
item_.push(TokenTree::Punct(p));
}
} else {
item_.push(TokenTree::Punct(p))
}
}
Some(it) => item_.push(it),
}
}
    let name = name.expect("#[template] must be applied to a struct or enum");
use std::iter::FromIterator;
let item = proc_macro2::TokenStream::from_iter(item_);
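    // Translate the template into Rust statements, then parse them back
    // into a token stream for splicing into the generated impl.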
let tokens = walk(re, &template);
let tok: TokenStream = tokens.parse().unwrap();
let file = file.to_str().unwrap();
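    // The `include_bytes!` below pulls the template into the dependency
    // graph so the crate is rebuilt whenever the template file changes.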
let tokens = quote! {
impl #spec cuach::Render for #name #spec2 {
fn render_into<W: std::fmt::Write>(&self, w: &mut W) -> Result<(), anyhow::Error> {
let _ = include_bytes!(#file);
use std::fmt::Write;
use cuach::Render;
#tok
Ok(())
}
}
#item
};
proc_macro::TokenStream::from(tokens)
}
use std::fmt::Write;
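/// html5ever token sink that re-serializes markup into `current` and, at
/// comments and EOF, flushes it into `result` as generated Rust code.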
#[derive(Clone)]
struct TokenPrinter {
result: String,
current: String,
re: Regex,
post_comment: bool,
last_was_content: bool,
}
impl TokenSink for TokenPrinter {
type Handle = ();
fn process_token(&mut self, token: Token, _line_number: u64) -> TokenSinkResult<()> {
match token {
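            // Literal text. Right after a comment (i.e. emitted code),
            // leading whitespace is dropped to keep the output tidy.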
CharacterTokens(contents) => {
if !self.post_comment || !contents.trim().is_empty() {
if self.post_comment {
self.current.push_str(contents.trim_start());
} else {
self.current.push_str(&contents);
}
self.post_comment = false;
}
self.last_was_content = true;
}
NullCharacterToken => {}
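            // Re-serialize the tag; attribute quoting below is chosen to
            // minimize escaping.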
TagToken(tag) => {
self.last_was_content = false;
self.post_comment = false;
match tag.kind {
StartTag => {
write!(&mut self.current, "<{}", tag.name).unwrap();
}
EndTag => {
write!(&mut self.current, "</{}>", tag.name).unwrap();
}
}
for attr in tag.attrs.iter() {
if tag.name.as_bytes() == b"script"
&& (attr.name.local.as_bytes() == b"async"
|| attr.name.local.as_bytes() == b"defer")
{
write!(&mut self.current, " {}", attr.name.local).unwrap();
                    } else if attr.value.contains('"') && !attr.value.contains('\'') {
                        // The value holds double quotes but no single quotes:
                        // single-quote it instead of escaping.
                        write!(&mut self.current, " {}='{}'", attr.name.local, attr.value)
                            .unwrap();
                    } else {
                        // Otherwise double-quote and escape any `"` as `&quot;`.
                        write!(
                            &mut self.current,
                            " {}=\"{}\"",
                            attr.name.local,
                            attr.value.replace('"', "&quot;")
                        )
                        .unwrap();
                    }
}
if tag.self_closing {
write!(&mut self.current, "/>").unwrap()
} else if let StartTag = tag.kind {
write!(&mut self.current, ">").unwrap()
}
}
ParseError(err) => {
panic!("ERROR: {}", err);
}
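            // HTML comments carry raw Rust code: flush the buffered markup,
            // then emit the comment body verbatim.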
CommentToken(contents) => {
while self.current.ends_with("\n") {
self.current.pop();
}
self.rollup();
                // Undo the `&amp;` escaping applied up front: comments
                // carry raw Rust code, not markup.
                let contents = contents.replace("&amp;", "&");
self.result.push_str("\n");
self.result.push_str(&contents);
self.result.push_str("\n");
self.post_comment = true;
self.last_was_content = false;
}
EOFToken => {
self.rollup();
}
_ => {}
}
TokenSinkResult::Continue
}
}
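/// Runs the html5ever tokenizer over `input` and returns the Rust code
/// generated for the template body.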
fn walk(re: Regex, input: &str) -> String {
let sink = TokenPrinter {
post_comment: true,
re,
current: String::new(),
result: String::new(),
last_was_content: false,
};
let chunk = StrTendril::try_from_byte_slice(input.as_bytes()).unwrap();
let mut input = BufferQueue::new();
input.push_back(chunk.try_reinterpret().unwrap());
let mut tok = Tokenizer::new(sink, TokenizerOpts::default());
let _ = tok.feed(&mut input);
assert!(input.is_empty());
tok.end();
tok.sink.result
}
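/// One fragment of a template string: literal text or a `{ ... }` Rust
/// expression.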
enum Arg<'a> {
Text(&'a str),
Arg(&'a str),
}
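/// Splits a string into `Arg`s with the interpolation regex. `reserve`
/// holds the second item when a single match produces two, and `start`
/// tracks how far the input has been consumed.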
struct Args<'a, 'b> {
caps: regex::CaptureMatches<'a, 'b>,
start: usize,
s: &'b str,
reserve: Option<Arg<'b>>,
finished: bool,
}
impl TokenPrinter {
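    /// Flush `current`: emit a `w.write_str(...)` per text fragment and a
    /// `(...).render_into(w)` per interpolation, collapsing whitespace
    /// runs in literal text.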
fn rollup(&mut self) {
if !self.current.is_empty() {
let args = Args {
start: 0,
caps: self.re.captures_iter(&self.current),
s: &self.current,
reserve: None,
finished: false,
};
            // Compile the whitespace-collapsing regex once, outside the loop.
            let ws = regex::Regex::new(r"\s+").expect("regex");
            for arg in args {
                match arg {
                    Arg::Text(t) => {
                        let t = ws.replace_all(t, " ");
                        writeln!(&mut self.result, "w.write_str({:?})?;", t).unwrap()
                    }
                    Arg::Arg(a) => {
                        writeln!(&mut self.result, "(\n{}\n).render_into(w)?;", a).unwrap()
                    }
                }
            }
self.current.clear();
}
}
}
impl<'a, 'b> Iterator for Args<'a, 'b> {
type Item = Arg<'b>;
fn next(&mut self) -> Option<Self::Item> {
if self.finished {
return None;
}
if let Some(r) = self.reserve.take() {
return Some(r);
} else if let Some(cap) = self.caps.next() {
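            // Capture group 1 is `{{`, group 2 is `}}`, group 3 is a full
            // `{ expr }` interpolation whose group 4 is the expression.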
if let Some(cap2) = cap.get(1) {
return if cap2.start() > self.start {
let r = self.s.split_at(cap2.start()).0;
let r = r.split_at(self.start).1;
self.start = cap2.end();
self.reserve = Some(Arg::Text("{"));
Some(Arg::Text(r))
} else {
self.start = cap2.end();
Some(Arg::Text("{"))
};
} else if let Some(cap2) = cap.get(2) {
return if cap2.start() > self.start {
let r = self.s.split_at(cap2.start()).0;
let r = r.split_at(self.start).1;
self.start = cap2.end();
self.reserve = Some(Arg::Text("}"));
Some(Arg::Text(r))
} else {
self.start = cap2.end();
Some(Arg::Text("}"))
};
            } else if let Some(cap2) = cap.get(3) {
                return if cap2.start() > self.start {
                    let r = self.s.split_at(cap2.start()).0;
                    let r = r.split_at(self.start).1;
                    self.start = cap2.end();
                    self.reserve = Some(Arg::Arg(cap.get(4).unwrap().as_str()));
                    Some(Arg::Text(r))
                } else {
                    self.start = cap2.end();
                    Some(Arg::Arg(cap.get(4).unwrap().as_str()))
                };
}
}
if self.start < self.s.len() {
self.finished = true;
Some(Arg::Text(self.s.split_at(self.start).1))
} else {
None
}
}
}