3 changes: 2 additions & 1 deletion .gitignore
@@ -2,4 +2,5 @@ target/
Cargo.lock
.idea/
/tester_canyon_sql/
canyon_tester/
canyon_tester/
macro_utils.rs
10 changes: 6 additions & 4 deletions canyon_macros/Cargo.toml
@@ -4,9 +4,11 @@ version = "0.1.0"
edition = "2018"

[dependencies]
syn = "1.0"
quote = "1.0"
proc-macro2 = "1.0"
syn = { version = "1.0.86", features = ["full"] }
quote = "1.0.9"
proc-macro2 = "1.0.27"
futures = "0.3.21"
canyon_observer = { path = "../canyon_observer" }

[lib]
proc-macro = true
proc-macro = true
59 changes: 59 additions & 0 deletions canyon_macros/src/canyon_macro.rs
@@ -0,0 +1,59 @@
/// Provides helpers to build the `#[canyon]` procedural macro-like attribute

use proc_macro2::TokenStream;
use syn::Block;
use quote::quote;

use canyon_observer::CANYON_REGISTER;

/// Creates a TokenStream that loads the data generated at compile time
/// by the `CanyonManaged` macros back into the Canyon register at runtime
pub fn _wire_data_on_canyon_register(canyon_manager_tokens: &mut Vec<TokenStream>) {
let mut identifiers = String::new();

unsafe {
for element in &CANYON_REGISTER {
identifiers.push_str(element.as_str());
identifiers.push(',');
}
}

let tokens = quote! {
use canyon_sql::canyon_observer::{
CANYON_REGISTER,
CREDENTIALS,
credentials::DatabaseCredentials
};

unsafe { CREDENTIALS = Some(DatabaseCredentials::new()); }
unsafe { println!("CREDENTIALS MACRO IN: {:?}", CREDENTIALS); }
unsafe { CANYON_REGISTER = #identifiers
.split(',')
.map(str::to_string)
.collect();
// TODO Delete (or just collect without it) the last element
// from the new assignment
// CANYON_REGISTER.pop_back();
}
unsafe { println!("Register status IN: {:?}", CANYON_REGISTER) };
};

canyon_manager_tokens.push(tokens);
}
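
// A minimal roundtrip sketch (not part of the original diff): the generated code above
// joins the register identifiers with ',' at compile time and splits them back at
// runtime. The trailing comma leaves an empty last element, which is what the TODO
// inside the `quote!` block refers to.
fn _identifiers_roundtrip_sketch() {
    // Hypothetical register contents
    let identifiers = "players,teams,";
    let mut register: Vec<String> = identifiers
        .split(',')
        .map(str::to_string)
        .collect();
    assert_eq!(register, vec!["players", "teams", ""]);
    let _ = register.pop(); // drop the empty tail, as the TODO suggests
}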

/// Generates the TokenStream containing the code written by the user
/// in the `fn main()`
pub fn _user_body_builder(func_body: Box<Block>, macro_tokens: &mut Vec<TokenStream>) {
// Gets a Vec<Stmt> with all the statements in the body of the fn
let function_statements = func_body.stmts;

for stmt in function_statements {
let quote = quote! {#stmt};
let quoterino: TokenStream = quote
.to_string()
.parse()
.unwrap();

macro_tokens.push(quoterino)
}
}
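
A rough sketch (not part of the diff) of how `_user_body_builder` could be exercised in isolation, assuming the helper is reachable from a test module and that `syn` is built with the `full` feature enabled in the Cargo.toml change above:

use proc_macro2::TokenStream;
use syn::ItemFn;

fn _user_body_builder_sketch() {
    // A hypothetical user `main`, parsed the same way the #[canyon] attribute does
    let func: ItemFn = syn::parse_str(
        "fn main() { let msg = \"hello\"; println!(\"{}\", msg); }"
    ).expect("valid function source");

    let mut macro_tokens: Vec<TokenStream> = Vec::new();
    _user_body_builder(func.block, &mut macro_tokens);

    // One TokenStream is produced per statement of the original body
    assert_eq!(macro_tokens.len(), 2);
}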
149 changes: 112 additions & 37 deletions canyon_macros/src/lib.rs
@@ -1,11 +1,96 @@
extern crate proc_macro;

use proc_macro2::Ident;
use proc_macro::TokenStream as CompilerTokenStream;
use proc_macro2::{Ident, TokenStream};
use quote::quote;
use syn::{
DeriveInput, Fields, Visibility
DeriveInput, Fields, Visibility, parse_macro_input, ItemFn, Type
};


mod canyon_macro;

use canyon_macro::{_user_body_builder, _wire_data_on_canyon_register};
use canyon_observer::CANYON_REGISTER;


/// Macro for handling the entry point to the program.
///
/// Spares the user from writing the tokio attribute and
/// the async modifier on the main fn
/// TODO Check for the _meta attribute metadata when necessary
#[proc_macro_attribute]
pub fn canyon(_meta: CompilerTokenStream, input: CompilerTokenStream) -> CompilerTokenStream {
// get the function this attribute is attached to
let func = parse_macro_input!(input as ItemFn);
let sign = func.sig;
let body = func.block;

// The code written by the Canyon Manager
let mut canyon_manager_tokens: Vec<TokenStream> = Vec::new();
// Builds the code that Canyon needs in its initialization
_wire_data_on_canyon_register(&mut canyon_manager_tokens);

// The code written by the user
let mut macro_tokens: Vec<TokenStream> = Vec::new();
// Builds the code that represents the user written code
_user_body_builder(body, &mut macro_tokens);


let tok = quote! {
use canyon_sql::tokio;
#[tokio::main]
async #sign {
{
#(#canyon_manager_tokens)*
}

#(#macro_tokens)*
}
};

tok.into()
}
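
// Usage sketch (illustrative only, not part of this diff): with the attribute above,
// a consumer writes a plain, non-async entry point
//
//     #[canyon]
//     fn main() {
//         /* user statements */
//     }
//
// and it expands, roughly, into
//
//     #[tokio::main]
//     async fn main() {
//         { /* register + credentials wiring from _wire_data_on_canyon_register */ }
//         /* the user statements, re-emitted by _user_body_builder */
//     }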


/// Takes data from the struct annotated with this macro to fill the Canyon Register,
/// which holds the data that Canyon needs to work in `managed mode`
#[proc_macro_attribute]
pub fn canyon_managed(_meta: CompilerTokenStream, input: CompilerTokenStream) -> CompilerTokenStream {
let ast: DeriveInput = syn::parse(input).unwrap();
let (vis, ty, generics) = (&ast.vis, &ast.ident, &ast.generics);
let fields = fields_with_types(
match ast.data {
syn::Data::Struct(ref s) => &s.fields,
_ => panic!("Field names can only be derived for structs"),
}
);

// Notifies the observer that an observable must be registered on the system
// In other words, adds the data of the structure to the Canyon Register
unsafe { CANYON_REGISTER.push(ty.to_string()); }
println!("Observable <{}> added to the register", ty.to_string());


let struct_fields = fields.iter().map(|(_vis, ident, ty)| {
quote! {
#vis #ident: #ty
}
});

let (_impl_generics, ty_generics, _where_clause) =
generics.split_for_impl();

let tokens = quote! {
pub struct #ty <#ty_generics> {
#(#struct_fields),*
}
};

tokens.into()
}
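
// Usage sketch (hypothetical type name, not part of this diff):
//
//     #[canyon_managed]
//     pub struct Player {
//         pub id: i32,
//         pub name: String,
//     }
//
// At compile time this pushes the identifier "Player" into CANYON_REGISTER and
// re-emits an equivalent struct definition, so the type stays usable as written.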


/// Allows the implementors to auto-derive the `crud-operations` trait, which defines the methods
/// that perform the database communication and query against the db.
#[proc_macro_derive(CanyonCRUD)]
@@ -18,6 +103,7 @@ pub fn crud_operations(input: proc_macro::TokenStream) -> proc_macro::TokenStrea
impl_crud_operations_trait_for_struct(&ast)
}


fn impl_crud_operations_trait_for_struct(ast: &syn::DeriveInput) -> proc_macro::TokenStream {
let ty = &ast.ident;
let tokens = quote! {
@@ -31,24 +117,24 @@ fn impl_crud_operations_trait_for_struct(ast: &syn::DeriveInput) -> proc_macro::

#[proc_macro_derive(CanyonMapper)]
pub fn implement_row_mapper_for_type(input: proc_macro::TokenStream) -> proc_macro::TokenStream {
// Gets the data from the AST
let ast: DeriveInput = syn::parse(input).unwrap();
let (vis, ty, generics) = (&ast.vis, &ast.ident, &ast.generics);

// Retrieves the table name automatically from the Struct identifier
// or from the TODO: #table_name = 'user_defined_db_table_name'
let table_name: String = database_table_name_from_struct(ty);

// Recovers the identifiers of the struct's members
let fields = filter_fields(
match ast.data {
syn::Data::Struct(ref s) => &s.fields,
_ => panic!("Field names can only be derived for structs"),
}
);

let names_const_fields_str = fields.iter().map(|(_vis, ident)| {
let ident_name = ident.to_string();
quote! {
#ident_name
}
});

// Creates the TokenStream for wiring the column names into the
// Canyon RowMapper
let field_names_for_row_mapper = fields.iter().map(|(_vis, ident)| {
let ident_name = ident.to_string();
quote! {
@@ -57,53 +143,29 @@ pub fn implement_row_mapper_for_type(input: proc_macro::TokenStream) -> proc_mac
}
});

// Get the generics identifiers
let (impl_generics, ty_generics, where_clause) =
generics.split_for_impl();


let tokens = quote! {
use canyon_sql::{
self, crud::CrudOperations, mapper::RowMapper,
async_trait::*,
};
use canyon_sql::tokio_postgres::Row;

impl #impl_generics #ty #ty_generics
#where_clause
{
// Find all
// Find all // Selecting records by specific columns is not enabled yet
#vis async fn find_all() -> Vec<#ty> {
#ty::__find_all(#table_name, &[])
<#ty as CrudOperations<#ty>>::__find_all(#table_name, &[])
.await
.as_response::<#ty>()
}

// Find by ID
#vis async fn find_by_id(id: i32) -> #ty {
#ty::__find_by_id(#table_name, id)
<#ty as CrudOperations<#ty>>::__find_by_id(#table_name, id)
.await
.as_response::<#ty>()[0].clone()
}

fn get_field_names() -> Vec<String> {
let mut vec = Vec::new();

let field_names = stringify!(
#(#names_const_fields_str),*
).split(",")
.collect::<Vec<_>>()
.into_iter()
.for_each( |field_name|
vec.push(
field_name
.replace('"', "")
.replace(' ', "")
.to_string()
)
);
vec
}

}

impl RowMapper<Self> for #ty {
@@ -129,6 +191,19 @@ fn filter_fields(fields: &Fields) -> Vec<(Visibility, Ident)> {
}


fn fields_with_types(fields: &Fields) -> Vec<(Visibility, Ident, Type)> {
fields
.iter()
.map(|field|
(field.vis.clone(),
field.ident.as_ref().unwrap().clone(),
field.ty.clone()
)
)
.collect::<Vec<_>>()
}


/// Parses a syn::Identifier to get a snake case database name from the type identifier
fn database_table_name_from_struct(ty: &Ident) -> String {

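
Putting the pieces of this file together, a hedged consumer-side sketch of the derived API (the `Player` type, the snake_case table name and the `canyon_sql` re-export surface are assumptions inferred from the generated code above, not something this diff establishes; managed-mode structs would additionally carry `#[canyon_managed]`, as sketched earlier):

use canyon_sql::*; // assumed re-exports, matching the paths emitted by the macros

#[derive(Debug, Clone, CanyonCRUD, CanyonMapper)]
pub struct Player {
    pub id: i32,
    pub name: String,
}

#[canyon]
fn main() {
    // Generated by CanyonMapper; the table name is the snake_case form of the
    // struct identifier ("player"), per database_table_name_from_struct
    let players: Vec<Player> = Player::find_all().await;
    let first: Player = Player::find_by_id(1).await;
    println!("{} players, first: {:?}", players.len(), first);
}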
8 changes: 8 additions & 0 deletions canyon_observer/Cargo.toml
@@ -0,0 +1,8 @@
[package]
name = "canyon_observer"
version = "0.1.0"
edition = "2021"

# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html

[dependencies]
65 changes: 65 additions & 0 deletions canyon_observer/src/credentials.rs
@@ -0,0 +1,65 @@
/// This module handles retrieving the database credentials
/// so that they are wired only once for the entire program's lifetime

use std::{fs, collections::HashMap};

/// Retrieves the credentials for the desired database connection from a
/// hand-coded `Secrets.toml` file located at the root of the project.
#[derive(Clone, Debug)]
pub struct DatabaseCredentials {
pub username: String,
pub password: String,
pub db_name: String,
}

impl DatabaseCredentials{

pub fn new() -> Self {

let parsed_credentials = DatabaseCredentials::credentials_parser();

Self {
username: parsed_credentials.get("username").unwrap().to_owned(),
password: parsed_credentials.get("password").unwrap().to_owned(),
db_name: parsed_credentials.get("db_name").unwrap().to_owned()
}
}

pub fn credentials_parser() -> HashMap<String, String> {

const FILE_NAME: &str = "Secrets.toml";
let mut credentials_mapper: HashMap<_, _> = HashMap::new();

let secrets_file = fs::read_to_string(FILE_NAME)
.expect( // TODO Convert this to a custom error
&(format!(
"\n\nNo file --> {} <-- founded on the root of this project.", FILE_NAME
) + "\nPlease, ensure that you created one .toml file with the necesary"
+ " properties needed in order to connect to the database.\n\n")
);

let secrets_file_splitted = secrets_file
.split_terminator("\n");

for entry in secrets_file_splitted {
let cleaned_entry =
entry
.split_ascii_whitespace()
.filter(
|x| x != &"="
);

let mut pair = Vec::new();
cleaned_entry.for_each(
|elem| pair.push(elem.to_string())
);

let attr = pair.get(0).unwrap();
let value = pair.get(1).unwrap();

credentials_mapper.insert(attr.to_owned(), value.to_owned());
}

credentials_mapper
}
}
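
The parser above treats `Secrets.toml` as plain `key = value` lines rather than as full TOML. A minimal sketch of the per-line split it performs, together with the file layout it implies (the keys come from `new()`; the values are made-up examples):

// Hypothetical Secrets.toml, inferred from the keys read in `new()`:
//
//     username = postgres
//     password = my_password
//     db_name = canyon
//
fn _parse_line_sketch(entry: &str) -> (String, String) {
    // Mirrors credentials_parser: split on whitespace and drop the "=" token
    let mut parts = entry
        .split_ascii_whitespace()
        .filter(|x| x != &"=");
    let attr = parts.next().expect("missing key").to_string();
    let value = parts.next().expect("missing value").to_string();
    (attr, value)
}

// _parse_line_sketch("username = postgres") == ("username".to_string(), "postgres".to_string())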