diff --git a/.github/changelog_configuration.json b/.github/changelog_configuration.json
new file mode 100644
index 00000000..25249373
--- /dev/null
+++ b/.github/changelog_configuration.json
@@ -0,0 +1,72 @@
+{
+ "categories": [
+ {
+ "title": "## 🚀 Features",
+ "labels": ["feature"]
+ },
+ {
+ "title": "## 🐛 Fixes",
+ "labels": ["fix"]
+ },
+ {
+ "title": "## 🧪 Tests",
+ "labels": ["test"]
+ },
+ {
+ "title": "## 🧪 Tests and some 🪄 Magic",
+ "labels": ["test", "magic"],
+ "exclude_labels": ["no-magic"],
+ "exhaustive": true,
+ "empty_content": "- no matching PRs"
+ }
+ ],
+ "ignore_labels": [
+ "ignore"
+ ],
+ "sort": {
+ "order": "ASC",
+ "on_property": "mergedAt"
+ },
+ "template": "${{CHANGELOG}}\n\n\nUncategorized
\n\n${{UNCATEGORIZED}}\n ",
+ "pr_template": "- ${{TITLE}}\n - PR: #${{NUMBER}}",
+ "empty_template": "- no changes",
+ "label_extractor": [
+ {
+ "pattern": "(.) (.+)",
+ "target": "$1",
+ "flags": "gu"
+ },
+ {
+ "pattern": "\\[Issue\\]",
+ "on_property": "title",
+ "method": "match"
+ }
+ ],
+ "duplicate_filter": {
+ "pattern": "\\[ABC-....\\]",
+ "on_property": "title",
+ "method": "match"
+ },
+ "transformers": [
+ {
+ "pattern": "[\\-\\*] (\\[(...|TEST|CI|SKIP)\\])( )?(.+?)\n(.+?[\\-\\*] )(.+)",
+ "target": "- $4\n - $6"
+ }
+ ],
+ "max_tags_to_fetch": 200,
+ "max_pull_requests": 200,
+ "max_back_track_time_days": 365,
+ "exclude_merge_branches": [
+ "Owner/qa"
+ ],
+ "tag_resolver": {
+ "method": "semver",
+ "filter": {
+ "pattern": "api-(.+)",
+ "flags": "gu"
+ }
+ },
+ "base_branches": [
+ "dev"
+ ]
+}
\ No newline at end of file
diff --git a/.github/workflows/cache.yml b/.github/workflows/cache.yml
deleted file mode 100644
index adcdc459..00000000
--- a/.github/workflows/cache.yml
+++ /dev/null
@@ -1,35 +0,0 @@
-name: Caching Cargo events
-
-on: push
-
-jobs:
- build:
- runs-on: ubuntu-latest
- steps:
- - uses: actions/checkout@v3
-
- - name: Cache Cargo
- id: cache-cargo
- uses: actions/cache@v3
- env:
- cache-name: cache-cargo
- with:
- path: cache-folder
- key: ${{ runner.os }}-build-${{ env.cache-name }}-${{ hashFiles('**/Cargo.lock') }}
- restore-keys: |
- ${{ runner.os }}-build-${{ env.cache-name }}-
- ${{ runner.os }}-build-
- ${{ runner.os }}-
-
- - if: ${{ steps.cache-cargo.outputs.cache-hit != 'true' }}
- name: Install grcov
- continue-on-error: true
- run: cargo install grcov
-
- - name: Build
- run: cargo build
-
- - name: Test
- run: cargo test
-
- # TODO we need to pass the OS flags correctly
\ No newline at end of file
diff --git a/.github/workflows/code-coverage.yml b/.github/workflows/code-coverage.yml
new file mode 100644
index 00000000..efeac0d9
--- /dev/null
+++ b/.github/workflows/code-coverage.yml
@@ -0,0 +1,71 @@
+name: Linux CI && Code Coverage
+
+on:
+ push:
+ tags:
+ - 'v[0-9]+.[0-9]+.[0-9]+'
+ - 'v[0-9]+.[0-9]+.[0-9]+rc[0-9]+'
+
+env:
+ CARGO_TERM_COLOR: always
+
+jobs:
+ code-coverage:
+ permissions:
+ contents: write
+ env:
+ CARGO_INCREMENTAL: '0'
+ RUSTFLAGS: '-Zprofile -Ccodegen-units=1 -Cinline-threshold=0 -Clink-dead-code -Coverflow-checks=off -Cpanic=abort -Zpanic_abort_tests'
+ RUSTDOCFLAGS: '-Zprofile -Ccodegen-units=1 -Cinline-threshold=0 -Clink-dead-code -Coverflow-checks=off -Cpanic=abort -Zpanic_abort_tests'
+ runs-on: ubuntu-latest
+ steps:
+ - uses: actions/checkout@v3
+
+ - name: Use nightly toolchain
+ run: |
+ rustup toolchain install nightly
+ rustup override set nightly
+
+ - name: Caching cargo dependencies
+ id: project-cache
+ uses: Swatinem/rust-cache@v2
+
+ - if: ${{ steps.project-cache.outputs.cache-hit != 'true' }}
+ name: Install grcov
+ run: cargo install grcov
+
+ - name: Make the USER own the working directory
+ run: sudo chown -R $USER:$USER ${{ github.workspace }}
+
+ - name: Waking up docker
+ run: docker-compose -f ./docker/docker-compose.yml up -d
+
+ - name: Run tests
+ run: |
+ cargo test --all-features --no-fail-fast --target=x86_64-unknown-linux-gnu -- --show-output --test-threads=1
+
+ - name: Shutting down docker and cleaning up
+ run: |
+ docker-compose -f ./docker/docker-compose.yml down
+ sudo chown -R $USER:$USER ${{ github.workspace }}
+ rm -rf ./docker/postgres-data
+
+ - name: Generate code coverage report
+ run: |
+ grcov . -s . --binary-path ./target/debug/ -t html --branch --ignore-not-existing -o ./target/debug/coverage
+ grcov . -s . --binary-path ./target/debug/ -t cobertura --branch --ignore-not-existing -o ./target/debug/coverage/code_cov.xml
+
+ - name: Publish Test Results
+ uses: actions/upload-artifact@v3
+ with:
+ name: Unit Test Results
+ path: |
+ ./target/debug/coverage/code_cov.xml
+ ./target/debug/coverage/index.html
+
+ - name: Publish coverage report to GitHub Pages
+ uses: JamesIves/github-pages-deploy-action@v4
+ with:
+ folder: ./target/debug/coverage
+ token: ${{ secrets.GITHUB_TOKEN }}
diff --git a/.github/workflows/code-quality.yml b/.github/workflows/code-quality.yml
new file mode 100644
index 00000000..08b29335
--- /dev/null
+++ b/.github/workflows/code-quality.yml
@@ -0,0 +1,60 @@
+name: Code quality and sanity
+
+on:
+ push:
+ branches: '*'
+ pull_request:
+ branches: '*'
+
+jobs:
+ clippy:
+ name: Lint with Clippy
+ runs-on: ubuntu-latest
+ env:
+ RUSTFLAGS: -Dwarnings
+ steps:
+ - uses: actions/checkout@v3
+
+ - name: Caching project dependencies
+ id: project-cache
+ uses: Swatinem/rust-cache@v2
+
+ - uses: hecrj/setup-rust-action@v1
+ with:
+ components: clippy
+ - run: cargo clippy --workspace --all-targets --verbose --all-features -- -A clippy::question_mark
+ rustfmt:
+ name: Verify code formatting
+ runs-on: ubuntu-latest
+ steps:
+ - uses: actions/checkout@v3
+
+ - name: Caching project dependencies
+ id: project-cache
+ uses: Swatinem/rust-cache@v2
+
+ - uses: hecrj/setup-rust-action@v1
+ with:
+ components: rustfmt
+
+ - run: cargo fmt --all -- --check
+
+ check-rustdoc-links:
+ name: Check intra-doc links
+ runs-on: ubuntu-latest
+ strategy:
+ fail-fast: false
+ matrix:
+ crate: [canyon_connection, canyon_crud, canyon_macros, canyon_observer, canyon_manager, canyon_sql]
+ steps:
+ - uses: actions/checkout@v3
+
+ - name: Caching project dependencies
+ id: project-cache
+ uses: Swatinem/rust-cache@v2
+
+ - uses: hecrj/setup-rust-action@v1
+ with:
+ rust-version: nightly
+
+ - run: cargo rustdoc -p ${{ matrix.crate }} --all-features -- -D warnings
diff --git a/.github/workflows/continuous-integration.yml b/.github/workflows/continuous-integration.yml
new file mode 100644
index 00000000..d2cbbd5d
--- /dev/null
+++ b/.github/workflows/continuous-integration.yml
@@ -0,0 +1,50 @@
+name: Continuous Integration
+
+on:
+ push:
+ branches: 'main'
+ pull_request:
+ branches: 'main'
+
+env:
+ CARGO_TERM_COLOR: always
+
+jobs:
+ multiplatform-tests:
+ name: Testing on Rust ${{ matrix.rust }} for ${{ matrix.os }}
+ runs-on: ${{ matrix.os }}
+ strategy:
+ fail-fast: false
+ matrix:
+ include:
+ - { rust: stable, os: ubuntu-latest }
+ - { rust: nightly, os: ubuntu-latest }
+ - { rust: stable, os: macos-latest }
+ - { rust: stable, os: windows-latest }
+
+ steps:
+ - name: Make the USER own the working directory
+ if: ${{ matrix.os == 'ubuntu-latest' }}
+ run: sudo chown -R $USER:$USER ${{ github.workspace }}
+
+ - uses: actions/checkout@v3
+
+ - name: docker-compose
+ if: ${{ matrix.os == 'ubuntu-latest' }}
+ run: docker-compose -f ./docker/docker-compose.yml up -d
+
+ - name: Caching cargo dependencies
+ id: project-cache
+ uses: Swatinem/rust-cache@v2
+
+ - uses: hecrj/setup-rust-action@v1
+ with:
+ rust-version: ${{ matrix.rust }}
+
+ - name: Run all tests, UNIT and INTEGRATION for Linux targets
+ if: ${{ matrix.os == 'ubuntu-latest' }}
+ run: cargo test --verbose --workspace --all-features --no-fail-fast -- --show-output --test-threads=1
+
+ - name: Run UNIT tests with no external connections for the rest of the defined targets
+ if: ${{ matrix.os != 'ubuntu-latest' }}
+ run: cargo test --verbose --workspace --exclude tests --all-features --no-fail-fast -- --show-output
diff --git a/.github/workflows/macos-tests.yml b/.github/workflows/macos-tests.yml
new file mode 100644
index 00000000..21ca2e01
--- /dev/null
+++ b/.github/workflows/macos-tests.yml
@@ -0,0 +1,27 @@
+name: macOS CI
+
+on:
+ push:
+ tags:
+ - 'v[0-9]+.[0-9]+.[0-9]+'
+ - 'v[0-9]+.[0-9]+.[0-9]+rc[0-9]+'
+
+env:
+ CARGO_TERM_COLOR: always
+
+jobs:
+ macos-tests:
+ runs-on: macos-latest
+ name: Tests for macOS
+ env:
+ CARGO_TERM_COLOR: always
+ steps:
+ - uses: actions/checkout@v3
+
+ - name: Caching cargo deps
+ id: ci-cache
+ uses: Swatinem/rust-cache@v2
+
+ - name: Running tests for macOS targets
+ run: |
+ cargo test --all-features --workspace --exclude tests
\ No newline at end of file
diff --git a/.github/workflows/publish-tests-results.yml b/.github/workflows/publish-tests-results.yml
deleted file mode 100644
index ab4390f7..00000000
--- a/.github/workflows/publish-tests-results.yml
+++ /dev/null
@@ -1,36 +0,0 @@
-name: Unit Test Results
-
-on:
- workflow_run:
- workflows: ["deactivated"] ## For disable it while there's no valid XML output
- types:
- - completed
-
-jobs:
- unit-test-results:
- permissions:
- contents: write
- name: Unit Test Results
- runs-on: ubuntu-latest
- if: github.event.workflow_run.conclusion != 'skipped'
-
- steps:
- - name: Download and Extract Artifacts
- env:
- GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- run: |
- mkdir -p artifacts && cd artifacts
- artifacts_url=${{ github.event.workflow_run.artifacts_url }}
- gh api "$artifacts_url" -q '.artifacts[] | [.name, .archive_download_url] | @tsv' | while read artifact
- do
- IFS=$'\t' read name url <<< "$artifact"
- gh api $url > "$name.zip"
- unzip -d "$name" "$name.zip"
- done
- - name: Publish Unit Test Results
- uses: EnricoMi/publish-unit-test-result-action@v2
- with:
- commit: ${{ github.event.workflow_run.head_sha }}
- event_file: artifacts/Event File/event.json
- event_name: ${{ github.event.workflow_run.event }}
- files: "artifacts/**/*.xml" ## Gcov XML output files are not compatible with the action for now
diff --git a/.github/workflows/publish.yaml b/.github/workflows/publish.yaml
deleted file mode 100644
index dd2f036d..00000000
--- a/.github/workflows/publish.yaml
+++ /dev/null
@@ -1,27 +0,0 @@
-on:
- push:
- # Pattern matched against refs/tags
- tags:
- - '*' # Push events to every tag not containing /
- workflow_dispatch:
-
-name: Publish Canyon-SQL to Crates.io
-
-jobs:
- publish:
- name: Publish
- runs-on: ubuntu-latest
- steps:
- - name: Checkout sources
- uses: actions/checkout@v3
-
- - name: Install stable toolchain
- uses: actions-rs/toolchain@v1
- with:
- profile: minimal
- toolchain: stable
- override: true
-
- - run: cargo publish --token ${secrets.CRATES_IO_TOKEN}
- env:
- CRATES_TOKEN: ${{ secrets.CRATES_IO_TOKEN }} ##
\ No newline at end of file
diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml
new file mode 100644
index 00000000..3946c579
--- /dev/null
+++ b/.github/workflows/release.yml
@@ -0,0 +1,51 @@
+name: Generate Canyon-SQL release
+
+on:
+ push:
+ tags:
+ - 'v[0-9]+.[0-9]+.[0-9]+'
+ - 'v[0-9]+.[0-9]+.[0-9]+rc[0-9]+'
+
+jobs:
+ publish:
+ name: Publish Canyon-SQL
+ runs-on: ubuntu-latest
+ steps:
+ - name: Checkout sources
+ uses: actions/checkout@v3
+
+ - name: Install stable toolchain
+ uses: actions-rs/toolchain@v1
+ with:
+ profile: minimal
+ toolchain: stable
+ override: true
+
+ - uses: katyo/publish-crates@v1
+ with:
+ registry-token: ${{ secrets.CRATES_IO_TOKEN }}
+ path: './canyon_sql'
+
+ release-publisher:
+ permissions:
+ contents: write
+ name: Generate a new release and update the CHANGELOG
+ runs-on: ubuntu-latest
+ steps:
+ - name: Generate a new Canyon-SQL release on GitHub
+ uses: actions/create-release@v1
+ id: create-release
+ with:
+ draft: false
+ prerelease: false
+ release_name: ${{ github.ref_name }}
+ tag_name: ${{ github.ref }}
+ env:
+ GITHUB_TOKEN: ${{ github.token }}
+
+ - name: "Update the CHANGELOG.md for the release"
+ uses: mikepenz/release-changelog-builder-action@v3
+ with:
+ configuration: "./.github/changelog_configuration.json"
+ env:
+ GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
\ No newline at end of file
diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml
deleted file mode 100644
index 0009dc9f..00000000
--- a/.github/workflows/tests.yml
+++ /dev/null
@@ -1,61 +0,0 @@
-name: CI
-
-on:
- push:
- branches: [ "main" ]
- pull_request:
- branches: [ "main" ]
-
-env:
- CARGO_TERM_COLOR: always
-
-jobs:
- code-coverage:
- permissions:
- contents: write
- runs-on: ubuntu-latest
- steps:
- - uses: actions/checkout@v3
-
- - name: Use nightly toolchain
- run: |
- rustup toolchain install nightly
- rustup override set nightly
- - name: Install grcov
- run: cargo install grcov
-
- - name: Run tests
- env:
- CARGO_INCREMENTAL: '0'
- RUSTFLAGS: '-Zprofile -Ccodegen-units=1 -Cinline-threshold=0 -Clink-dead-code -Coverflow-checks=off -Cpanic=abort -Zpanic_abort_tests'
- RUSTDOCFLAGS: '-Zprofile -Ccodegen-units=1 -Cinline-threshold=0 -Clink-dead-code -Coverflow-checks=off -Cpanic=abort -Zpanic_abort_tests'
- run: |
- cargo test --all-features --no-fail-fast --target=x86_64-unknown-linux-gnu
- - name: Generate code coverage report
- if: always()
- env:
- CARGO_INCREMENTAL: '0'
- RUSTFLAGS: '-Zprofile -Ccodegen-units=1 -Cinline-threshold=0 -Clink-dead-code -Coverflow-checks=off -Cpanic=abort -Zpanic_abort_tests'
- RUSTDOCFLAGS: '-Zprofile -Ccodegen-units=1 -Cinline-threshold=0 -Clink-dead-code -Coverflow-checks=off -Cpanic=abort -Zpanic_abort_tests'
- run: |
- grcov . -s . --binary-path ./target/debug/ -t html --branch --ignore-not-existing -o ./target/debug/coverage
- grcov . -s . --binary-path ./target/debug/ -t cobertura --branch --ignore-not-existing -o ./target/debug/coverage/code_cov.xml
- - name: Upload results as an artifact
- uses: actions/upload-artifact@v3
- with:
- name: Event File
- path: ${{ github.event_path }}
-
- - name: Publish Test Results
- uses: actions/upload-artifact@v3
- with:
- name: Unit Test Results
- path: |
- ./target/debug/coverage/code_cov.xml
- ./target/debug/coverage/index.html
- - name: Publish coverage report to GitHub Pages
- if: ${{ github.ref == 'refs/heads/main' }}
- uses: JamesIves/github-pages-deploy-action@v4
- with:
- folder: ./target/debug/coverage
- token: ${{ secrets.GITHUB_TOKEN }} # TODO handle permissions and apply them to the default token
\ No newline at end of file
diff --git a/.github/workflows/windows-tests.yml b/.github/workflows/windows-tests.yml
new file mode 100644
index 00000000..a6ace765
--- /dev/null
+++ b/.github/workflows/windows-tests.yml
@@ -0,0 +1,27 @@
+name: Windows CI
+
+on:
+ push:
+ tags:
+ - 'v[0-9]+.[0-9]+.[0-9]+'
+ - 'v[0-9]+.[0-9]+.[0-9]+rc[0-9]+'
+
+env:
+ CARGO_TERM_COLOR: always
+
+jobs:
+ windows-tests:
+ runs-on: windows-latest
+ name: Tests for Windows
+ env:
+ CARGO_TERM_COLOR: always
+ steps:
+ - uses: actions/checkout@v3
+
+ - name: Caching cargo deps
+ id: ci-cache
+ uses: Swatinem/rust-cache@v2
+
+ - name: Running tests for Windows OS targets
+ run: |
+ cargo test --all-features --workspace --exclude tests
diff --git a/.gitignore b/.gitignore
index 424822ed..a38bca38 100755
--- a/.gitignore
+++ b/.gitignore
@@ -4,4 +4,5 @@ Cargo.lock
/tester_canyon_sql/
canyon_tester/
macro_utils.rs
-.vscode/
\ No newline at end of file
+.vscode/
+postgres-data/
\ No newline at end of file
diff --git a/Cargo.toml b/Cargo.toml
index 85971f6b..0e3b3993 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -8,5 +8,7 @@ members = [
"canyon_manager",
"canyon_macros",
"canyon_crud",
- "canyon_connection"
+ "canyon_connection",
+
+ "tests"
]
\ No newline at end of file
diff --git a/README.md b/README.md
index 0c64f625..d0147b41 100755
--- a/README.md
+++ b/README.md
@@ -2,8 +2,12 @@
**An ORM for multiple databases, fully written in `Rust`.**
-[](https://github.com/zerodaycode/Canyon-SQL/actions/workflows/tests.yml)
-[](https://zerodaycode.github.io/Canyon-SQL)
+[](https://github.com/zerodaycode/Canyon-SQL/actions/workflows/code-coverage.yml)
+[](https://zerodaycode.github.io/Canyon-SQL)
+
+[](https://github.com/zerodaycode/Canyon-SQL/actions/workflows/linux-tests.yml)
+[](https://github.com/zerodaycode/Canyon-SQL/actions/workflows/macos-tests.yml)
+[](https://github.com/zerodaycode/Canyon-SQL/actions/workflows/windows-tests.yml)
`Canyon-SQL` is a high-level abstraction for working with multiple databases concurrently. It is built on top of the `async` language features
to provide a fast, highly performant library for handling data access for consumers.
diff --git a/bash_aliases.sh b/bash_aliases.sh
new file mode 100644
index 00000000..260765fe
--- /dev/null
+++ b/bash_aliases.sh
@@ -0,0 +1,29 @@
+#!/bin/sh
+
+# This file provides command aliases commonly used by the developers involved in Canyon-SQL.
+# These aliases avoid typing the long chains of concatenated commands that some
+# integrated tasks depend on.
+
+# In order to load the aliases, simply type `$ . ./bash_aliases.sh` from the root of the project.
+# (refreshing the current terminal session could be required)
+
+# Executes the docker compose script to wake up the postgres container
+alias UpPostgres='docker-compose -f ./docker/docker-compose.yml up'
+# Shutdown the postgres container
+alias DownPostgres='docker-compose -f ./docker/docker-compose.yml down'
+# Cleans the data folder generated by the postgres container in docker
+alias CleanPostgres='rm -rf ./docker/postgres-data'
+
+# Build the project for Windows targets
+alias BuildCanyonWin='cargo build --all-features --target=x86_64-pc-windows-msvc'
+alias BuildCanyonWinFull='cargo clean && cargo build --all-features --target=x86_64-pc-windows-msvc'
+
+# Runs the integration tests of the project for a Windows target
+alias IntegrationTestsWin='cargo test --all-features --no-fail-fast --target=x86_64-pc-windows-msvc -- --show-output --test-threads=1 --nocapture'
+
+# Exports the env vars that instrument the build for code coverage (set them before compiling and running the tests)
+alias CcEnvVars='export CARGO_INCREMENTAL=0
+export RUSTFLAGS="-Zprofile -Ccodegen-units=1 -Copt-level=0 -Clink-dead-code -Coverflow-checks=off -Zpanic_abort_tests -Cpanic=abort"
+export RUSTDOCFLAGS="-Cpanic=abort"'
+
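+# Generates the HTML coverage report from the instrumented test run (tests must run before this)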
+alias CodeCov='grcov . -s . --binary-path ./target/debug/ -t html --branch --ignore-not-existing -o ./target/debug/coverage'
\ No newline at end of file
diff --git a/canyon_connection/src/canyon_database_connector.rs b/canyon_connection/src/canyon_database_connector.rs
index ec094010..1432e823 100644
--- a/canyon_connection/src/canyon_database_connector.rs
+++ b/canyon_connection/src/canyon_database_connector.rs
@@ -1,23 +1,25 @@
use async_std::net::TcpStream;
-use tiberius::{Config, AuthMethod};
-use tokio_postgres::{Client, Connection, NoTls, Socket, tls::NoTlsStream};
+use tiberius::{AuthMethod, Config};
+use tokio_postgres::{tls::NoTlsStream, Client, Connection, NoTls, Socket};
use crate::datasources::DatasourceProperties;
/// Represents the current supported databases by Canyon
-#[derive(Debug)]
+#[derive(Debug, Eq, PartialEq)]
pub enum DatabaseType {
PostgreSql,
- SqlServer
+ SqlServer,
}
impl DatabaseType {
+ /// Returns the variant of `Self` that matches the given *DatasourceProperties*,
+ /// representing one of the databases available in `Canyon-SQL`
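+ ///
+ /// e.g. a datasource declaring `db_type = 'postgresql'` maps to `Self::PostgreSql`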
pub fn from_datasource(datasource: &DatasourceProperties<'_>) -> Self {
match datasource.db_type {
"postgresql" => Self::PostgreSql,
"sqlserver" => Self::SqlServer,
- _ => todo!() // TODO Change for boxed dyn error type
+ _ => todo!(), // TODO Change for boxed dyn error type
}
}
}
@@ -25,50 +27,63 @@ impl DatabaseType {
/// A connection with a `PostgreSQL` database
pub struct PostgreSqlConnection {
pub client: Client,
- pub connection: Connection<Socket, NoTlsStream>
+ pub connection: Connection<Socket, NoTlsStream>,
}
/// A connection with a `SqlServer` database
pub struct SqlServerConnection {
- pub client: tiberius::Client<TcpStream>
+ pub client: tiberius::Client<TcpStream>,
}
-/// The Canyon database connection handler.
+/// The Canyon database connection handler. When a new query is launched,
+/// the `new` associated function returns `Self`, holding in one of its
+/// members an active connection against the database type matched from
+/// the datasource that triggered this process
+///
+/// !! Future of this impl. Two aspects to discuss:
+/// - Should we store the active connections instead of triggering
+/// this process on every query, or is it better to open and close
+/// the connection with the database on every query?
+///
+/// - Now that `Mutex` allows const initialization, should we
+/// refactor the initialization into a real static handler?
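+///
+/// A hypothetical usage sketch, relying on the `DEFAULT_DATASOURCE`
+/// static exposed at the crate root (error handling elided):
+/// `let conn = DatabaseConnection::new(&DEFAULT_DATASOURCE.properties).await?;`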
pub struct DatabaseConnection {
pub postgres_connection: Option<PostgreSqlConnection>,
pub sqlserver_connection: Option<SqlServerConnection>,
- pub database_type: DatabaseType
+ pub database_type: DatabaseType,
}
unsafe impl Send for DatabaseConnection {}
unsafe impl Sync for DatabaseConnection {}
impl DatabaseConnection {
- pub async fn new(datasource: &DatasourceProperties<'_>) -> Result<Self, Box<(dyn std::error::Error + Sync + Send + 'static)>> {
+ pub async fn new(
+ datasource: &DatasourceProperties<'_>,
+ ) -> Result<Self, Box<(dyn std::error::Error + Sync + Send + 'static)>> {
match datasource.db_type {
"postgresql" => {
- let (new_client, new_connection) =
- tokio_postgres::connect(
+ let (new_client, new_connection) = tokio_postgres::connect(
&format!(
"postgres://{user}:{pswd}@{host}:{port}/{db}",
- user = datasource.username,
- pswd = datasource.password,
- host = datasource.host,
- port = datasource.port.unwrap_or_default(),
- db = datasource.db_name
- )[..],
- NoTls
- ).await?;
+ user = datasource.username,
+ pswd = datasource.password,
+ host = datasource.host,
+ port = datasource.port.unwrap_or_default(),
+ db = datasource.db_name
+ )[..],
+ NoTls,
+ )
+ .await?;
Ok(Self {
postgres_connection: Some(PostgreSqlConnection {
client: new_client,
- connection: new_connection
+ connection: new_connection,
}),
sqlserver_connection: None,
- database_type: DatabaseType::from_datasource(&datasource)
+ database_type: DatabaseType::from_datasource(datasource),
})
- },
+ }
"sqlserver" => {
let mut config = Config::new();
@@ -77,21 +92,24 @@ impl DatabaseConnection {
config.database(datasource.db_name);
// Using SQL Server authentication.
- config.authentication(
- AuthMethod::sql_server(datasource.username, datasource.password)
- );
+ config.authentication(AuthMethod::sql_server(
+ datasource.username,
+ datasource.password,
+ ));
// on production, it is not a good idea to do this
config.trust_cert();
// Taking the address from the configuration, using async-std's
// TcpStream to connect to the server.
- let tcp = TcpStream::connect(config.get_addr()).await
- .ok().expect("Error instanciating the SqlServer TCP Stream");
+ let tcp = TcpStream::connect(config.get_addr())
+ .await
+ .expect("Error instanciating the SqlServer TCP Stream");
// We'll disable the Nagle algorithm. Buffering is handled
// internally with a `Sink`.
- tcp.set_nodelay(true).ok().expect("Error in the SqlServer `nodelay` config");
+ tcp.set_nodelay(true)
+ .expect("Error in the SqlServer `nodelay` config");
// Handling TLS, login and other details related to the SQL Server.
let client = tiberius::Client::connect(config, tcp).await;
@@ -99,22 +117,58 @@ impl DatabaseConnection {
Ok(Self {
postgres_connection: None,
sqlserver_connection: Some(SqlServerConnection {
- client: client.ok().expect("A failure happened connecting to the database")
+ client: client.expect("A failure happened connecting to the database"),
}),
- database_type: DatabaseType::from_datasource(&datasource)
+ database_type: DatabaseType::from_datasource(datasource),
})
- },
- &_ => return Err(
- std::io::Error::new(
+ }
+ &_ => {
+ return Err(std::io::Error::new(
std::io::ErrorKind::Unsupported,
format!(
- "There's no `{}` database supported in Canyon-SQL",
+ "There's no `{}` database supported in Canyon-SQL",
datasource.db_type
- )
- ).into_inner().unwrap()
- )
+ ),
+ )
+ .into_inner()
+ .unwrap())
+ }
}
}
}
+#[cfg(test)]
+mod database_connection_handler {
+ use super::*;
+ use crate::CanyonSqlConfig;
+
+ const CONFIG_FILE_MOCK_ALT: &str = r#"
+ [canyon_sql]
+ datasources = [
+ {name = 'PostgresDS', properties.db_type = 'postgresql', properties.username = 'username', properties.password = 'random_pass', properties.host = 'localhost', properties.db_name = 'triforce'},
+ {name = 'SqlServerDS', properties.db_type = 'sqlserver', properties.username = 'username2', properties.password = 'random_pass2', properties.host = '192.168.0.250.1', properties.port = 3340, properties.db_name = 'triforce2'}
+ ]
+ "#;
+
+ /// Tests the behaviour of `DatabaseType::from_datasource(...)`
+ #[test]
+ fn check_from_datasource() {
+ let config: CanyonSqlConfig = toml::from_str(CONFIG_FILE_MOCK_ALT)
+ .expect("A failure happened retrieving the [canyon_sql] section");
+
+ let psql_ds = &config.canyon_sql.datasources[0].properties;
+ let sqls_ds = &config.canyon_sql.datasources[1].properties;
+
+ assert_eq!(
+ DatabaseType::from_datasource(psql_ds),
+ DatabaseType::PostgreSql
+ );
+ assert_eq!(
+ DatabaseType::from_datasource(sqls_ds),
+ DatabaseType::SqlServer
+ );
+ }
+ // TODO Should we check the behaviour of the database handler here or as an
+ // integration test?
+}
diff --git a/canyon_connection/src/datasources.rs b/canyon_connection/src/datasources.rs
index 73158633..dfba096b 100644
--- a/canyon_connection/src/datasources.rs
+++ b/canyon_connection/src/datasources.rs
@@ -1,10 +1,9 @@
use serde::Deserialize;
-
/// ```
#[test]
fn load_ds_config_from_array() {
- const CONFIG_FILE_MOCK_ALT: &'static str = r#"
+ const CONFIG_FILE_MOCK_ALT: &str = r#"
[canyon_sql]
datasources = [
{name = 'PostgresDS', properties.db_type = 'postgresql', properties.username = 'username', properties.password = 'random_pass', properties.host = 'localhost', properties.db_name = 'triforce'},
@@ -15,50 +14,50 @@ fn load_ds_config_from_array() {
let config: CanyonSqlConfig = toml::from_str(CONFIG_FILE_MOCK_ALT)
.expect("A failure happened retrieving the [canyon_sql] section");
- let ds_0 = &config.canyon_sql.datasources[0];
- let ds_1 = &config.canyon_sql.datasources[1];
-
- assert_eq!(ds_0.name, "PostgresDS");
- assert_eq!(ds_0.properties.db_type, "postgresql");
- assert_eq!(ds_0.properties.username, "username");
- assert_eq!(ds_0.properties.password, "random_pass");
- assert_eq!(ds_0.properties.host, "localhost");
- assert_eq!(ds_0.properties.port, None);
- assert_eq!(ds_0.properties.db_name, "triforce");
+ let ds_0 = &config.canyon_sql.datasources[0];
+ let ds_1 = &config.canyon_sql.datasources[1];
+
+ assert_eq!(ds_0.name, "PostgresDS");
+ assert_eq!(ds_0.properties.db_type, "postgresql");
+ assert_eq!(ds_0.properties.username, "username");
+ assert_eq!(ds_0.properties.password, "random_pass");
+ assert_eq!(ds_0.properties.host, "localhost");
+ assert_eq!(ds_0.properties.port, None);
+ assert_eq!(ds_0.properties.db_name, "triforce");
- assert_eq!(ds_1.name, "SqlServerDS");
- assert_eq!(ds_1.properties.db_type, "sqlserver");
- assert_eq!(ds_1.properties.username, "username2");
- assert_eq!(ds_1.properties.password, "random_pass2");
- assert_eq!(ds_1.properties.host, "192.168.0.250.1");
- assert_eq!(ds_1.properties.port, Some(3340));
- assert_eq!(ds_1.properties.db_name, "triforce2");
+ assert_eq!(ds_1.name, "SqlServerDS");
+ assert_eq!(ds_1.properties.db_type, "sqlserver");
+ assert_eq!(ds_1.properties.username, "username2");
+ assert_eq!(ds_1.properties.password, "random_pass2");
+ assert_eq!(ds_1.properties.host, "192.168.0.250.1");
+ assert_eq!(ds_1.properties.port, Some(3340));
+ assert_eq!(ds_1.properties.db_name, "triforce2");
}
-/// ```
+///
#[derive(Deserialize, Debug, Clone)]
pub struct CanyonSqlConfig<'a> {
#[serde(borrow)]
- pub canyon_sql: Datasources<'a>
+ pub canyon_sql: Datasources<'a>,
}
#[derive(Deserialize, Debug, Clone)]
pub struct Datasources<'a> {
#[serde(borrow)]
- pub datasources: Vec<DatasourceConfig<'a>>
+ pub datasources: Vec<DatasourceConfig<'a>>,
}
#[derive(Deserialize, Debug, Clone, Copy)]
pub struct DatasourceConfig<'a> {
#[serde(borrow)]
- pub name: &'a str,
- pub properties: DatasourceProperties<'a>
-}
+ pub name: &'a str,
+ pub properties: DatasourceProperties<'a>,
+}
#[derive(Deserialize, Debug, Clone, Copy)]
pub struct DatasourceProperties<'a> {
- pub db_type: &'a str,
- pub username: &'a str,
+ pub db_type: &'a str,
+ pub username: &'a str,
pub password: &'a str,
pub host: &'a str,
pub port: Option<u16>,
pub db_name: &'a str,
-}
\ No newline at end of file
+}
diff --git a/canyon_connection/src/lib.rs b/canyon_connection/src/lib.rs
index 62347df6..f6ec435f 100644
--- a/canyon_connection/src/lib.rs
+++ b/canyon_connection/src/lib.rs
@@ -1,25 +1,25 @@
+pub extern crate async_std;
+pub extern crate tiberius;
pub extern crate tokio;
pub extern crate tokio_postgres;
-pub extern crate tiberius;
-pub extern crate async_std;
pub mod canyon_database_connector;
mod datasources;
use std::fs;
-use crate::datasources::{DatasourceConfig, CanyonSqlConfig};
+use crate::datasources::{CanyonSqlConfig, DatasourceConfig};
use lazy_static::lazy_static;
-const CONFIG_FILE_IDENTIFIER: &'static str = "canyon.toml";
-
+const CONFIG_FILE_IDENTIFIER: &str = "canyon.toml";
lazy_static! {
static ref RAW_CONFIG_FILE: String = fs::read_to_string(CONFIG_FILE_IDENTIFIER)
.expect("Error opening or reading the Canyon configuration file");
static ref CONFIG_FILE: CanyonSqlConfig<'static> = toml::from_str(RAW_CONFIG_FILE.as_str())
.expect("Error generating the configuration for Canyon-SQL");
-
- pub static ref DATASOURCES: Vec<DatasourceConfig<'static>> = CONFIG_FILE.canyon_sql.datasources.clone();
- pub static ref DEFAULT_DATASOURCE: DatasourceConfig<'static> = CONFIG_FILE.canyon_sql.datasources.clone()[0];
-}
\ No newline at end of file
+ pub static ref DATASOURCES: Vec<DatasourceConfig<'static>> =
+ CONFIG_FILE.canyon_sql.datasources.clone();
+ pub static ref DEFAULT_DATASOURCE: DatasourceConfig<'static> =
+ CONFIG_FILE.canyon_sql.datasources.clone()[0];
+}
diff --git a/canyon_crud/src/bounds.rs b/canyon_crud/src/bounds.rs
index 403fe2ad..6f082059 100644
--- a/canyon_crud/src/bounds.rs
+++ b/canyon_crud/src/bounds.rs
@@ -1,27 +1,25 @@
-use std::fmt::Debug;
+#![allow(clippy::extra_unused_lifetimes)]
+use crate::{
+ crud::{CrudOperations, Transaction},
+ mapper::RowMapper,
+};
use canyon_connection::{
- tokio_postgres::types::ToSql,
- tiberius::{
- IntoSql,
- ColumnData
- }
+ tiberius::{ColumnData, IntoSql},
+ tokio_postgres::types::ToSql,
};
+use chrono::{DateTime, FixedOffset, NaiveDate, NaiveDateTime, NaiveTime, Utc};
+use std::fmt::Debug;
-use chrono::{NaiveDate, NaiveDateTime, NaiveTime, DateTime, FixedOffset, Utc};
-
-use crate::{crud::{CrudOperations, Transaction}, mapper::RowMapper};
-
-
-/// Created for retrieve the field's name of a field of a struct, giving
+/// Created to retrieve the name of a field of a struct, given
/// Canyon's autogenerated enum with the variants that map those
/// fields.
-///
+///
/// ```
/// pub struct Struct<'a> {
/// pub some_field: &'a str
/// }
-///
+///
/// // Autogenerated enum
/// #[derive(Debug)]
/// #[allow(non_camel_case_types)]
@@ -32,29 +30,30 @@ use crate::{crud::{CrudOperations, Transaction}, mapper::RowMapper};
/// So, to retrieve the field's name, something like this would be used in some part
/// of the Canyon Manager crate, to wire the necessary code that passes the field
/// name, retrieved from the enum variant, to a caller.
-///
+///
/// // Something like:
/// `let struct_field_name_from_variant = StructField::some_field.field_name_as_str();`
-pub trait FieldIdentifier<T>
- where T: Transaction<T> + CrudOperations<T> + RowMapper<T> + Debug
+pub trait FieldIdentifier<T>
+where
+ T: Transaction<T> + CrudOperations<T> + RowMapper<T> + Debug,
{
fn field_name_as_str(self) -> String;
}
/// Represents some kind of introspection that makes the implementors
/// retrieve a value inside some variant of an associated enum type
-/// and convert it to an [`String`], to enable the convertion of
+/// and convert it to a [`String`], to enable the conversion of
/// that value into something that can be part of an SQL query.
-///
+///
/// It's a generic mechanism to convert everything to a string representation
/// in SQL syntax, so the clauses can use any value to build filters
-///
+///
/// Ex:
/// `SELECT * FROM some_table WHERE id = '2'`
-///
+///
/// That '2' is extracted from some enum that implements [`FieldValueIdentifier`],
/// where the variant would usually be something like:
-///
+///
/// ```
/// pub enum Enum {
/// IntVariant(i32)
@@ -62,14 +61,16 @@ pub trait FieldIdentifier<T>
/// ```
/// so the `.value(self)` method is called on `self`, gets the value for that variant
/// (or another specified in the logic) and returns that value as a [`String`]
-pub trait FieldValueIdentifier<T>
- where T: Transaction<T> + CrudOperations<T> + RowMapper<T> + Debug
+pub trait FieldValueIdentifier<T>
+where
+ T: Transaction<T> + CrudOperations<T> + RowMapper<T> + Debug,
{
fn value(self) -> String;
}
-impl<T> FieldValueIdentifier<T> for &str
- where T: Transaction<T> + CrudOperations<T> + RowMapper<T> + Debug
+impl<T> FieldValueIdentifier<T> for &str
+where
+ T: Transaction<T> + CrudOperations<T> + RowMapper<T> + Debug,
{
fn value(self) -> String {
self.to_string()
@@ -77,16 +78,15 @@ impl<T> FieldValueIdentifier<T> for &str
}
/// Bounds to some type T in order to make it callable over some fn parameter T
-///
+///
/// Represents the ability of a struct to be considered a candidate to perform
/// actions over it as it holds the 'parent' side of a foreign key relation.
-///
-/// Usually, it's used on the Canyon macros to retrieve the column that
+///
+/// Usually, it's used in the Canyon macros to retrieve the column that
/// this side of the relation is representing
pub trait ForeignKeyable {
- // type Output; // TODO as
/// Retrieves the field related to the column passed in
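+ /// (e.g. a hypothetical implementor with a `user_id` column might return
+ /// `Some(self.user_id.to_string())` when asked for `"user_id"`)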
- fn get_fk_column<'a>(&self, column: &'a str) -> Option<String>;
+ fn get_fk_column(&self, column: &str) -> Option<String>;
}
/// To define trait objects that help to relate the necessary bounds in the `IN` SQL clause
@@ -99,10 +99,9 @@ pub trait QueryParameters<'a>: std::fmt::Debug + Sync + Send {
fn as_sqlserver_param(&self) -> ColumnData<'_>;
}
-
-/// The implementation of the [`tiberius`] [`IntoSql`] for the
+/// The implementation of the [`canyon_connection::tiberius`] [`IntoSql`] for the
/// query parameters.
-///
+///
/// This implementation is necessary because of the generic amplitude
/// of the arguments of the [`Transaction::query`], that should work with
/// a collection of [`QueryParameters<'a>`], in order to allow a workflow
@@ -219,7 +218,9 @@ impl<'a> QueryParameters<'a> for Option<&f32> {
}
fn as_sqlserver_param(&self) -> ColumnData<'_> {
- ColumnData::F32(Some(*self.expect("Error on an f32 value on QueryParameters<'_>")))
+ ColumnData::F32(Some(
+ *self.expect("Error on an f32 value on QueryParameters<'_>"),
+ ))
}
}
impl<'a> QueryParameters<'a> for f64 {
@@ -255,7 +256,9 @@ impl<'a> QueryParameters<'a> for Option<&f64> {
}
fn as_sqlserver_param(&self) -> ColumnData<'_> {
- ColumnData::F64(Some(*self.expect("Error on an f64 value on QueryParameters<'_>")))
+ ColumnData::F64(Some(
+ *self.expect("Error on an f64 value on QueryParameters<'_>"),
+ ))
}
}
impl<'a> QueryParameters<'a> for i64 {
@@ -319,9 +322,7 @@ impl<'a> QueryParameters<'a> for Option<String> {
fn as_sqlserver_param(&self) -> ColumnData<'_> {
match self {
- Some(string) => ColumnData::String(
- Some(std::borrow::Cow::Owned(string.to_owned()))
- ),
+ Some(string) => ColumnData::String(Some(std::borrow::Cow::Owned(string.to_owned()))),
None => ColumnData::String(None),
}
}
@@ -333,9 +334,7 @@ impl<'a> QueryParameters<'a> for Option<&String> {
fn as_sqlserver_param(&self) -> ColumnData<'_> {
match self {
- Some(string) => ColumnData::String(
- Some(std::borrow::Cow::Borrowed(string))
- ),
+ Some(string) => ColumnData::String(Some(std::borrow::Cow::Borrowed(string))),
None => ColumnData::String(None),
}
}
@@ -442,7 +441,7 @@ impl<'a> QueryParameters<'_> for DateTime<Utc> {
self.into_sql()
}
}
-impl<'a> QueryParameters<'a> for Option<DateTime<Utc>> {
+impl<'a> QueryParameters<'_> for Option<DateTime<Utc>> {
fn as_postgres_param(&self) -> &(dyn ToSql + Sync) {
self
}
@@ -450,4 +449,4 @@ impl<'a> QueryParameters<'a> for Option> {
fn as_sqlserver_param(&self) -> ColumnData<'_> {
self.into_sql()
}
-}
\ No newline at end of file
+}
diff --git a/canyon_crud/src/crud.rs b/canyon_crud/src/crud.rs
index 5dd277cb..a16afaf0 100644
--- a/canyon_crud/src/crud.rs
+++ b/canyon_crud/src/crud.rs
@@ -3,158 +3,173 @@ use std::fmt::{Debug, Display};
use async_trait::async_trait;
use canyon_connection::canyon_database_connector::DatabaseType;
-use crate::{bounds::QueryParameters, query_elements::query_builder::QueryBuilder};
use crate::mapper::RowMapper;
use crate::result::DatabaseResult;
+use crate::{bounds::QueryParameters, query_elements::query_builder::QueryBuilder};
use canyon_connection::{
- DATASOURCES,
- DEFAULT_DATASOURCE,
- canyon_database_connector::DatabaseConnection,
+ canyon_database_connector::DatabaseConnection, DATASOURCES, DEFAULT_DATASOURCE,
};
-
/// This trait defines and implements a query against a database, given
/// a statement `stmt` and the params to pass to the client.
-///
+///
/// It returns a [`DatabaseResult`], which is the core Canyon type to wrap
/// the result of the query and, if the user desires,
/// automatically map it to a struct.
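+///
+/// A hypothetical usage sketch for an implementor `Entity` (an illustrative
+/// name): an empty datasource name resolves to the default datasource, as
+/// implemented below:
+/// `let res = Entity::query("SELECT * FROM entity", [], "").await?;`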
#[async_trait]
+#[allow(clippy::question_mark)]
pub trait Transaction<T: Debug> {
/// Performs the necessary operations to execute a query against the database
- async fn query<'a, S, Z>(stmt: S, params: Z, datasource_name: &'a str)
- -> Result<DatabaseResult<T>, Box<(dyn std::error::Error + Sync + Send + 'static)>>
- where
- S: AsRef<str> + Display + Sync + Send + 'a,
- Z: AsRef<[&'a dyn QueryParameters<'a>]> + Sync + Send + 'a
+ async fn query<'a, S, Z>(
+ stmt: S,
+ params: Z,
+ datasource_name: &'a str,
+ ) -> Result<DatabaseResult<T>, Box<(dyn std::error::Error + Sync + Send + 'static)>>
+ where
+ S: AsRef<str> + Display + Sync + Send + 'a,
+ Z: AsRef<[&'a dyn QueryParameters<'a>]> + Sync + Send + 'a,
{
- let database_connection = if datasource_name == "" {
- DatabaseConnection::new(&DEFAULT_DATASOURCE.properties).await
- } else { // Get the specified one
- DatabaseConnection::new(
+ let database_connection =
+ if datasource_name.is_empty() {
+ DatabaseConnection::new(&DEFAULT_DATASOURCE.properties).await
+ } else {
+ // Get the specified one
+ DatabaseConnection::new(
&DATASOURCES.iter()
- .find( |ds| ds.name == datasource_name)
- .expect(&format!("No datasource found with the specified parameter: `{}`", datasource_name))
- .properties
+ .find(|ds| ds.name == datasource_name)
+ .unwrap_or_else(||
+ panic!("No datasource found with the specified parameter: `{datasource_name}`")
+ ).properties
).await
- };
+ };
if let Err(_db_conn) = database_connection {
- todo!();
+ return Err(_db_conn);
} else {
- // No errors
- let db_conn = database_connection.ok().unwrap();
+ let db_conn = database_connection?;
match db_conn.database_type {
- DatabaseType::PostgreSql =>
- postgres_query_launcher::launch::<T>(db_conn, stmt.to_string(), params.as_ref()).await,
- DatabaseType::SqlServer =>
- sqlserver_query_launcher::launch::<T, Z>(db_conn, &mut stmt.to_string(), params).await
+ DatabaseType::PostgreSql => {
+ postgres_query_launcher::launch::<T>(db_conn, stmt.to_string(), params.as_ref())
+ .await
+ }
+ DatabaseType::SqlServer => {
+ sqlserver_query_launcher::launch::<T, Z>(db_conn, &mut stmt.to_string(), params)
+ .await
+ }
}
}
}
}
-/// [`CrudOperations`] it's one of the core parts of Canyon.
-///
-/// Here it's defined and implemented every CRUD operation that Canyon
-/// makes available to the user, directly derived with a `CanyonCrud`
+/// *CrudOperations* is the core part of Canyon-SQL.
+///
+/// Here is defined and implemented every CRUD operation
+/// available to the user, obtained just by deriving the `CanyonCrud`
/// derive macro when a struct contains the annotation.
-///
-/// Also, this traits needs that the type T over what it's generified
+///
+/// Also, this trait needs the type T over which it's generified
/// to implement certain traits in order to work correctly.
-///
+///
/// The most notable one is [`RowMapper`], which allows
/// Canyon to directly map database results into structs.
-///
-/// See it's definition and docs to see the real implications.
+///
+/// See its definition and docs for the implementations.
/// Also, you can find the written macro-code that performs the auto-mapping
-/// in the [`canyon_macros`] crates, on the root of this project.
+/// in the *canyon_sql::canyon_macros* crate, at the root of this project.
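+///
+/// A hypothetical sketch of what the derive enables (`Post` is an
+/// illustrative struct name, not part of this crate):
+/// `let all: Vec<Post> = Post::find_all().await?;`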
#[async_trait]
-pub trait CrudOperations<T>: Transaction<T>
- where T: Debug + CrudOperations<T> + RowMapper<T>
+pub trait CrudOperations<T>: Transaction<T>
+where
+ T: Debug + CrudOperations<T> + RowMapper<T>,
{
async fn find_all<'a>() -> Result<Vec<T>, Box<(dyn std::error::Error + Send + Sync + 'static)>>;
-
- async fn find_all_datasource<'a>(datasource_name: &'a str) -> Result<Vec<T>, Box<(dyn std::error::Error + Send + Sync + 'static)>>;
-
+
+ async fn find_all_datasource<'a>(
+ datasource_name: &'a str,
+ ) -> Result<Vec<T>, Box<(dyn std::error::Error + Send + Sync + 'static)>>;
+
async fn find_all_unchecked<'a>() -> Vec<T>;
-
+
async fn find_all_unchecked_datasource<'a>(datasource_name: &'a str) -> Vec<T>;
- fn find_all_query<'a>() -> QueryBuilder<'a, T>;
-
- fn find_all_query_datasource<'a>(datasource_name: &'a str) -> QueryBuilder<'a, T>;
-
+ fn find_query<'a>() -> QueryBuilder<'a, T>;
+
+ fn find_query_datasource(datasource_name: &str) -> QueryBuilder<'_, T>;
+
async fn count() -> Result<i64, Box<(dyn std::error::Error + Send + Sync + 'static)>>;
-
- async fn count_datasource<'a>(datasource_name: &'a str) -> Result<i64, Box<(dyn std::error::Error + Send + Sync + 'static)>>;
- async fn find_by_pk<'a>(value: &'a dyn QueryParameters<'a>)
- -> Result