diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS
new file mode 100644
index 00000000..d96a0737
--- /dev/null
+++ b/.github/CODEOWNERS
@@ -0,0 +1,4 @@
+# CODEOWNERS file for automating PR flows for the project
+
+* @Pyzyryab
+* @Gbm25
diff --git a/.github/CODE_OF_CONDUCT.md b/.github/CODE_OF_CONDUCT.md
new file mode 100644
index 00000000..5ef3bc16
--- /dev/null
+++ b/.github/CODE_OF_CONDUCT.md
@@ -0,0 +1,128 @@
+# Contributor Covenant Code of Conduct
+
+## Our Pledge
+
+We as members, contributors, and leaders pledge to make participation in our
+community a harassment-free experience for everyone, regardless of age, body
+size, visible or invisible disability, ethnicity, sex characteristics, gender
+identity and expression, level of experience, education, socio-economic status,
+nationality, personal appearance, race, religion, or sexual identity
+and orientation.
+
+We pledge to act and interact in ways that contribute to an open, welcoming,
+diverse, inclusive, and healthy community.
+
+## Our Standards
+
+Examples of behavior that contributes to a positive environment for our
+community include:
+
+* Demonstrating empathy and kindness toward other people
+* Being respectful of differing opinions, viewpoints, and experiences
+* Giving and gracefully accepting constructive feedback
+* Accepting responsibility and apologizing to those affected by our mistakes,
+  and learning from the experience
+* Focusing on what is best not just for us as individuals, but for the
+  overall community
+
+Examples of unacceptable behavior include:
+
+* The use of sexualized language or imagery, and sexual attention or
+  advances of any kind
+* Trolling, insulting or derogatory comments, and personal or political attacks
+* Public or private harassment
+* Publishing others' private information, such as a physical or email
+  address, without their explicit permission
+* Other conduct which could reasonably be considered inappropriate in a
+  professional setting
+
+## Enforcement Responsibilities
+
+Community leaders are responsible for clarifying and enforcing our standards of
+acceptable behavior and will take appropriate and fair corrective action in
+response to any behavior that they deem inappropriate, threatening, offensive,
+or harmful.
+
+Community leaders have the right and responsibility to remove, edit, or reject
+comments, commits, code, wiki edits, issues, and other contributions that are
+not aligned to this Code of Conduct, and will communicate reasons for moderation
+decisions when appropriate.
+
+## Scope
+
+This Code of Conduct applies within all community spaces, and also applies when
+an individual is officially representing the community in public spaces.
+Examples of representing our community include using an official e-mail address,
+posting via an official social media account, or acting as an appointed
+representative at an online or offline event.
+
+## Enforcement
+
+Instances of abusive, harassing, or otherwise unacceptable behavior may be
+reported to the community leaders responsible for enforcement at
+zerodaycode@tutanota.com.
+All complaints will be reviewed and investigated promptly and fairly.
+
+All community leaders are obligated to respect the privacy and security of the
+reporter of any incident.
+
+## Enforcement Guidelines
+
+Community leaders will follow these Community Impact Guidelines in determining
+the consequences for any action they deem in violation of this Code of Conduct:
+
+### 1. Correction
+
+**Community Impact**: Use of inappropriate language or other behavior deemed
+unprofessional or unwelcome in the community.
+
+**Consequence**: A private, written warning from community leaders, providing
+clarity around the nature of the violation and an explanation of why the
+behavior was inappropriate. A public apology may be requested.
+
+### 2. Warning
+
+**Community Impact**: A violation through a single incident or series
+of actions.
+
+**Consequence**: A warning with consequences for continued behavior. No
+interaction with the people involved, including unsolicited interaction with
+those enforcing the Code of Conduct, for a specified period of time. This
+includes avoiding interactions in community spaces as well as external channels
+like social media. Violating these terms may lead to a temporary or
+permanent ban.
+
+### 3. Temporary Ban
+
+**Community Impact**: A serious violation of community standards, including
+sustained inappropriate behavior.
+
+**Consequence**: A temporary ban from any sort of interaction or public
+communication with the community for a specified period of time. No public or
+private interaction with the people involved, including unsolicited interaction
+with those enforcing the Code of Conduct, is allowed during this period.
+Violating these terms may lead to a permanent ban.
+
+### 4. Permanent Ban
+
+**Community Impact**: Demonstrating a pattern of violation of community
+standards, including sustained inappropriate behavior, harassment of an
+individual, or aggression toward or disparagement of classes of individuals.
+
+**Consequence**: A permanent ban from any sort of public interaction within
+the community.
+
+## Attribution
+
+This Code of Conduct is adapted from the [Contributor Covenant][homepage],
+version 2.0, available at
+https://www.contributor-covenant.org/version/2/0/code_of_conduct.html.
+
+Community Impact Guidelines were inspired by [Mozilla's code of conduct
+enforcement ladder](https://github.com/mozilla/diversity).
+
+[homepage]: https://www.contributor-covenant.org
+
+For answers to common questions about this code of conduct, see the FAQ at
+https://www.contributor-covenant.org/faq. Translations are available at
+https://www.contributor-covenant.org/translations.
diff --git a/.github/ISSUE_TEMPLATE/bug_report.md b/.github/ISSUE_TEMPLATE/bug_report.md
new file mode 100644
index 00000000..1cc293fd
--- /dev/null
+++ b/.github/ISSUE_TEMPLATE/bug_report.md
@@ -0,0 +1,36 @@
+---
+name: "\U0001F41B Bug Report"
+about: "If something isn't working as expected \U0001F914."
+title: ''
+labels: 'i: bug'
+assignees: ''
+---
+
+## Bug Report
+
+### Description
+A clear and concise description of what the bug is.
+
+### Steps to reproduce
+
+1. Go to '...'
+2. Click on '....'
+3. Scroll down to '....'
+4. See error
+
+### Expected result
+A clear and concise description of what you expected to happen.
+
+### Actual result
+What is the actual behaviour you saw?
+
+### Possible solution
+
+
+### Additional information
+For example, screenshots or analysis so far.
+
+### Environment
+OS:
+Browser:
+Stack used and version:
diff --git a/.github/ISSUE_TEMPLATE/feature_request.md b/.github/ISSUE_TEMPLATE/feature_request.md
new file mode 100644
index 00000000..9f16be28
--- /dev/null
+++ b/.github/ISSUE_TEMPLATE/feature_request.md
@@ -0,0 +1,19 @@
+---
+name: "\U0001F680 Feature Request"
+about: "I have a suggestion! \U0001F642"
+title: ''
+labels: 'i: enhancement'
+assignees: ''
+
+---
+
+## Feature Request
+
+### Is your feature request related to a problem? Please describe.
+A clear and concise description of what the problem is. Ex. I have an issue when [...]
+
+### Describe the solution you'd like
+A clear and concise description of what you want to happen. Add any considered drawbacks.
+
+### Describe alternatives you've considered
+A clear and concise description of any alternative solutions or features you've considered.
diff --git a/.github/ISSUE_TEMPLATE/question.md b/.github/ISSUE_TEMPLATE/question.md
new file mode 100644
index 00000000..8e7a298e
--- /dev/null
+++ b/.github/ISSUE_TEMPLATE/question.md
@@ -0,0 +1,12 @@
+---
+name: "\U00002753 Question"
+about: "I'm trying to figure something out. \U0001F914"
+title: ''
+labels: ''
+assignees: ''
+
+---
+
+## Question
+
+What are you trying to understand?
diff --git a/.github/ISSUE_TEMPLATE/refactor.md b/.github/ISSUE_TEMPLATE/refactor.md
new file mode 100644
index 00000000..921220a6
--- /dev/null
+++ b/.github/ISSUE_TEMPLATE/refactor.md
@@ -0,0 +1,12 @@
+---
+name: "\U0000267B Refactor"
+about: "I spotted something we can do better. \U0001F913"
+title: ''
+labels: ''
+assignees: ''
+
+---
+
+## Refactor
+
+Describe the improvement we can make.
diff --git a/.github/changelog_configuration.json b/.github/changelog_configuration.json
new file mode 100644
index 00000000..25249373
--- /dev/null
+++ b/.github/changelog_configuration.json
@@ -0,0 +1,72 @@
+{
+    "categories": [
+        {
+            "title": "## 🚀 Features",
+            "labels": ["feature"]
+        },
+        {
+            "title": "## 🐛 Fixes",
+            "labels": ["fix"]
+        },
+        {
+            "title": "## 🧪 Tests",
+            "labels": ["test"]
+        },
+        {
+            "title": "## 🧪 Tests and some 🪄 Magic",
+            "labels": ["test", "magic"],
+            "exclude_labels": ["no-magic"],
+            "exhaustive": true,
+            "empty_content": "- no matching PRs"
+        }
+    ],
+    "ignore_labels": [
+        "ignore"
+    ],
+    "sort": {
+        "order": "ASC",
+        "on_property": "mergedAt"
+    },
+    "template": "${{CHANGELOG}}\n\n<details>\n<summary>Uncategorized</summary>\n\n${{UNCATEGORIZED}}\n</details>",
", + "pr_template": "- ${{TITLE}}\n - PR: #${{NUMBER}}", + "empty_template": "- no changes", + "label_extractor": [ + { + "pattern": "(.) (.+)", + "target": "$1", + "flags": "gu" + }, + { + "pattern": "\\[Issue\\]", + "on_property": "title", + "method": "match" + } + ], + "duplicate_filter": { + "pattern": "\\[ABC-....\\]", + "on_property": "title", + "method": "match" + }, + "transformers": [ + { + "pattern": "[\\-\\*] (\\[(...|TEST|CI|SKIP)\\])( )?(.+?)\n(.+?[\\-\\*] )(.+)", + "target": "- $4\n - $6" + } + ], + "max_tags_to_fetch": 200, + "max_pull_requests": 200, + "max_back_track_time_days": 365, + "exclude_merge_branches": [ + "Owner/qa" + ], + "tag_resolver": { + "method": "semver", + "filter": { + "pattern": "api-(.+)", + "flags": "gu" + } + }, + "base_branches": [ + "dev" + ] +} \ No newline at end of file diff --git a/.github/workflows/code-coverage.yml b/.github/workflows/code-coverage.yml new file mode 100644 index 00000000..efeac0d9 --- /dev/null +++ b/.github/workflows/code-coverage.yml @@ -0,0 +1,71 @@ +name: Linux CI && Code Coverage + +on: + push: + tags: + - 'v[0-9]+.[0-9]+.[0-9]+' + - 'v[0-9]+.[0-9]+.[0-9]+rc[0-9]+' + +env: + CARGO_TERM_COLOR: always + +jobs: + code-coverage: + permissions: + contents: write + env: + CARGO_INCREMENTAL: '0' + RUSTFLAGS: '-Zprofile -Ccodegen-units=1 -Cinline-threshold=0 -Clink-dead-code -Coverflow-checks=off -Cpanic=abort -Zpanic_abort_tests' + RUSTDOCFLAGS: '-Zprofile -Ccodegen-units=1 -Cinline-threshold=0 -Clink-dead-code -Coverflow-checks=off -Cpanic=abort -Zpanic_abort_tests' + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v3 + + - name: Use nightly toolchain + run: | + rustup toolchain install nightly + rustup override set nightly + + - name: Caching cargo dependencies + id: project-cache + uses: Swatinem/rust-cache@v2 + + - if: ${{ steps.cache-cargo.outputs.cache-hit != 'true' }} + name: Install grcov + run: cargo install grcov + + - name: Make the USER own the working directory + if: ${{ matrix.os == 'ubuntu-latest' }} + run: sudo chown -R $USER:$USER ${{ github.workspace }} + + - name: Waking up docker + run: docker-compose -f ./docker/docker-compose.yml up -d + + - name: Run tests + run: | + cargo test --all-features --no-fail-fast --target=x86_64-unknown-linux-gnu -- --show-output --test-threads=1 + + - name: Waking up docker + run: | + docker-compose -f ./docker/docker-compose.yml down + sudo chown -R $USER:$USER ${{ github.workspace }} + rm -rf ./docker/postgres-data + + - name: Generate code coverage report + run: | + grcov . -s . --binary-path ./target/debug/ -t html --branch --ignore-not-existing -o ./target/debug/coverage + grcov . -s . 
+
+      - name: Publish Test Results
+        uses: actions/upload-artifact@v3
+        with:
+          name: Unit Test Results
+          path: |
+            ./target/debug/coverage/code_cov.xml
+            ./target/debug/coverage/index.html
+
+      - name: Publish coverage report to GitHub Pages
+        uses: JamesIves/github-pages-deploy-action@v4
+        with:
+          folder: ./target/debug/coverage
+          token: ${{ secrets.GITHUB_TOKEN }}
diff --git a/.github/workflows/code-quality.yml b/.github/workflows/code-quality.yml
new file mode 100644
index 00000000..a1ad7058
--- /dev/null
+++ b/.github/workflows/code-quality.yml
@@ -0,0 +1,60 @@
+name: Code quality and sanity
+
+on:
+  push:
+    branches: '*'
+  pull_request:
+    branches: '*'
+
+jobs:
+  clippy:
+    name: Lint with Clippy
+    runs-on: ubuntu-latest
+    env:
+      RUSTFLAGS: -Dwarnings
+    steps:
+      - uses: actions/checkout@v3
+
+      - name: Caching project dependencies
+        id: project-cache
+        uses: Swatinem/rust-cache@v2
+
+      - uses: hecrj/setup-rust-action@v1
+        with:
+          components: clippy
+      - run: cargo clippy --workspace --all-targets --verbose --all-features -- -A clippy::question_mark
+
+  rustfmt:
+    name: Verify code formatting
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@v3
+
+      - name: Caching project dependencies
+        id: project-cache
+        uses: Swatinem/rust-cache@v2
+
+      - uses: hecrj/setup-rust-action@v1
+        with:
+          components: rustfmt
+
+      - run: cargo fmt --all -- --check
+
+  check-rustdoc-links:
+    name: Check intra-doc links
+    runs-on: ubuntu-latest
+    strategy:
+      fail-fast: false
+      matrix:
+        crate: [canyon_connection, canyon_crud, canyon_macros, canyon_observer, canyon_sql]
+    steps:
+      - uses: actions/checkout@v3
+
+      - name: Caching project dependencies
+        id: project-cache
+        uses: Swatinem/rust-cache@v2
+
+      - uses: hecrj/setup-rust-action@v1
+        with:
+          rust-version: nightly
+
+      - run: cargo rustdoc -p ${{ matrix.crate }} --all-features -- -D warnings
diff --git a/.github/workflows/continuous-integration.yml b/.github/workflows/continuous-integration.yml
new file mode 100644
index 00000000..82c4ffb9
--- /dev/null
+++ b/.github/workflows/continuous-integration.yml
@@ -0,0 +1,53 @@
+name: Continuous Integration
+
+on:
+  push:
+    branches: 'main'
+  pull_request:
+    branches: 'main'
+
+env:
+  CARGO_TERM_COLOR: always
+
+jobs:
+  multiplatform-tests:
+    name: Testing on Rust ${{ matrix.rust }} for ${{ matrix.os }}
+    runs-on: ${{ matrix.os }}
+    strategy:
+      fail-fast: false
+      matrix:
+        include:
+          - { rust: stable, os: ubuntu-latest }
+          - { rust: nightly, os: ubuntu-latest }
+          - { rust: stable, os: macos-latest }
+          - { rust: stable, os: windows-latest }
+
+    steps:
+      - name: Make the USER own the working directory
+        if: ${{ matrix.os == 'ubuntu-latest' }}
+        run: sudo chown -R $USER:$USER ${{ github.workspace }}
+
+      - uses: actions/checkout@v3
+
+      - name: docker-compose
+        if: ${{ matrix.os == 'ubuntu-latest' }}
+        run: docker-compose -f ./docker/docker-compose.yml up -d
+
+      - name: Caching cargo dependencies
+        id: project-cache
+        uses: Swatinem/rust-cache@v2
+
+      - uses: hecrj/setup-rust-action@v1
+        with:
+          rust-version: ${{ matrix.rust }}
+
+      - name: Load data for MSSQL tests
+        if: ${{ matrix.os == 'ubuntu-latest' }}
+        run: cargo test initialize_sql_server_docker_instance -p tests --all-features --no-fail-fast -- --show-output --nocapture --include-ignored
+
+      - name: Run all tests, UNIT and INTEGRATION, for Linux targets
+        if: ${{ matrix.os == 'ubuntu-latest' }}
+        run: cargo test --verbose --workspace --all-features --no-fail-fast -- --show-output --test-threads=1
+
+      - name: Run UNIT tests with no external connections for the rest of the defined targets
+        run: cargo test --verbose --workspace --exclude tests --all-features --no-fail-fast -- --show-output
diff --git a/.github/workflows/macos-tests.yml b/.github/workflows/macos-tests.yml
new file mode 100644
index 00000000..21ca2e01
--- /dev/null
+++ b/.github/workflows/macos-tests.yml
@@ -0,0 +1,27 @@
+name: macOS CI
+
+on:
+  push:
+    tags:
+      - 'v[0-9]+.[0-9]+.[0-9]+'
+      - 'v[0-9]+.[0-9]+.[0-9]+rc[0-9]+'
+
+env:
+  CARGO_TERM_COLOR: always
+
+jobs:
+  macos-tests:
+    runs-on: macos-latest
+    name: Tests for macOS
+    env:
+      CARGO_TERM_COLOR: always
+    steps:
+      - uses: actions/checkout@v3
+
+      - name: Caching cargo deps
+        id: ci-cache
+        uses: Swatinem/rust-cache@v2
+
+      - name: Running tests for macOS targets
+        run: |
+          cargo test --all-features --workspace --exclude tests
\ No newline at end of file
diff --git a/.github/workflows/publish-tests-results.yml b/.github/workflows/publish-tests-results.yml
deleted file mode 100644
index 4253d472..00000000
--- a/.github/workflows/publish-tests-results.yml
+++ /dev/null
@@ -1,37 +0,0 @@
-name: Unit Test Results
-
-on:
-  workflow_run:
-    workflows: ["gcov"]
-    types:
-      - completed
-
-jobs:
-  unit-test-results:
-    name: Unit Test Results
-    runs-on: ubuntu-latest
-    if: github.event.workflow_run.conclusion != 'skipped'
-
-    steps:
-      - name: Download and Extract Artifacts
-        env:
-          GITHUB_TOKEN: ${{secrets.GITHUB_TOKEN}}
-        run: |
-          mkdir -p artifacts && cd artifacts
-
-          artifacts_url=${{ github.event.workflow_run.artifacts_url }}
-
-          gh api "$artifacts_url" -q '.artifacts[] | [.name, .archive_download_url] | @tsv' | while read artifact
-          do
-            IFS=$'\t' read name url <<< "$artifact"
-            gh api $url > "$name.zip"
-            unzip -d "$name" "$name.zip"
-          done
-
-      - name: Publish Unit Test Results
-        uses: EnricoMi/publish-unit-test-result-action@v2
-        with:
-          commit: ${{ github.event.workflow_run.head_sha }}
-          event_file: artifacts/Event File/event.json
-          event_name: ${{ github.event.workflow_run.event }}
-          files: "artifacts/*.xml"
\ No newline at end of file
diff --git a/.github/workflows/publish.yaml b/.github/workflows/publish.yaml
deleted file mode 100644
index dd2f036d..00000000
--- a/.github/workflows/publish.yaml
+++ /dev/null
@@ -1,27 +0,0 @@
-on:
-  push:
-    # Pattern matched against refs/tags
-    tags:
-      - '*'           # Push events to every tag not containing /
-  workflow_dispatch:
-
-name: Publish Canyon-SQL to Crates.io
-
-jobs:
-  publish:
-    name: Publish
-    runs-on: ubuntu-latest
-    steps:
-      - name: Checkout sources
-        uses: actions/checkout@v3
-
-      - name: Install stable toolchain
-        uses: actions-rs/toolchain@v1
-        with:
-          profile: minimal
-          toolchain: stable
-          override: true
-
-      - run: cargo publish --token ${secrets.CRATES_IO_TOKEN}
-        env:
-          CRATES_TOKEN: ${{ secrets.CRATES_IO_TOKEN }} ##
\ No newline at end of file
diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml
new file mode 100644
index 00000000..dae50ee0
--- /dev/null
+++ b/.github/workflows/release.yml
@@ -0,0 +1,71 @@
+name: Generate Canyon-SQL release
+
+on:
+  push:
+    tags:
+      - 'v[0-9]+.[0-9]+.[0-9]+'
+      - 'v[0-9]+.[0-9]+.[0-9]+rc[0-9]+'
+
+jobs:
+  publish:
+    name: Publish Canyon-SQL
+    runs-on: ubuntu-latest
+    steps:
+      - name: Checkout sources
+        uses: actions/checkout@v3
+
+      - name: Install stable toolchain
+        uses: actions-rs/toolchain@v1
+        with:
+          profile: minimal
+          toolchain: stable
+          override: true
+
+      - uses: katyo/publish-crates@v1
+        with:
+          registry-token: ${{ secrets.CRATES_IO_TOKEN }}
+          path: './canyon_connection'
+
+      - uses: katyo/publish-crates@v1
+        with:
+          registry-token: ${{ secrets.CRATES_IO_TOKEN }}
+          path: './canyon_crud'
+
+      - uses: katyo/publish-crates@v1
+        with:
+          registry-token: ${{ secrets.CRATES_IO_TOKEN }}
+          path: './canyon_observer'
+
+      - uses: katyo/publish-crates@v1
+        with:
+          registry-token: ${{ secrets.CRATES_IO_TOKEN }}
+          path: './canyon_macros'
+
+      - uses: katyo/publish-crates@v1
+        with:
+          registry-token: ${{ secrets.CRATES_IO_TOKEN }}
+          path: './canyon_sql'
+
+  release-publisher:
+    permissions:
+      contents: write
+    name: Generate a new release and update the CHANGELOG
+    runs-on: ubuntu-latest
+    steps:
+      - name: Generate a new Canyon-SQL release on GitHub
+        uses: actions/create-release@v1
+        id: create-release
+        with:
+          draft: false
+          prerelease: false
+          release_name: ${{ github.ref_name }}
+          tag_name: ${{ github.ref }}
+        env:
+          GITHUB_TOKEN: ${{ github.token }}
+
+      - name: "Update the CHANGELOG.md for the release"
+        uses: mikepenz/release-changelog-builder-action@v3
+        with:
+          configuration: "./.github/changelog_configuration.json"
+        env:
+          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
\ No newline at end of file
diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml
deleted file mode 100644
index f1df7493..00000000
--- a/.github/workflows/tests.yml
+++ /dev/null
@@ -1,74 +0,0 @@
-name: Run the tests for the project
-
-on:
-  push:
-    branches: [ "main, development" ]
-  pull_request:
-    branches: [ "main" ]
-
-env:
-  CARGO_TERM_COLOR: always
-
-jobs:
-  gcov:
-    runs-on: ubuntu-latest
-    steps:
-      - uses: actions/checkout@v3
-
-      - name: Use nightly toolchain
-        run: |
-          rustup toolchain install nightly
-          rustup override set nightly
-
-      - name: Install grcov
-        run: cargo install grcov
-
-      - name: Run tests
-        env:
-          CARGO_INCREMENTAL: '0'
-          RUSTFLAGS: '-Zprofile -Ccodegen-units=1 -Cinline-threshold=0 -Clink-dead-code -Coverflow-checks=off -Cpanic=abort -Zpanic_abort_tests'
-          RUSTDOCFLAGS: '-Zprofile -Ccodegen-units=1 -Cinline-threshold=0 -Clink-dead-code -Coverflow-checks=off -Cpanic=abort -Zpanic_abort_tests'
-        run: |
-          cargo test --all-features --no-fail-fast --target=x86_64-unknown-linux-gnu
-
-      - name: Generate code coverage report
-        if: always()
-        env:
-          CARGO_INCREMENTAL: '0'
-          RUSTFLAGS: '-Zprofile -Ccodegen-units=1 -Cinline-threshold=0 -Clink-dead-code -Coverflow-checks=off -Cpanic=abort -Zpanic_abort_tests'
-          RUSTDOCFLAGS: '-Zprofile -Ccodegen-units=1 -Cinline-threshold=0 -Clink-dead-code -Coverflow-checks=off -Cpanic=abort -Zpanic_abort_tests'
-        run: |
-          grcov . -s . --binary-path ./target/debug/ -t html --branch --ignore-not-existing -o ./target/debug/coverage
-          grcov . -s . --binary-path ./target/debug/ -t cobertura --branch --ignore-not-existing -o ./target/debug/coverage/code_cov.xml
-
-      # - name: Publish Test Results from XML
-      #   uses: EnricoMi/publish-unit-test-result-action@v2
-      #   if: always()
-      #   with:
-      #     junit_files: "./target/debug/coverage/code_cov.xml"
-
-      - name: Show me directories
-        if: always()
-        run: |
-          ls -la ./target/debug
-          ls -la ./target/debug/coverage
-          ls -la ./target/debug/coverage/badges
-
-      - name: Upload
-        uses: actions/upload-artifact@v3
-        with:
-          name: Event File
-          path: ${{ github.event_path }}
-      - name: Publish Test Results
-        uses: actions/upload-artifact@v3
-        with:
-          name: Unit Test Results
-          path: |
-            ./target/debug/coverage/code_cov.xml
-            ./target/debug/coverage/index.html
-
-      - name: Publish coverage report to GitHub Pages
-        if: ${{ github.ref == 'refs/heads/main' }}
-        uses: JamesIves/github-pages-deploy-action@v4
-        with:
-          folder: ./target/debug/coverage
diff --git a/.github/workflows/windows-tests.yml b/.github/workflows/windows-tests.yml
new file mode 100644
index 00000000..a6ace765
--- /dev/null
+++ b/.github/workflows/windows-tests.yml
@@ -0,0 +1,27 @@
+name: Windows CI
+
+on:
+  push:
+    tags:
+      - 'v[0-9]+.[0-9]+.[0-9]+'
+      - 'v[0-9]+.[0-9]+.[0-9]+rc[0-9]+'
+
+env:
+  CARGO_TERM_COLOR: always
+
+jobs:
+  windows-tests:
+    runs-on: windows-latest
+    name: Tests for Windows
+    env:
+      CARGO_TERM_COLOR: always
+    steps:
+      - uses: actions/checkout@v3
+
+      - name: Caching cargo deps
+        id: ci-cache
+        uses: Swatinem/rust-cache@v2
+
+      - name: Running tests for Windows OS targets
+        run: |
+          cargo test --all-features --workspace --exclude tests
diff --git a/.gitignore b/.gitignore
index 424822ed..a38bca38 100755
--- a/.gitignore
+++ b/.gitignore
@@ -4,4 +4,5 @@ Cargo.lock
 /tester_canyon_sql/
 canyon_tester/
 macro_utils.rs
-.vscode/
\ No newline at end of file
+.vscode/
+postgres-data/
\ No newline at end of file
diff --git a/CHANGELOG.md b/CHANGELOG.md
new file mode 100644
index 00000000..0e1d3570
--- /dev/null
+++ b/CHANGELOG.md
@@ -0,0 +1,16 @@
+# Changelog
+
+All notable changes to this project will be documented in this file.
+
+The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
+and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
+
+The date format is defined as: `YYYY-MM-DD`
+
+## [Unreleased]
+
+## [0.1.0] - 2022-12-25
+
+### Added
+
+- Launched the first release. Published at [crates.io](https://crates.io)
diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
new file mode 100644
index 00000000..be13e148
--- /dev/null
+++ b/CONTRIBUTING.md
@@ -0,0 +1,117 @@
+# Contributing to Canyon-SQL
+
+First off, thanks for taking the time to contribute!
+
+All types of contributions are encouraged and valued. See the [Table of Contents](#table-of-contents) for different ways to help and details about how this project handles them. Please make sure to read the relevant section before making your contribution. It will make it a lot easier for us maintainers and smooth out the experience for all involved. The community looks forward to your contributions.
+
+> And if you like the project, but just don't have time to contribute, that's fine. There are other easy ways to support the project and show your appreciation, which we would also be very happy about:
+> - Star the project
+> - Tweet about it
+> - Refer this project in your project's readme
+> - Mention the project at local meetups and tell your friends/colleagues
+
+
+## Table of Contents
+
+- [Code of Conduct](#code-of-conduct)
+- [I Have a Question](#i-have-a-question)
+- [I Want To Contribute](#i-want-to-contribute)
+- [Reporting Bugs](#reporting-bugs)
+- [Suggesting Enhancements](#suggesting-enhancements)
+- [Your First Code Contribution](#your-first-code-contribution)
+- [Improving The Documentation](#improving-the-documentation)
+- [Styleguides](#styleguides)
+- [Commit Messages](#commit-messages)
+- [Join The Project Team](#join-the-project-team)
+
+
+## Code of Conduct
+
+This project and everyone participating in it are governed by the
+[Canyon-SQL Code of Conduct](.github/CODE_OF_CONDUCT.md).
+By participating, you are expected to uphold this code. Please report unacceptable behavior
+to <zerodaycode@tutanota.com>.
+
+
+## I Have a Question
+
+> If you want to ask a question, we assume that you have read the available [Documentation](https://github.com/zerodaycode/canyon-book).
+
+Before you ask a question, it is best to search for existing [Issues](/issues) that might help you. In case you have found a suitable issue and still need clarification, you can write your question in this issue. It is also advisable to search the internet for answers first.
+
+If you then still feel the need to ask a question and need clarification, we recommend the following:
+
+- Open an [Issue](/issues/new).
+- Provide as much context as you can about what you're running into.
+- Provide project and platform versions (rustc, cargo, OS, etc.), depending on what seems relevant.
+
+We will then take care of the issue as soon as possible.
+
+
+## I Want To Contribute
+
+> ### Legal Notice
+> When contributing to this project, you must agree that you have authored 100% of the content, that you have the necessary rights to the content and that the content you contribute may be provided under the project license.
+
+### Reporting Bugs
+
+#### Before Submitting a Bug Report
+
+A good bug report shouldn't leave others needing to chase you up for more information. Therefore, we ask you to investigate carefully, collect information and describe the issue in detail in your report. Please complete the following steps in advance to help us fix any potential bug as fast as possible.
+
+- Make sure that you are using the latest version.
+- Determine if your bug is really a bug and not an error on your side, e.g. using incompatible environment components/versions (make sure that you have read the [documentation](https://github.com/zerodaycode/canyon-book). If you are looking for support, you might want to check [this section](#i-have-a-question)).
+- To see if other users have experienced (and potentially already solved) the same issue you are having, check if there is not already a bug report existing for your bug or error in the [bug tracker](issues?q=label%3Abug).
+- Also make sure to search the internet (including Stack Overflow) to see if users outside of the GitHub community have discussed the issue.
+- Collect information about the bug:
+  - Stack trace (Traceback)
+  - OS, Platform and Version (Windows, Linux, macOS, x86, ARM)
+  - Version of the interpreter, compiler, SDK, runtime environment, package manager, depending on what seems relevant.
+  - Possibly your input and the output
+  - Can you reliably reproduce the issue? And can you also reproduce it with older versions?
+
+
+#### How Do I Submit a Good Bug Report?
+
+> You must never report security related issues, vulnerabilities or bugs including sensitive information to the issue tracker, or elsewhere in public. Instead, sensitive bugs must be sent by email to <zerodaycode@tutanota.com>.
+
+
+We use GitHub issues to track bugs and errors. If you run into an issue with the project:
+
+- Open an [Issue](/issues/new). (Since we can't be sure at this point whether it is a bug or not, we ask you not to talk about a bug yet and not to label the issue.)
+- Explain the behavior you would expect and the actual behavior.
+- Please provide as much context as possible and describe the *reproduction steps* that someone else can follow to recreate the issue on their own. This usually includes your code. For good bug reports you should isolate the problem and create a reduced test case.
+- Provide the information you collected in the previous section.
+
+Once it's filed:
+
+- The project team will label the issue accordingly.
+- A team member will try to reproduce the issue with your provided steps. If there are no reproduction steps or no obvious way to reproduce the issue, the team will ask you for those steps and mark the issue as `needs-repro`. Bugs with the `needs-repro` tag will not be addressed until they are reproduced.
+- If the team is able to reproduce the issue, it will be marked `needs-fix`, as well as possibly other tags (such as `critical`), and the issue will be left to be implemented by someone.
+
+### Suggesting Enhancements
+
+If you want to suggest an enhancement or new feature for the project, please [open a new issue](/issues) describing what you desire to improve and, potentially, how you plan to contribute to the project.
+
+#### Before Submitting an Enhancement
+
+- Make sure that you are using the latest version.
+- Read the [documentation](https://github.com/zerodaycode/canyon-book) carefully and find out if the functionality is already covered, maybe by an individual configuration.
+- Perform a [search](/issues) to see if the enhancement has already been suggested. If it has, add a comment to the existing issue instead of opening a new one.
+- Find out whether your idea fits with the scope and aims of the project. It's up to you to make a strong case to convince the project's developers of the merits of this feature. Keep in mind that we want features that will be useful to the majority of our users and not just a small subset. If you're just targeting a minority of users, consider writing an add-on/plugin library.
+
+#### How Do I Submit a Good Enhancement Suggestion?
+
+Enhancement suggestions are tracked as [GitHub issues](/issues).
+
+- Use a **clear and descriptive title** for the issue to identify the suggestion.
+- Provide a **step-by-step description of the suggested enhancement** in as many details as possible.
+- **Describe the current behavior** and **explain which behavior you expected to see instead** and why. At this point you can also tell which alternatives do not work for you.
+- You may want to **include screenshots and animated GIFs** which help you demonstrate the steps or point out the part which the suggestion is related to. You can use [this tool](https://www.cockos.com/licecap/) to record GIFs on macOS and Windows, and [this tool](https://github.com/colinkeenan/silentcast) or [this tool](https://github.com/GNOME/byzanz) on Linux.
+- **Explain why this enhancement would be useful** to most Canyon-SQL users. You may also want to point out the other projects that solved it better and which could serve as inspiration.
+
+
+## Attribution
+This guide is based on the **contributing.md**. [Make your own](https://contributing.md/)!
diff --git a/Cargo.toml b/Cargo.toml
index 85971f6b..800ad578 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -1,12 +1,12 @@
 # This is the root Cargo.toml file that serves as manager for the workspace of the project
 
 [workspace]
-
 members = [
     "canyon_sql",
     "canyon_observer",
-    "canyon_manager",
     "canyon_macros",
     "canyon_crud",
-    "canyon_connection"
-]
\ No newline at end of file
+    "canyon_connection",
+
+    "tests"
+]
diff --git a/LICENSE b/LICENSE
new file mode 100755
index 00000000..6f00f362
--- /dev/null
+++ b/LICENSE
@@ -0,0 +1,21 @@
+# MIT License
+
+Copyright (c) 2022 Zero Day Code
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
diff --git a/README.md b/README.md
index 762188fa..61c69c3c 100755
--- a/README.md
+++ b/README.md
@@ -1,11 +1,45 @@
 # CANYON-SQL
-**A full written in `Rust` ORM for `POSTRESQL` based databases.**
 
-## Early stages advice
-The library it's still on a `early stage` state.
-Any contrib via `fork` + `PR` it's really appreciated, specially if you like concepts like backend development, relational
- mapping, low-level code, performance optimizations and, of course, `RUST`.
+**An ORM for multiple databases, fully written in `Rust`.**
+
+- [![Linux CI](https://github.com/zerodaycode/Canyon-SQL/actions/workflows/code-coverage.yml/badge.svg)](https://github.com/zerodaycode/Canyon-SQL/actions/workflows/code-coverage.yml)
+- [![Code Coverage Measure](https://zerodaycode.github.io/Canyon-SQL/badges/flat.svg)](https://zerodaycode.github.io/Canyon-SQL)
+- [![Tests on macOS](https://github.com/zerodaycode/Canyon-SQL/actions/workflows/macos-tests.yml/badge.svg)](https://github.com/zerodaycode/Canyon-SQL/actions/workflows/macos-tests.yml)
+- [![Tests on Windows](https://github.com/zerodaycode/Canyon-SQL/actions/workflows/windows-tests.yml/badge.svg)](https://github.com/zerodaycode/Canyon-SQL/actions/workflows/windows-tests.yml)
+
+`Canyon-SQL` is a high-level abstraction for working with multiple databases concurrently. It is built on top of the `async` language features
+to provide a fast, high-performance library for handling data access on behalf of its consumers.
+
+## Early stage advice
+
+The library is still in an `early stage`.
+Any contribution via `fork` + `PR` is really appreciated. The project is currently under very active development. The first release is planned to be published on `crates.io` near December 2022.
+
+## Most important features
+
+- **Async** by default. Almost every functionality provided is ready to be consumed concurrently.
+- Support for multiple datasources. You can query several databases at the same time, even different kinds of them. This means that you are able to query concurrently
+a `PostgreSQL` database and a `SqlServer` one in the same project.
+- Macro based. With a few annotations and a configuration file, you are ready to write your data access.
+- Allows **migrations**. `Canyon-SQL` comes with a *god-mode* that will manage every table on your database for you. You can modify your tables from `Canyon` code, altering columns, setting up constraints...
+Also, in the future, we plan to let you manipulate the whole server, like creating databases or altering configurations... everything, but in a programmatic way with `Canyon`!
+
+## Supported databases
+
+`Canyon-SQL` currently supports the following databases:
+
+- PostgreSQL (via the `tokio-postgres` crate)
+- SqlServer (via the `tiberius` crate)
+
+Every crate listed above is an `async` based crate, in line with the guidelines of the `Canyon-SQL` design.
+
+There are plans to include more databases, but this is not one of the development team's priorities nowadays.
+
+## Full documentation resources
-## Documentation
 There is a `work-in-progress` web page, built with `mdBook`, containing the official documentation.
-You can read it [here](https://zerodaycode.github.io/canyon-book/)
\ No newline at end of file
+You can read it [by clicking this link](https://zerodaycode.github.io/canyon-book/)
+
+> At this time, and while this note is in this README.md file, the documentation linked above is outdated
+with respect to the current library implementation. Updating it will probably take several weeks, so consider
+waiting for this note to disappear from here, because the project is under a rewrite process.
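The README above says that a few annotations plus a configuration file are all that is needed to start writing data access. As a hedged illustration, this is roughly what a minimal `canyon.toml` with a single datasource could look like; it is assembled from the mock configuration (`CONFIG_FILE_MOCK_ALT`) exercised by the tests later in this diff, and the datasource name, credentials, host and database name are placeholders:

```toml
[canyon_sql]
datasources = [
    # One entry per database; db_type accepts 'postgresql' or 'sqlserver',
    # and migrations accepts 'enabled' or 'disabled'
    { name = 'PostgresDS', properties.db_type = 'postgresql', properties.username = 'username', properties.password = 'random_pass', properties.host = 'localhost', properties.db_name = 'triforce', properties.migrations = 'enabled' }
]
```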
diff --git a/bash_aliases.sh b/bash_aliases.sh
new file mode 100644
index 00000000..0466aac8
--- /dev/null
+++ b/bash_aliases.sh
@@ -0,0 +1,49 @@
+#!/bin/sh
+
+# This file provides command aliases commonly used by the developers involved in Canyon-SQL.
+# These aliases avoid typing a bunch of commands to perform an integrated task that
+# depends on several concatenated commands.
+
+# In order to load the aliases, simply type `$ . ./bash_aliases.sh` from the root of the project.
+# (refreshing the current terminal session could be required)
+
+# Executes the docker compose script to wake up the postgres container
+alias DockerUp='docker-compose -f ./docker/docker-compose.yml up'
+# Shutdown the postgres container
+alias DockerDown='docker-compose -f ./docker/docker-compose.yml down'
+# Cleans the generated cache folder for the postgres in the docker
+alias CleanPostgres='rm -rf ./docker/postgres-data'
+
+# Build the project for Windows targets
+alias BuildCanyonWin='cargo build --all-features --target=x86_64-pc-windows-msvc'
+alias BuildCanyonWinFull='cargo clean && cargo build --all-features --target=x86_64-pc-windows-msvc'
+
+# Build the project for Linux targets
+alias BuildCanyonLinux='cargo build --all-features --target=x86_64-unknown-linux-gnu'
+alias BuildCanyonLinuxFull='cargo clean && cargo build --all-features --target=x86_64-unknown-linux-gnu'
+
+# Runs all the tests within Canyon-SQL for Windows targets
+alias TestsWin='cargo test --all-features --no-fail-fast --target=x86_64-pc-windows-msvc -- --show-output --nocapture'
+# Runs all the tests within Canyon-SQL for Linux targets
+alias TestsLinux='cargo test --all-features --no-fail-fast --target=x86_64-unknown-linux-gnu -- --show-output --nocapture'
+
+# Runs the integration tests of the project for a Windows target
+alias IntegrationTestsWin='cargo test --all-features --no-fail-fast -p tests --target=x86_64-pc-windows-msvc -- --show-output --test-threads=1 --nocapture'
+alias ITIncludeIgnoredWin='cargo test --all-features --no-fail-fast -p tests --target=x86_64-pc-windows-msvc -- --show-output --test-threads=1 --nocapture --include-ignored'
+alias SqlServerInitializationWin='cargo test initialize_sql_server_docker_instance -p tests --all-features --no-fail-fast --target=x86_64-pc-windows-msvc -- --show-output --test-threads=1 --nocapture --include-ignored'
+
+# Runs the integration tests of the project for a Linux target
+alias IntegrationTestsLinux='cargo test --all-features --no-fail-fast -p tests --target=x86_64-unknown-linux-gnu -- --show-output --test-threads=1 --nocapture'
+alias ITIncludeIgnoredLinux='cargo test --all-features --no-fail-fast -p tests --target=x86_64-unknown-linux-gnu -- --show-output --test-threads=1 --nocapture --include-ignored'
+alias SqlServerInitializationLinux='cargo test initialize_sql_server_docker_instance -p tests --all-features --no-fail-fast --target=x86_64-unknown-linux-gnu -- --show-output --test-threads=1 --nocapture --include-ignored'
+
+
+# Publish Canyon-SQL to the registry with its dependencies
+alias PublishCanyon='cargo publish -p canyon_connection && cargo publish -p canyon_crud && cargo publish -p canyon_observer && cargo publish -p canyon_macros && cargo publish -p canyon_sql'
+
+# Collects the code coverage for the project (tests must run before this)
+alias CcEnvVars='export CARGO_INCREMENTAL=0
+export RUSTFLAGS="-Zprofile -Ccodegen-units=1 -Copt-level=0 -Clink-dead-code -Coverflow-checks=off -Zpanic_abort_tests -Cpanic=abort"
+export RUSTDOCFLAGS="-Cpanic=abort"'
+
+alias CodeCov='grcov . -s . --binary-path ./target/debug/ -t html --branch --ignore-not-existing -o ./target/debug/coverage'
\ No newline at end of file
diff --git a/canyon_connection/Cargo.toml b/canyon_connection/Cargo.toml
index 5ea4efac..126b29e3 100644
--- a/canyon_connection/Cargo.toml
+++ b/canyon_connection/Cargo.toml
@@ -1,11 +1,14 @@
 [package]
 name = "canyon_connection"
-version = "1.0.0"
+version = "0.0.1"
 edition = "2021"
 
 [dependencies]
-tokio = { version = "1.9.0", features = ["full"] }
+tokio = { version = "1.21.2", features = ["full"] }
+tokio-util = { version = "0.7.4", features = ["compat"] }
 tokio-postgres = { version = "0.7.2", features = ["with-chrono-0_4"] }
+futures = "0.3.25"
+indexmap = "1.9.1"
 tiberius = { version = "0.11.3", features = ["tds73", "chrono"] }
 async-std = { version = "1.12.0" }
diff --git a/canyon_connection/src/canyon_database_connector.rs b/canyon_connection/src/canyon_database_connector.rs
index ec094010..bfb9bc43 100644
--- a/canyon_connection/src/canyon_database_connector.rs
+++ b/canyon_connection/src/canyon_database_connector.rs
@@ -1,75 +1,80 @@
 use async_std::net::TcpStream;
-use tiberius::{Config, AuthMethod};
-use tokio_postgres::{Client, Connection, NoTls, Socket, tls::NoTlsStream};
+use serde::Deserialize;
+use tiberius::{AuthMethod, Config};
+use tokio_postgres::{Client, NoTls};
 
 use crate::datasources::DatasourceProperties;
 
 /// Represents the current supported databases by Canyon
-#[derive(Debug)]
+#[derive(Deserialize, Debug, Eq, PartialEq, Clone, Copy, Default)]
 pub enum DatabaseType {
+    #[default]
+    #[serde(alias = "postgres", alias = "postgresql")]
     PostgreSql,
-    SqlServer
-}
-
-impl DatabaseType {
-    pub fn from_datasource(datasource: &DatasourceProperties<'_>) -> Self {
-        match datasource.db_type {
-            "postgresql" => Self::PostgreSql,
-            "sqlserver" => Self::SqlServer,
-            _ => todo!() // TODO Change for boxed dyn error type
-        }
-    }
+    #[serde(alias = "sqlserver", alias = "mssql")]
+    SqlServer,
 }
 
 /// A connection with a `PostgreSQL` database
 pub struct PostgreSqlConnection {
     pub client: Client,
-    pub connection: Connection<Socket, NoTlsStream>
+    // pub connection: Connection<Socket, NoTlsStream>, // TODO Hold it, or not to hold it... that's the question!
 }
 
 /// A connection with a `SqlServer` database
 pub struct SqlServerConnection {
-    pub client: tiberius::Client<TcpStream>
+    pub client: &'static mut tiberius::Client<TcpStream>,
 }
 
-/// The Canyon database connection handler.
+/// The Canyon database connection handler. When the client's program
+/// starts, Canyon gets the information about the desired datasources,
+/// processes them, and generates a pool of 1-to-1 database connections,
+/// one for every datasource defined.
 pub struct DatabaseConnection {
     pub postgres_connection: Option<PostgreSqlConnection>,
     pub sqlserver_connection: Option<SqlServerConnection>,
-    pub database_type: DatabaseType
+    pub database_type: DatabaseType,
 }
 
 unsafe impl Send for DatabaseConnection {}
 unsafe impl Sync for DatabaseConnection {}
 
 impl DatabaseConnection {
-    pub async fn new(datasource: &DatasourceProperties<'_>) -> Result<Self, Box<dyn std::error::Error>> {
+    pub async fn new(
+        datasource: &DatasourceProperties<'_>,
+    ) -> Result<Self, Box<dyn std::error::Error>> {
         match datasource.db_type {
-            "postgresql" => {
-                let (new_client, new_connection) =
-                    tokio_postgres::connect(
+            DatabaseType::PostgreSql => {
+                let (new_client, new_connection) = tokio_postgres::connect(
                     &format!(
                         "postgres://{user}:{pswd}@{host}:{port}/{db}",
-                        user = datasource.username,
-                        pswd = datasource.password,
-                        host = datasource.host,
-                        port = datasource.port.unwrap_or_default(),
-                        db = datasource.db_name
-                    )[..],
-                    NoTls
-                ).await?;
+                        user = datasource.username,
+                        pswd = datasource.password,
+                        host = datasource.host,
+                        port = datasource.port.unwrap_or_default(),
+                        db = datasource.db_name
+                    )[..],
+                    NoTls,
+                )
+                .await?;
+
+                tokio::spawn(async move {
+                    if let Err(e) = new_connection.await {
+                        eprintln!("An error occurred while trying to connect to the PostgreSQL database: {e}");
+                    }
+                });
 
                 Ok(Self {
                     postgres_connection: Some(PostgreSqlConnection {
                         client: new_client,
-                        connection: new_connection
+                        // connection: new_connection,
                     }),
                     sqlserver_connection: None,
-                    database_type: DatabaseType::from_datasource(&datasource)
+                    database_type: DatabaseType::PostgreSql,
                 })
-            },
-            "sqlserver" => {
+            }
+            DatabaseType::SqlServer => {
                 let mut config = Config::new();
 
                 config.host(datasource.host);
@@ -77,21 +82,26 @@ impl DatabaseConnection {
                 config.database(datasource.db_name);
 
                 // Using SQL Server authentication.
-                config.authentication(
-                    AuthMethod::sql_server(datasource.username, datasource.password)
-                );
-
-                // on production, it is not a good idea to do this
+                config.authentication(AuthMethod::sql_server(
+                    datasource.username,
+                    datasource.password,
+                ));
+
+                // On production it is not a good idea to do this. We should upgrade
+                // Canyon in future versions to let the user take care of this
+                // configuration
                config.trust_cert();
 
                 // Taking the address from the configuration, using async-std's
                 // TcpStream to connect to the server.
-                let tcp = TcpStream::connect(config.get_addr()).await
-                    .ok().expect("Error instanciating the SqlServer TCP Stream");
+                let tcp = TcpStream::connect(config.get_addr())
+                    .await
+                    .expect("Error instantiating the SqlServer TCP Stream");
 
                 // We'll disable the Nagle algorithm. Buffering is handled
                 // internally with a `Sink`.
-                tcp.set_nodelay(true).ok().expect("Error in the SqlServer `nodelay` config");
+                tcp.set_nodelay(true)
+                    .expect("Error in the SqlServer `nodelay` config");
 
                 // Handling TLS, login and other details related to the SQL Server.
                 let client = tiberius::Client::connect(config, tcp).await;
 
                 Ok(Self {
                     postgres_connection: None,
                     sqlserver_connection: Some(SqlServerConnection {
-                        client: client.ok().expect("A failure happened connecting to the database")
+                        client: Box::leak(Box::new(
+                            client.expect("A failure happened connecting to the database"),
+                        )),
                     }),
-                    database_type: DatabaseType::from_datasource(&datasource)
+                    database_type: DatabaseType::SqlServer,
                 })
-            },
-            &_ => return Err(
-                std::io::Error::new(
-                    std::io::ErrorKind::Unsupported,
-                    format!(
-                        "There's no `{}` database supported in Canyon-SQL",
-                        datasource.db_type
-                    )
-                ).into_inner().unwrap()
-            )
+            }
         }
     }
 }
-
+#[cfg(test)]
+mod database_connection_handler {
+    use super::*;
+    use crate::CanyonSqlConfig;
+
+    const CONFIG_FILE_MOCK_ALT: &str = r#"
+        [canyon_sql]
+        datasources = [
+            {name = 'PostgresDS', properties.db_type = 'postgresql', properties.username = 'username', properties.password = 'random_pass', properties.host = 'localhost', properties.db_name = 'triforce', properties.migrations='enabled'},
+            {name = 'SqlServerDS', properties.db_type = 'sqlserver', properties.username = 'username2', properties.password = 'random_pass2', properties.host = '192.168.0.250.1', properties.port = 3340, properties.db_name = 'triforce2', properties.migrations='disabled'}
+        ]
+    "#;
+
+    /// Tests that the `db_type` property of a datasource deserializes
+    /// into the correct `DatabaseType` variant
+    #[test]
+    fn check_from_datasource() {
+        let config: CanyonSqlConfig = toml::from_str(CONFIG_FILE_MOCK_ALT)
+            .expect("A failure happened retrieving the [canyon_sql] section");
+
+        let psql_ds = &config.canyon_sql.datasources[0].properties;
+        let sqls_ds = &config.canyon_sql.datasources[1].properties;
+
+        assert_eq!(psql_ds.db_type, DatabaseType::PostgreSql);
+        assert_eq!(sqls_ds.db_type, DatabaseType::SqlServer);
+    }
+}
diff --git a/canyon_connection/src/datasources.rs b/canyon_connection/src/datasources.rs
index 73158633..7c87583d 100644
--- a/canyon_connection/src/datasources.rs
+++ b/canyon_connection/src/datasources.rs
@@ -1,13 +1,14 @@
 use serde::Deserialize;
+use crate::canyon_database_connector::DatabaseType;
 
 /// ```
 #[test]
 fn load_ds_config_from_array() {
-    const CONFIG_FILE_MOCK_ALT: &'static str = r#"
+    const CONFIG_FILE_MOCK_ALT: &str = r#"
         [canyon_sql]
         datasources = [
-            {name = 'PostgresDS', properties.db_type = 'postgresql', properties.username = 'username', properties.password = 'random_pass', properties.host = 'localhost', properties.db_name = 'triforce'},
+            {name = 'PostgresDS', properties.db_type = 'postgresql', properties.username = 'username', properties.password = 'random_pass', properties.host = 'localhost', properties.db_name = 'triforce', properties.migrations = 'enabled'},
             {name = 'SqlServerDS', properties.db_type = 'sqlserver', properties.username = 'username2', properties.password = 'random_pass2', properties.host = '192.168.0.250.1', properties.port = 3340, properties.db_name = 'triforce2'}
         ]
     "#;
 
     let config: CanyonSqlConfig = toml::from_str(CONFIG_FILE_MOCK_ALT)
         .expect("A failure happened retrieving the [canyon_sql] section");
 
-    let ds_0 = &config.canyon_sql.datasources[0];
-    let ds_1 = &config.canyon_sql.datasources[1];
-
-    assert_eq!(ds_0.name, "PostgresDS");
-    assert_eq!(ds_0.properties.db_type, "postgresql");
-    assert_eq!(ds_0.properties.username, "username");
-    assert_eq!(ds_0.properties.password, "random_pass");
-    assert_eq!(ds_0.properties.host, "localhost");
-    assert_eq!(ds_0.properties.port, None);
-    assert_eq!(ds_0.properties.db_name, "triforce");
+    let ds_0 = &config.canyon_sql.datasources[0];
+    let ds_1 = &config.canyon_sql.datasources[1];
 
-    assert_eq!(ds_1.name, "SqlServerDS");
-    assert_eq!(ds_1.properties.db_type, "sqlserver");
-    assert_eq!(ds_1.properties.username, "username2");
-    assert_eq!(ds_1.properties.password, "random_pass2");
-    assert_eq!(ds_1.properties.host, "192.168.0.250.1");
-    assert_eq!(ds_1.properties.port, Some(3340));
-    assert_eq!(ds_1.properties.db_name, "triforce2");
+    assert_eq!(ds_0.name, "PostgresDS");
+    assert_eq!(ds_0.properties.db_type, DatabaseType::PostgreSql);
+    assert_eq!(ds_0.properties.username, "username");
+    assert_eq!(ds_0.properties.password, "random_pass");
+    assert_eq!(ds_0.properties.host, "localhost");
+    assert_eq!(ds_0.properties.port, None);
+    assert_eq!(ds_0.properties.db_name, "triforce");
+    assert_eq!(ds_0.properties.migrations, Some(Migrations::Enabled));
+
+    assert_eq!(ds_1.name, "SqlServerDS");
+    assert_eq!(ds_1.properties.db_type, DatabaseType::SqlServer);
+    assert_eq!(ds_1.properties.username, "username2");
+    assert_eq!(ds_1.properties.password, "random_pass2");
+    assert_eq!(ds_1.properties.host, "192.168.0.250.1");
+    assert_eq!(ds_1.properties.port, Some(3340));
+    assert_eq!(ds_1.properties.db_name, "triforce2");
+    assert_eq!(ds_1.properties.migrations, None);
 }
 
-/// ```
+///
 #[derive(Deserialize, Debug, Clone)]
 pub struct CanyonSqlConfig<'a> {
     #[serde(borrow)]
-    pub canyon_sql: Datasources<'a>
+    pub canyon_sql: Datasources<'a>,
 }
 
 #[derive(Deserialize, Debug, Clone)]
 pub struct Datasources<'a> {
     #[serde(borrow)]
-    pub datasources: Vec<DatasourceConfig<'a>>
+    pub datasources: Vec<DatasourceConfig<'a>>,
 }
 
 #[derive(Deserialize, Debug, Clone, Copy)]
 pub struct DatasourceConfig<'a> {
     #[serde(borrow)]
-    pub name: &'a str,
-    pub properties: DatasourceProperties<'a>
-}
+    pub name: &'a str,
+    pub properties: DatasourceProperties<'a>,
+}
 
 #[derive(Deserialize, Debug, Clone, Copy)]
 pub struct DatasourceProperties<'a> {
-    pub db_type: &'a str,
-    pub username: &'a str,
+    pub db_type: DatabaseType,
+    pub username: &'a str,
     pub password: &'a str,
     pub host: &'a str,
     pub port: Option<u16>,
     pub db_name: &'a str,
-}
\ No newline at end of file
+    pub migrations: Option<Migrations>,
+}
+
+/// Represents the enabled or disabled migrations for a whole datasource
+#[derive(Deserialize, Debug, Clone, Copy, PartialEq)]
+pub enum Migrations {
+    #[serde(alias = "Enabled", alias = "enabled")]
+    Enabled,
+    #[serde(alias = "Disabled", alias = "disabled")]
+    Disabled,
+}
diff --git a/canyon_connection/src/lib.rs b/canyon_connection/src/lib.rs
index 62347df6..f762c985 100644
--- a/canyon_connection/src/lib.rs
+++ b/canyon_connection/src/lib.rs
@@ -1,25 +1,65 @@
+pub extern crate async_std;
+pub extern crate futures;
+pub extern crate lazy_static;
+pub extern crate tiberius;
 pub extern crate tokio;
 pub extern crate tokio_postgres;
-pub extern crate tiberius;
-pub extern crate async_std;
+pub extern crate tokio_util;
 
 pub mod canyon_database_connector;
-mod datasources;
+pub mod datasources;
 
 use std::fs;
 
-use crate::datasources::{DatasourceConfig, CanyonSqlConfig};
+use crate::datasources::{CanyonSqlConfig, DatasourceConfig};
+use canyon_database_connector::DatabaseConnection;
+use indexmap::IndexMap;
 use lazy_static::lazy_static;
+use tokio::sync::Mutex;
 
-const CONFIG_FILE_IDENTIFIER: &'static str = "canyon.toml";
-
+const CONFIG_FILE_IDENTIFIER: &str = "canyon.toml";
 
 lazy_static! {
+    pub static ref CANYON_TOKIO_RUNTIME: tokio::runtime::Runtime =
+        tokio::runtime::Runtime::new() // TODO Make the config with the builder
+            .expect("Failed initializing the Canyon-SQL Tokio Runtime");
+
     static ref RAW_CONFIG_FILE: String = fs::read_to_string(CONFIG_FILE_IDENTIFIER)
         .expect("Error opening or reading the Canyon configuration file");
     static ref CONFIG_FILE: CanyonSqlConfig<'static> = toml::from_str(RAW_CONFIG_FILE.as_str())
         .expect("Error generating the configuration for Canyon-SQL");
-    pub static ref DATASOURCES: Vec<DatasourceConfig<'static>> = CONFIG_FILE.canyon_sql.datasources.clone();
-    pub static ref DEFAULT_DATASOURCE: DatasourceConfig<'static> = CONFIG_FILE.canyon_sql.datasources.clone()[0];
-}
\ No newline at end of file
+    pub static ref DATASOURCES: Vec<DatasourceConfig<'static>> =
+        CONFIG_FILE.canyon_sql.datasources.clone();
+
+    pub static ref CACHED_DATABASE_CONN: Mutex<IndexMap<&'static str, &'static mut DatabaseConnection>> =
+        Mutex::new(IndexMap::new());
+}
+
+/// Convenient free function to initialize a kind of connection pool based on the datasources
+/// defined in the configuration file.
+///
+/// This avoids Canyon creating a new connection to the database on every query, potentially removing
+/// bottlenecks derived from instantiating a new connection every time.
+///
+/// Note: We noticed with the integration tests that the [`tokio_postgres`] crate (PostgreSQL) is able to work in an async environment
+/// with a new connection per query without any problem, but the [`tiberius`] crate (MSSQL) suffers a lot when it has continuous
+/// statements with multiple queries, like an insert followed by a find-by-id to check that the insert query has done its
+/// job.
+pub async fn init_connections_cache() {
+    for datasource in DATASOURCES.iter() {
+        CACHED_DATABASE_CONN.lock().await.insert(
+            datasource.name,
+            Box::leak(Box::new(
+                DatabaseConnection::new(&datasource.properties)
+                    .await
+                    .unwrap_or_else(|_| {
+                        panic!(
+                            "Error pooling a new connection for the datasource: {:?}",
+                            datasource.name
+                        )
+                    }),
+            )),
+        );
+    }
+}
diff --git a/canyon_crud/Cargo.toml b/canyon_crud/Cargo.toml
index 5d50b54f..15ae1b83 100644
--- a/canyon_crud/Cargo.toml
+++ b/canyon_crud/Cargo.toml
@@ -1,10 +1,10 @@
 [package]
 name = "canyon_crud"
-version = "1.0.0"
+version = "0.0.1"
 edition = "2021"
 
 [dependencies]
 chrono = { version = "0.4", features = ["serde"] }
 async-trait = { version = "0.1.50" }
 
-canyon_connection = { path = "../canyon_connection" }
\ No newline at end of file
+canyon_connection = { version = "0.0.1", path = "../canyon_connection" }
\ No newline at end of file
diff --git a/canyon_crud/src/bounds.rs b/canyon_crud/src/bounds.rs
index 403fe2ad..386f84e0 100644
--- a/canyon_crud/src/bounds.rs
+++ b/canyon_crud/src/bounds.rs
@@ -1,27 +1,25 @@
-use std::fmt::Debug;
+#![allow(clippy::extra_unused_lifetimes)]
 
+use crate::{
+    crud::{CrudOperations, Transaction},
+    mapper::RowMapper,
+};
 use canyon_connection::{
-    tokio_postgres::types::ToSql,
-    tiberius::{
-        IntoSql,
-        ColumnData
-    }
+    tiberius::{self, ColumnData, IntoSql},
+    tokio_postgres::{self, types::ToSql},
 };
+use chrono::{DateTime, FixedOffset, NaiveDate, NaiveDateTime, NaiveTime, Utc};
+use std::any::Any;
 
-use chrono::{NaiveDate, NaiveDateTime, NaiveTime, DateTime, FixedOffset, Utc};
-
-use crate::{crud::{CrudOperations, Transaction}, mapper::RowMapper};
-
-
-/// Created for retrieve the field's name of a field of a struct, giving
+/// Created to retrieve the name of a field of a struct, given
 /// Canyon's autogenerated enum with the variants that map those
 /// fields.
-///
+///
/// ```
/// pub struct Struct<'a> {
///     pub some_field: &'a str
/// }
-///
+///
/// // Autogenerated enum
/// #[derive(Debug)]
/// #[allow(non_camel_case_types)]
@@ -32,66 +30,192 @@ use crate::{crud::{CrudOperations, Transaction}, mapper::RowMapper};
/// So, to retrieve the field's name, something like this w'd be used on some part
/// of the Canyon's Manager crate, to wire the necessary code to pass the field
/// name, retrieved from the enum variant, to a called.
-///
+///
/// // Something like:
/// `let struct_field_name_from_variant = StructField::some_field.field_name_as_str();`
-pub trait FieldIdentifier<T>
-    where T: Transaction<T> + CrudOperations<T> + RowMapper<T> + Debug
+pub trait FieldIdentifier<T>
+where
+    T: Transaction<T> + CrudOperations<T> + RowMapper<T>,
{
-    fn field_name_as_str(self) -> String;
+    fn as_str(&self) -> &'static str;
}

/// Represents some kind of introspection to make the implementors
-/// retrieves a value inside some variant of an associated enum type.
-/// and convert it to an [`String`], to enable the convertion of
-/// that value into something that can be part of an SQL query.
-///
-/// It's a generification to convert everything to a string representation
-/// in SQL syntax, so the clauses can use any value to make filters
-///
+/// able to retrieve a value inside some variant of an associated enum type,
+/// and convert it into a tuple formed by the column name as a String
+/// and the dynamic value of the [`QueryParameters<'_>`] trait object contained
+/// inside the variant requested,
+/// enabling a conversion of that value into something
+/// that can be part of an SQL query.
+///
+///
/// Ex:
-/// `SELECT * FROM some_table WHERE id = '2'`
-///
+/// `SELECT * FROM some_table WHERE id = 2`
+///
/// That '2' it's extracted from some enum that implements [`FieldValueIdentifier`],
/// where usually the variant w'd be something like:
-///
+///
/// ```
/// pub enum Enum {
///     IntVariant(i32)
/// }
/// ```
-/// so, the `.value(self)` method it's called over `self`, gets the value for that variant
-/// (or another specified in the logic) and returns that value as an [`String`]
-pub trait FieldValueIdentifier<T>
-    where T: Transaction<T> + CrudOperations<T> + RowMapper<T> + Debug
+pub trait FieldValueIdentifier<'a, T>
+where
+    T: Transaction<T> + CrudOperations<T> + RowMapper<T>,
{
-    fn value(self) -> String;
-}
-
-impl<T> FieldValueIdentifier<T> for &str
-    where T: Transaction<T> + CrudOperations<T> + RowMapper<T> + Debug
-{
-    fn value(self) -> String {
-        self.to_string()
-    }
+    fn value(self) -> (&'static str, &'a dyn QueryParameters<'a>);
}

/// Bounds to some type T in order to make it callable over some fn parameter T
-///
+///
/// Represents the ability of an struct to be considered as candidate to perform
/// actions over it as it holds the 'parent' side of a foreign key relation.
-///
-/// Usually, it's used on the Canyon macros to retrieve the column that
+///
+/// Usually, it's used on the Canyon macros to retrieve the column that
/// this side of the relation it's representing
pub trait ForeignKeyable {
-    // type Output; // TODO as
    /// Retrieves the field related to the column passed in
-    fn get_fk_column<'a>(&self, column: &'a str) -> Option<String>;
+    fn get_fk_column(&self, column: &str) -> Option<&dyn QueryParameters<'_>>;
}

/// To define trait objects that helps to relates the necessary bounds in the 'IN` SQL clause
pub trait InClauseValues: ToSql + ToString {}

+/// Generic abstraction to represent any of the Row types
+/// from the client crates
+pub trait Row {
+    fn as_any(&self) -> &dyn Any;
+}
+impl Row for tokio_postgres::Row {
+    fn as_any(&self) -> &dyn Any {
+        self
+    }
+}
+
+impl Row for tiberius::Row {
+    fn as_any(&self) -> &dyn Any {
+        self
+    }
+}
+
+pub struct Column<'a> {
+    name: &'a str,
+    type_: ColumnType,
+}
+impl<'a> Column<'a> {
+    pub fn name(&self) -> &'_ str {
+        self.name
+    }
+    pub fn column_type(&self) -> &ColumnType {
+        &self.type_
+    }
+    pub fn type_(&'a self) -> &'_ dyn Type {
+        match &self.type_ {
+            ColumnType::Postgres(v) => v as &'a dyn Type,
+            ColumnType::SqlServer(v) => v as &'a dyn Type,
+        }
+    }
+}
+
+pub trait Type {
+    fn as_any(&self) -> &dyn Any;
+}
+impl Type for tokio_postgres::types::Type {
+    fn as_any(&self) -> &dyn Any {
+        self
+    }
+}
+impl Type for tiberius::ColumnType {
+    fn as_any(&self) -> &dyn Any {
+        self
+    }
+}
+
+pub enum ColumnType {
+    Postgres(tokio_postgres::types::Type),
+    SqlServer(tiberius::ColumnType),
+}
+
+pub trait RowOperations {
+    /// Abstracts over the different shapes of the row `get` operation,
+    /// dispatching it dynamically no matter which database client
+    /// the row originally comes from
+    fn get<'a, Output>(&'a self, col_name: &str) -> Output
+    where
+        Output: tokio_postgres::types::FromSql<'a> + tiberius::FromSql<'a>;
+
+    fn get_opt<'a, Output>(&'a self, col_name: &str) -> Option<Output>
+    where
+        Output: tokio_postgres::types::FromSql<'a> + tiberius::FromSql<'a>;
+
+    fn columns(&self) -> Vec<Column>;
+}
+
+impl RowOperations for &dyn Row {
+    fn get<'a, Output>(&'a self, col_name: &str) -> Output
+    where
+        Output: tokio_postgres::types::FromSql<'a> + tiberius::FromSql<'a>,
+    {
+        if let Some(row) = self.as_any().downcast_ref::<tokio_postgres::Row>() {
+            return row.get::<&str, Output>(col_name);
+        };
+        if let Some(row) = self.as_any().downcast_ref::<tiberius::Row>() {
+            return row
+                .get::<Output, &str>(col_name)
+                .expect("Failed to obtain a row in the MSSQL migrations");
+        };
+        panic!()
+    }
+
+    fn columns(&self) -> Vec<Column> {
+        let mut cols = vec![];
+
+        if self.as_any().is::<tokio_postgres::Row>() {
+            self.as_any()
+                .downcast_ref::<tokio_postgres::Row>()
+                .expect("Not a tokio postgres Row for column")
+                .columns()
+                .iter()
+                .for_each(|c| {
+                    cols.push(Column {
+                        name: c.name(),
+                        type_: ColumnType::Postgres(c.type_().to_owned()),
+                    })
+                })
+        } else {
+            self.as_any()
+                .downcast_ref::<tiberius::Row>()
+                .expect("Not a Tiberius Row for column")
+                .columns()
+                .iter()
+                .for_each(|c| {
+                    cols.push(Column {
+                        name: c.name(),
+                        type_: ColumnType::SqlServer(c.column_type()),
+                    })
+                })
+        };

+        cols
+    }
+
+    fn get_opt<'a, Output>(&'a self, col_name: &str) -> Option<Output>
+    where
+        Output: tokio_postgres::types::FromSql<'a> + tiberius::FromSql<'a>,
+    {
+        if let Some(row) = self.as_any().downcast_ref::<tokio_postgres::Row>() {
+            return row.get::<&str, Option<Output>>(col_name);
+        };
+        if let Some(row) = self.as_any().downcast_ref::<tiberius::Row>() {
+            return row
+                .try_get::<Output, &str>(col_name)
+                .expect("Failed to obtain a row in the MSSQL migrations");
+        };
+        panic!()
+    }
+}
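// --- Illustrative sketch (not part of the patch) -----------------------------
// How the erased `Row` + `RowOperations` pair above is meant to be consumed:
// a caller holds `&dyn Row` values (e.g. from `DatabaseResult::as_canyon_rows()`)
// and `get`/`get_opt` downcast to the concrete client row type at runtime.
// The `id`/`nickname` column names are hypothetical; `i32` and `&str` are used
// because they satisfy both `FromSql` bounds.
fn print_rows_sketch(rows: Vec<&dyn Row>) {
    for row in rows {
        // Statically typed access; panics if the column is absent
        let id: i32 = row.get("id");
        // Nullable column read through the same erased interface
        let nickname: Option<&str> = row.get_opt("nickname");
        println!("row {id}: {nickname:?}");
    }
}
// ------------------------------------------------------------------------------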
/// Defines a trait for represent type bounds against the allowed /// datatypes supported by Canyon to be used as query parameters. pub trait QueryParameters<'a>: std::fmt::Debug + Sync + Send { @@ -99,10 +223,9 @@ pub trait QueryParameters<'a>: std::fmt::Debug + Sync + Send { fn as_sqlserver_param(&self) -> ColumnData<'_>; } - -/// The implementation of the [`tiberius`] [`IntoSql`] for the +/// The implementation of the [`canyon_connection::tiberius`] [`IntoSql`] for the /// query parameters. -/// +/// /// This implementation is necessary because of the generic amplitude /// of the arguments of the [`Transaction::query`], that should work with /// a collection of [`QueryParameters<'a>`], in order to allow a workflow @@ -219,7 +342,9 @@ impl<'a> QueryParameters<'a> for Option<&f32> { } fn as_sqlserver_param(&self) -> ColumnData<'_> { - ColumnData::F32(Some(*self.expect("Error on an f32 value on QueryParameters<'_>"))) + ColumnData::F32(Some( + *self.expect("Error on an f32 value on QueryParameters<'_>"), + )) } } impl<'a> QueryParameters<'a> for f64 { @@ -255,7 +380,9 @@ impl<'a> QueryParameters<'a> for Option<&f64> { } fn as_sqlserver_param(&self) -> ColumnData<'_> { - ColumnData::F64(Some(*self.expect("Error on an f64 value on QueryParameters<'_>"))) + ColumnData::F64(Some( + *self.expect("Error on an f64 value on QueryParameters<'_>"), + )) } } impl<'a> QueryParameters<'a> for i64 { @@ -319,9 +446,7 @@ impl<'a> QueryParameters<'a> for Option { fn as_sqlserver_param(&self) -> ColumnData<'_> { match self { - Some(string) => ColumnData::String( - Some(std::borrow::Cow::Owned(string.to_owned())) - ), + Some(string) => ColumnData::String(Some(std::borrow::Cow::Owned(string.to_owned()))), None => ColumnData::String(None), } } @@ -333,9 +458,7 @@ impl<'a> QueryParameters<'a> for Option<&String> { fn as_sqlserver_param(&self) -> ColumnData<'_> { match self { - Some(string) => ColumnData::String( - Some(std::borrow::Cow::Borrowed(string)) - ), + Some(string) => ColumnData::String(Some(std::borrow::Cow::Borrowed(string))), None => ColumnData::String(None), } } @@ -442,7 +565,7 @@ impl<'a> QueryParameters<'_> for DateTime { self.into_sql() } } -impl<'a> QueryParameters<'a> for Option> { +impl<'a> QueryParameters<'_> for Option> { fn as_postgres_param(&self) -> &(dyn ToSql + Sync) { self } @@ -450,4 +573,4 @@ impl<'a> QueryParameters<'a> for Option> { fn as_sqlserver_param(&self) -> ColumnData<'_> { self.into_sql() } -} \ No newline at end of file +} diff --git a/canyon_crud/src/crud.rs b/canyon_crud/src/crud.rs index 5dd277cb..6a97c4ac 100644 --- a/canyon_crud/src/crud.rs +++ b/canyon_crud/src/crud.rs @@ -1,224 +1,255 @@ -use std::fmt::{Debug, Display}; +use std::fmt::Display; use async_trait::async_trait; use canyon_connection::canyon_database_connector::DatabaseType; +use canyon_connection::CACHED_DATABASE_CONN; -use crate::{bounds::QueryParameters, query_elements::query_builder::QueryBuilder}; +use crate::bounds::QueryParameters; use crate::mapper::RowMapper; -use crate::result::DatabaseResult; - -use canyon_connection::{ - DATASOURCES, - DEFAULT_DATASOURCE, - canyon_database_connector::DatabaseConnection, +use crate::query_elements::query_builder::{ + DeleteQueryBuilder, SelectQueryBuilder, UpdateQueryBuilder, }; - +use crate::result::DatabaseResult; /// This traits defines and implements a query against a database given /// an statemt `stmt` and the params to pass the to the client. 
-///
+///
/// It returns a [`DatabaseResult`], which is the core Canyon type to wrap
/// the result of the query and, if the user desires,
/// automatically map it to an struct.
#[async_trait]
-pub trait Transaction<T> {
-    /// Performs the necessary to execute a query against the database
-    async fn query<'a, S, Z>(stmt: S, params: Z, datasource_name: &'a str)
-        -> Result<DatabaseResult<T>, Box<(dyn std::error::Error + Sync + Send + 'static)>>
-        where
-            S: AsRef<str> + Display + Sync + Send + 'a,
-            Z: AsRef<[&'a dyn QueryParameters<'a>]> + Sync + Send + 'a
+#[allow(clippy::question_mark)]
+pub trait Transaction<T> {
+    /// Performs a query against the database targeted by the selected datasource.
+    ///
+    /// An empty datasource name means taking the entry zero (the default datasource)
+    async fn query<'a, S, Z>(
+        stmt: S,
+        params: Z,
+        datasource_name: &'a str,
+    ) -> Result<DatabaseResult<T>, Box<(dyn std::error::Error + Sync + Send + 'static)>>
+    where
+        S: AsRef<str> + Display + Sync + Send + 'a,
+        Z: AsRef<[&'a dyn QueryParameters<'a>]> + Sync + Send + 'a,
    {
-        let database_connection = if datasource_name == "" {
-            DatabaseConnection::new(&DEFAULT_DATASOURCE.properties).await
-        } else { // Get the specified one
-            DatabaseConnection::new(
-                &DATASOURCES.iter()
-                    .find( |ds| ds.name == datasource_name)
-                    .expect(&format!("No datasource found with the specified parameter: `{}`", datasource_name))
-                    .properties
-            ).await
-        };
+        let guarded_cache = CACHED_DATABASE_CONN.lock().await;

-        if let Err(_db_conn) = database_connection {
-            todo!();
+        let database_conn = if datasource_name.is_empty() {
+            guarded_cache
+                .values()
+                .next()
+                .expect("No default datasource found. Check your `canyon.toml` file")
        } else {
-            // No errors
-            let db_conn = database_connection.ok().unwrap();
-
-            match db_conn.database_type {
-                DatabaseType::PostgreSql =>
-                    postgres_query_launcher::launch::<T>(db_conn, stmt.to_string(), params.as_ref()).await,
-                DatabaseType::SqlServer =>
-                    sqlserver_query_launcher::launch::<T, Z>(db_conn, &mut stmt.to_string(), params).await
+            guarded_cache.get(datasource_name)
+                .unwrap_or_else(||
+                    panic!("Canyon couldn't find a datasource in the pool with the argument provided: {datasource_name}"
+                ))
+        };
+
+        match database_conn.database_type {
+            DatabaseType::PostgreSql => {
+                postgres_query_launcher::launch::<T>(
+                    database_conn,
+                    stmt.to_string(),
+                    params.as_ref(),
+                )
+                .await
+            }
+            DatabaseType::SqlServer => {
+                sqlserver_query_launcher::launch::<T, Z>(
+                    database_conn,
+                    &mut stmt.to_string(),
+                    params,
+                )
+                .await
            }
        }
    }
}

-/// [`CrudOperations`] it's one of the core parts of Canyon.
-///
-/// Here it's defined and implemented every CRUD operation that Canyon
-/// makes available to the user, directly derived with a `CanyonCrud`
+/// *CrudOperations* is the core part of Canyon-SQL.
+///
+/// Here, every CRUD operation that the user has available is defined
+/// and implemented, just by deriving the `CanyonCrud`
/// derive macro when a struct contains the annotation.
-///
-/// Also, this traits needs that the type T over what it's generified
+///
+/// Also, this trait needs the type T over which it's generified
/// to implement certain types in order to work correctly.
-///
+///
/// The most notorious one it's the [`RowMapper`] one, which allows
/// Canyon to directly maps database results into structs.
-///
-/// See it's definition and docs to see the real implications.
+///
+/// See its definition and docs for the implementations.
/// Also, you can find the written macro-code that performs the auto-mapping
-/// in the [`canyon_macros`] crates, on the root of this project.
+/// in the *canyon_sql::canyon_macros* crates, at the root of this project.
#[async_trait]
-pub trait CrudOperations<T>: Transaction<T>
-    where T: Debug + CrudOperations<T> + RowMapper<T>
+pub trait CrudOperations<T>: Transaction<T>
+where
+    T: CrudOperations<T> + RowMapper<T>,
{
    async fn find_all<'a>() -> Result<Vec<T>, Box<(dyn std::error::Error + Send + Sync + 'static)>>;
-
-    async fn find_all_datasource<'a>(datasource_name: &'a str) -> Result<Vec<T>, Box<(dyn std::error::Error + Send + Sync + 'static)>>;
-
+
+    async fn find_all_datasource<'a>(
+        datasource_name: &'a str,
+    ) -> Result<Vec<T>, Box<(dyn std::error::Error + Send + Sync + 'static)>>;
+
    async fn find_all_unchecked<'a>() -> Vec<T>;
-
+
    async fn find_all_unchecked_datasource<'a>(datasource_name: &'a str) -> Vec<T>;

-    fn find_all_query<'a>() -> QueryBuilder<'a, T>;
-
-    fn find_all_query_datasource<'a>(datasource_name: &'a str) -> QueryBuilder<'a, T>;
-
+    fn select_query<'a>() -> SelectQueryBuilder<'a, T>;
+
+    fn select_query_datasource(datasource_name: &str) -> SelectQueryBuilder<'_, T>;
+
    async fn count() -> Result<i64, Box<(dyn std::error::Error + Send + Sync + 'static)>>;
-
-    async fn count_datasource<'a>(datasource_name: &'a str) -> Result<i64, Box<(dyn std::error::Error + Send + Sync + 'static)>>;

-    async fn find_by_pk<'a>(value: &'a dyn QueryParameters<'a>)
-        -> Result<Option<T>, Box<(dyn std::error::Error + Send + Sync + 'static)>>;
-
+    async fn count_datasource<'a>(
+        datasource_name: &'a str,
+    ) -> Result<i64, Box<(dyn std::error::Error + Send + Sync + 'static)>>;
+
+    async fn find_by_pk<'a>(
+        value: &'a dyn QueryParameters<'a>,
+    ) -> Result<Option<T>, Box<(dyn std::error::Error + Send + Sync + 'static)>>;
+
    async fn find_by_pk_datasource<'a>(
        value: &'a dyn QueryParameters<'a>,
-        datasource_name: &'a str
+        datasource_name: &'a str,
    ) -> Result<Option<T>, Box<(dyn std::error::Error + Send + Sync + 'static)>>;

-    async fn insert<'a>(&mut self)
-        -> Result<(), Box<dyn std::error::Error + Sync + Send + 'static>>;
+    async fn insert<'a>(
+        &mut self,
+    ) -> Result<(), Box<dyn std::error::Error + Sync + Send + 'static>>;

-    async fn insert_datasource<'a>(&mut self, datasource_name: &'a str)
-        -> Result<(), Box<dyn std::error::Error + Sync + Send + 'static>>;
+    async fn insert_datasource<'a>(
+        &mut self,
+        datasource_name: &'a str,
+    ) -> Result<(), Box<dyn std::error::Error + Sync + Send + 'static>>;

-    async fn multi_insert<'a>(instances: &'a mut [&'a mut T])
-        -> Result<(), Box<(dyn std::error::Error + Send + Sync + 'static)>>;
+    async fn multi_insert<'a>(
+        instances: &'a mut [&'a mut T],
+    ) -> Result<(), Box<(dyn std::error::Error + Send + Sync + 'static)>>;

    async fn multi_insert_datasource<'a>(
        instances: &'a mut [&'a mut T],
-        datasource_name: &'a str
+        datasource_name: &'a str,
    ) -> Result<(), Box<(dyn std::error::Error + Send + Sync + 'static)>>;
-
-    async fn update(&self)
-        -> Result<(), Box<dyn std::error::Error + Sync + Send + 'static>>;
-
-    async fn update_datasource<'a>(&self, datasource_name: &'a str)
-        -> Result<(), Box<dyn std::error::Error + Sync + Send + 'static>>;
-
-    fn update_query<'a>() -> QueryBuilder<'a, T>;

-    fn update_query_datasource<'a>(datasource_name: &'a str) -> QueryBuilder<'a, T>;
+    async fn update(&self) -> Result<(), Box<dyn std::error::Error + Sync + Send + 'static>>;
+
+    async fn update_datasource<'a>(
+        &self,
+        datasource_name: &'a str,
+    ) -> Result<(), Box<dyn std::error::Error + Sync + Send + 'static>>;
+
+    fn update_query<'a>() -> UpdateQueryBuilder<'a, T>;
+
+    fn update_query_datasource(datasource_name: &str) -> UpdateQueryBuilder<'_, T>;

    async fn delete(&self) -> Result<(), Box<dyn std::error::Error + Sync + Send + 'static>>;

-    async fn delete_datasource<'a>(&self, datasource_name: &'a str)
-        -> Result<(), Box<dyn std::error::Error + Sync + Send + 'static>>;
-
-    fn delete_query<'a>() -> QueryBuilder<'a, T>;
+    async fn delete_datasource<'a>(
+        &self,
+        datasource_name: &'a str,
+    ) -> Result<(), Box<dyn std::error::Error + Sync + Send + 'static>>;

-    fn delete_query_datasource<'a>(datasource_name: &'a str) -> QueryBuilder<'a, T>;
+    fn delete_query<'a>() -> DeleteQueryBuilder<'a, T>;
+
+    fn delete_query_datasource(datasource_name: &str) -> DeleteQueryBuilder<'_, T>;
}

mod postgres_query_launcher {
-    use std::fmt::Debug;
-    use
canyon_connection::canyon_database_connector::DatabaseConnection; use crate::bounds::QueryParameters; use crate::result::DatabaseResult; + use canyon_connection::canyon_database_connector::DatabaseConnection; pub async fn launch<'a, T>( - db_conn: DatabaseConnection, + db_conn: &DatabaseConnection, + // datasource_name: &str, stmt: String, params: &'a [&'_ dyn QueryParameters<'_>], - ) -> Result, Box<(dyn std::error::Error + Send + Sync + 'static)>> - where - T: Debug, - - { - let postgres_connection = db_conn.postgres_connection.unwrap(); - let (client, connection) = - (postgres_connection.client, postgres_connection.connection); - - canyon_connection::tokio::spawn(async move { - if let Err(e) = connection.await { - eprintln!("An error occured while trying to connect to the database: {}", e); - } - }); - + ) -> Result, Box<(dyn std::error::Error + Send + Sync + 'static)>> { let mut m_params = Vec::new(); for param in params { m_params.push(param.as_postgres_param()); } - let query_result = client.query(&stmt, m_params.as_slice()).await; + let query_result = db_conn + .postgres_connection + .as_ref() + .unwrap() + .client + .query(&stmt, m_params.as_slice()) + .await; - if let Err(error) = query_result { - Err(Box::new(error)) + if let Err(error) = query_result { + Err(Box::new(error)) } else { - Ok(DatabaseResult::new_postgresql(query_result.expect("A really bad error happened"))) + Ok(DatabaseResult::new_postgresql( + query_result.expect("A really bad error happened querying PostgreSQL"), + )) } } } mod sqlserver_query_launcher { - use std::fmt::Debug; + use std::mem::transmute; + + use canyon_connection::tiberius::Row; use crate::{ - canyon_connection::{ - async_std::net::TcpStream, - tiberius::{Query, Row, Client}, - canyon_database_connector::DatabaseConnection - }, - result::DatabaseResult, - bounds::QueryParameters + bounds::QueryParameters, + canyon_connection::{canyon_database_connector::DatabaseConnection, tiberius::Query}, + result::DatabaseResult, }; pub async fn launch<'a, T, Z>( - db_conn: DatabaseConnection, + db_conn: &&mut DatabaseConnection, stmt: &mut String, params: Z, - ) -> Result, Box<(dyn std::error::Error + Send + Sync + 'static)>> - where - T: Debug, - Z: AsRef<[&'a dyn QueryParameters<'a>]> + Sync + Send + 'a + ) -> Result, Box<(dyn std::error::Error + Send + Sync + 'static)>> + where + Z: AsRef<[&'a dyn QueryParameters<'a>]> + Sync + Send + 'a, { // Re-generate de insert statement to adecuate it to the SQL SERVER syntax to retrieve the PK value(s) after insert if stmt.contains("RETURNING") { let c = stmt.clone(); - let temp = c.split_once("RETURNING") + let temp = c + .split_once("RETURNING") .expect("An error happened generating an INSERT statement for a SQL SERVER client"); - let temp2 = temp.0.split_once("VALUES") - .expect("An error happened generating an INSERT statement for a SQL SERVER client [1]"); - - *stmt = format!("{} OUTPUT inserted.{} VALUES {}", temp2.0.trim(), temp.1.trim(), temp2.1.trim()); + let temp2 = temp.0.split_once("VALUES").expect( + "An error happened generating an INSERT statement for a SQL SERVER client [1]", + ); + + *stmt = format!( + "{} OUTPUT inserted.{} VALUES {}", + temp2.0.trim(), + temp.1.trim(), + temp2.1.trim() + ); } - let mut sql_server_query = Query::new(stmt.to_owned().replace("$", "@P")); - params.as_ref().into_iter().for_each( |param| sql_server_query.bind( *param )); - - let client: &mut Client = &mut db_conn.sqlserver_connection - .expect("Error querying the SqlServer database") // TODO Better msg? 
- .client; - - let _results: Vec = sql_server_query.query(client).await? - .into_results().await? + let mut mssql_query = Query::new(stmt.to_owned().replace('$', "@P")); + params + .as_ref() + .iter() + .for_each(|param| mssql_query.bind(*param)); + + #[allow(mutable_transmutes)] + let _results: Vec = mssql_query + .query( + unsafe { transmute::<&DatabaseConnection, &mut DatabaseConnection>(db_conn) } + .sqlserver_connection + .as_mut() + .expect("Error querying the MSSQL database") + .client, + ) + .await? + .into_results() + .await? .into_iter() .flatten() .collect::>(); Ok(DatabaseResult::new_sqlserver(_results)) } -} \ No newline at end of file +} diff --git a/canyon_crud/src/lib.rs b/canyon_crud/src/lib.rs index b3fdca41..8a20b48e 100644 --- a/canyon_crud/src/lib.rs +++ b/canyon_crud/src/lib.rs @@ -1,12 +1,12 @@ extern crate canyon_connection; +pub mod bounds; pub mod crud; -pub mod result; pub mod mapper; pub mod query_elements; -pub mod bounds; +pub mod result; pub use query_elements::operators::*; +pub use canyon_connection::{canyon_database_connector::DatabaseType, datasources::*}; pub use chrono; -pub use canyon_connection::canyon_database_connector::DatabaseType; \ No newline at end of file diff --git a/canyon_crud/src/mapper.rs b/canyon_crud/src/mapper.rs index 2d509ab0..71303785 100644 --- a/canyon_crud/src/mapper.rs +++ b/canyon_crud/src/mapper.rs @@ -1,14 +1,12 @@ -use std::fmt::Debug; -use canyon_connection::{tokio_postgres, tiberius}; +use canyon_connection::{tiberius, tokio_postgres}; use crate::crud::Transaction; -/// Sets the way of how to deserialize a custom type T -/// from a Row object retrieved from a database query -pub trait RowMapper>: Sized { - - /// Deserializes a database Row result into Self +/// Declares functions that takes care to deserialize data incoming +/// from some supported database in Canyon-SQL into a user's defined +/// type `T` +pub trait RowMapper>: Sized { fn deserialize_postgresql(row: &tokio_postgres::Row) -> T; fn deserialize_sqlserver(row: &tiberius::Row) -> T; -} \ No newline at end of file +} diff --git a/canyon_crud/src/query_elements/mod.rs b/canyon_crud/src/query_elements/mod.rs index 03aed181..e319d4a4 100644 --- a/canyon_crud/src/query_elements/mod.rs +++ b/canyon_crud/src/query_elements/mod.rs @@ -1,3 +1,3 @@ +pub mod operators; pub mod query; pub mod query_builder; -pub mod operators; \ No newline at end of file diff --git a/canyon_crud/src/query_elements/operators.rs b/canyon_crud/src/query_elements/operators.rs index 82de2062..00b42154 100644 --- a/canyon_crud/src/query_elements/operators.rs +++ b/canyon_crud/src/query_elements/operators.rs @@ -1,21 +1,32 @@ +pub trait Operator { + fn as_str(&self) -> &'static str; +} + +/// Enumerated type for represent the comparison operations +/// in SQL sentences pub enum Comp { + /// Operator "=" equals Eq, + /// Operator "!=" not equals Neq, + /// Operator ">" greather than value Gt, - Gte, + /// Operator ">=" greather or equals than value + GtEq, + /// Operator "<" less than value Lt, - Lte + /// Operator "=<" less or equals than value + LtEq, } - -impl Comp { - pub fn as_string(&self) -> String { +impl Operator for Comp { + fn as_str(&self) -> &'static str { match *self { - Self::Eq => " = ".to_string(), - Self::Neq => " <> ".to_string(), - Self::Gt => " > ".to_string(), - Self::Gte => " >= ".to_string(), - Self::Lt => " < ".to_string(), - Self::Lte => " <= ".to_string() + Self::Eq => " = ", + Self::Neq => " <> ", + Self::Gt => " > ", + Self::GtEq => " >= ", + Self::Lt => " < ", + 
            Self::LtEq => " <= ",
        }
    }
-}
\ No newline at end of file
+}
diff --git a/canyon_crud/src/query_elements/query.rs b/canyon_crud/src/query_elements/query.rs
index e24de614..0a059b64 100644
--- a/canyon_crud/src/query_elements/query.rs
+++ b/canyon_crud/src/query_elements/query.rs
@@ -1,32 +1,28 @@
 use std::{fmt::Debug, marker::PhantomData};

 use crate::{
-    query_elements::query_builder::QueryBuilder,
-    crud::{Transaction, CrudOperations},
-    mapper::RowMapper,
-    bounds::QueryParameters
+    bounds::QueryParameters,
+    crud::{CrudOperations, Transaction},
+    mapper::RowMapper,
 };
-
 /// Holds a sql sentence details
-#[derive(Clone)]
-pub struct Query<'a, T: Debug + CrudOperations<T> + Transaction<T> + RowMapper<T>> {
+#[derive(Debug, Clone)]
+pub struct Query<'a, T: CrudOperations<T> + Transaction<T> + RowMapper<T>> {
    pub sql: String,
-    pub params: &'a [&'a dyn QueryParameters<'a>],
-    marker: PhantomData<T>
+    pub params: Vec<&'a dyn QueryParameters<'a>>,
+    marker: PhantomData<T>,
}

-impl<'a, T> Query<'a, T>
-    where
-        T: Debug + CrudOperations<T> + Transaction<T> + RowMapper<T>
+impl<'a, T> Query<'a, T>
+where
+    T: CrudOperations<T> + Transaction<T> + RowMapper<T>,
{
-    pub fn new(sql: String, datasource_name: &'a str) -> QueryBuilder<'a, T> {
-        let self_ = Self {
-            sql: sql,
-            params: &[],
-            marker: PhantomData
-        };
-        QueryBuilder::<T>::new(self_, datasource_name)
+    pub fn new(sql: String) -> Query<'a, T> {
+        Self {
+            sql,
+            params: vec![],
+            marker: PhantomData,
+        }
    }
}
-
diff --git a/canyon_crud/src/query_elements/query_builder.rs b/canyon_crud/src/query_elements/query_builder.rs
index 5c86c225..ab6ae119 100644
--- a/canyon_crud/src/query_elements/query_builder.rs
+++ b/canyon_crud/src/query_elements/query_builder.rs
@@ -1,174 +1,697 @@
 use std::fmt::Debug;

 use crate::{
+    bounds::{FieldIdentifier, FieldValueIdentifier, QueryParameters},
+    crud::{CrudOperations, Transaction},
+    mapper::RowMapper,
     query_elements::query::Query,
-    query_elements::operators::Comp,
-    crud::{
-        Transaction,
-        CrudOperations
-    },
-    bounds::{
-        FieldIdentifier,
-        FieldValueIdentifier,
-        InClauseValues
-    },
-    mapper::RowMapper
+    Operator,
 };

+/// Contains the elements that make up the formal declaration
+/// of the behaviour of the Canyon-SQL QueryBuilder
+pub mod ops {
+    pub use super::*;

-/// Builder for a query while chaining SQL clauses
-#[derive(Clone)]
-pub struct QueryBuilder<'a, T>
+    /// The [`QueryBuilder`] trait is the root of a hierarchy
+    /// of more specific [`super::QueryBuilder`] types, which are:
+    ///
+    /// * [`super::SelectQueryBuilder`]
+    /// * [`super::UpdateQueryBuilder`]
+    /// * [`super::DeleteQueryBuilder`]
+    ///
+    /// This trait provides the formal declaration of the behaviour that the
+    /// implementors must provide in their public interfaces, grouping
+    /// the common elements between every element down in that
+    /// hierarchy.
+    ///
+    /// For example, the [`super::QueryBuilder`] type holds the data
+    /// necessary to track the SQL sentence while it's being generated
+    /// through the fluent builder, and provides the behaviour of
+    /// the common elements defined in this trait.
+    ///
+    /// The more concrete types represent a wrapper over a raw
+    /// [`super::QueryBuilder`], offering all the elements declared
+    /// in this trait in its public interface, while their implementation
+    /// only consists of calling the same method on the wrapped
+    /// [`super::QueryBuilder`].
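// --- Illustrative usage sketch (not part of the patch) -----------------------
// How the hierarchy described above is meant to be driven from a derived
// entity. `Player` stands in for a struct deriving the Canyon CRUD macros,
// and `PlayerField`/`PlayerFieldValue` for the macro-generated enums that
// implement `FieldIdentifier`/`FieldValueIdentifier`; these names and variant
// shapes are assumptions for illustration. The `ops::QueryBuilder` trait must
// be in scope for the chained calls.
async fn builders_sketch() {
    // SELECT * FROM player WHERE id > $1 ORDER BY id DESC;
    let mut select = Player::select_query();
    select
        .r#where(PlayerFieldValue::id(&10), Comp::Gt)
        .order_by(PlayerField::id, true);
    let _players = select.query().await;

    // UPDATE player SET name = $1 WHERE id = $2;
    let mut update = Player::update_query();
    update
        .set(&[(PlayerField::name, "Rust")])
        .r#where(PlayerFieldValue::id(&1), Comp::Eq);
    let _ = update.query().await;

    // DELETE FROM player WHERE id = $1;
    let mut delete = Player::delete_query();
    delete.r#where(PlayerFieldValue::id(&1), Comp::Eq);
    let _ = delete.query().await;
}
// ------------------------------------------------------------------------------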
+    ///
+    /// This allows us to declare in their public interface their
+    /// specific operations, like, for example, join operations
+    /// on the [`super::SelectQueryBuilder`], and the usage
+    /// of the `SET` clause on a [`super::UpdateQueryBuilder`],
+    /// without mixing types or convoluting everything into
+    /// just one type.
+    pub trait QueryBuilder<'a, T>
    where
-        T: Debug + CrudOperations<T> + Transaction<T> + RowMapper<T>
+        T: Debug + CrudOperations<T> + Transaction<T> + RowMapper<T>,
+    {
+        /// Returns a read-only reference to the underlying SQL sentence,
+        /// with the same lifetime as self
+        fn read_sql(&'a self) -> &'a str;
+
+        /// Public interface for appending the content of a slice to the end of
+        /// the underlying SQL sentence.
+        ///
+        /// This mutator allows the user to wire raw SQL code onto the already
+        /// generated one
+        ///
+        /// * `sql` - The [`&str`] to be wired in the SQL
+        fn push_sql(&mut self, sql: &str);
+
+        /// Generates a `WHERE` SQL clause for constraining the query.
+        ///
+        /// * `column` - A [`FieldValueIdentifier`] that will provide the target
+        /// column name and the value for the filter
+        /// * `op` - Any element that implements [`Operator`] for creating the comparison
+        /// or equality binary operator
+        fn r#where<Z: FieldValueIdentifier<'a, T>>(
+            &mut self,
+            column: Z,
+            op: impl Operator,
+        ) -> &mut Self
+        where
+            T: Debug + CrudOperations<T> + Transaction<T> + RowMapper<T>;
+
+        /// Generates an `AND` SQL clause for constraining the query.
+        ///
+        /// * `column` - A [`FieldValueIdentifier`] that will provide the target
+        /// column name and the value for the filter
+        /// * `op` - Any element that implements [`Operator`] for creating the comparison
+        /// or equality binary operator
+        fn and<Z: FieldValueIdentifier<'a, T>>(
+            &mut self,
+            column: Z,
+            op: impl Operator,
+        ) -> &mut Self;
+
+        /// Generates an `AND` SQL clause that constrains the query with a filter
+        /// built in conjunction with an `IN` operator.
+        ///
+        /// * `column` - A [`FieldIdentifier`] that will provide the target
+        /// column name for the filter, based on the variant that represents
+        /// the field name that maps the targeted column name
+        /// * `values` - An array of [`QueryParameters`] with the values to filter
+        /// inside the `IN` operator
+        fn and_values_in<Z, Q>(&mut self, column: Z, values: &'a [Q]) -> &mut Self
+        where
+            Z: FieldIdentifier<T>,
+            Q: QueryParameters<'a>;
+
+        /// Generates an `OR` SQL clause that constrains the query with a filter
+        /// built in conjunction with an `IN` operator.
+        ///
+        /// * `column` - A [`FieldIdentifier`] that will provide the target
+        /// column name for the filter, based on the variant that represents
+        /// the field name that maps the targeted column name
+        /// * `values` - An array of [`QueryParameters`] with the values to filter
+        /// inside the `IN` operator
+        fn or_values_in<Z, Q>(&mut self, r#or: Z, values: &'a [Q]) -> &mut Self
+        where
+            Z: FieldIdentifier<T>,
+            Q: QueryParameters<'a>;
+
+        /// Generates an `OR` SQL clause for constraining the query.
+        ///
+        /// * `column` - A [`FieldValueIdentifier`] that will provide the target
+        /// column name and the value for the filter
+        /// * `op` - Any element that implements [`Operator`] for creating the comparison
+        /// or equality binary operator
+        fn or<Z: FieldValueIdentifier<'a, T>>(&mut self, column: Z, op: impl Operator)
+            -> &mut Self;
+
+        /// Generates an `ORDER BY` SQL clause for constraining the query.
+ /// + /// * `order_by` - A [`FieldIdentifier`] that will provide the target + /// column name + /// * `desc` - a boolean indicating if the generated `ORDER_BY` must be + /// in ascending or descending order + fn order_by>(&mut self, order_by: Z, desc: bool) -> &mut Self; + } +} + +/// Type for construct more complex queries than the classical CRUD ones. +#[derive(Debug, Clone)] +pub struct QueryBuilder<'a, T> +where + T: CrudOperations + Transaction + RowMapper, { query: Query<'a, T>, - where_clause: String, - and_clause: String, - in_clause: &'a[Box], - order_by_clause: String, - set_clause: String, - datasource_name: &'a str + datasource_name: &'a str, +} + +unsafe impl<'a, T> Send for QueryBuilder<'a, T> where + T: CrudOperations + Transaction + RowMapper +{ } -impl<'a, T> QueryBuilder<'a, T> - where - T: Debug + CrudOperations + Transaction + RowMapper +unsafe impl<'a, T> Sync for QueryBuilder<'a, T> where + T: CrudOperations + Transaction + RowMapper { - // Generates a Query object that contains the necessary data to performn a query - pub async fn query(&'a mut self) - -> Result, Box<(dyn std::error::Error + Sync + Send + 'static)>> - { - self.query.sql.retain(|c| !r#";"#.contains(c)); +} - if self.query.sql.contains("UPDATE") && self.set_clause != "" { - self.query.sql.push_str(&self.set_clause) - } else if !self.query.sql.contains("UPDATE") && self.set_clause != "" { - panic!( - "'SET' SQL statement only must be used in `T::update_query() associated functions`" - ); +impl<'a, T> QueryBuilder<'a, T> +where + T: CrudOperations + Transaction + RowMapper, +{ + /// Returns a new instance of the [`QueryBuilder`] + pub fn new(query: Query<'a, T>, datasource_name: &'a str) -> Self { + Self { + query, + datasource_name, } - - if self.where_clause != "" { - self.query.sql.push_str(&self.where_clause) + } + + /// Launches the generated query against the database targeted + /// by the selected datasource + #[allow(clippy::question_mark)] + pub async fn query( + &'a mut self, + ) -> Result, Box<(dyn std::error::Error + Sync + Send + 'static)>> { + // Close the query, we are ready to go + self.query.sql.push(';'); + + let result = T::query( + self.query.sql.clone(), + self.query.params.to_vec(), + self.datasource_name, + ) + .await; + + if let Err(error) = result { + Err(error) + } else { + Ok(result.ok().unwrap().get_entities::()) } + } + + pub fn r#where>(&mut self, r#where: Z, op: impl Operator) { + let (column_name, value) = r#where.value(); + + let where_ = String::from(" WHERE ") + + column_name + + op.as_str() + + "$" + + &(self.query.params.len() + 1).to_string(); + + self.query.sql.push_str(&where_); + self.query.params.push(value); + } + + pub fn and>(&mut self, r#and: Z, op: impl Operator) { + let (column_name, value) = r#and.value(); + + let and_ = String::from(" AND ") + + column_name + + op.as_str() + + "$" + + &(self.query.params.len() + 1).to_string() + + " "; - if self.and_clause != "" { - self.query.sql.push_str(&self.and_clause) + self.query.sql.push_str(&and_); + self.query.params.push(value); + } + + pub fn or>(&mut self, r#and: Z, op: impl Operator) { + let (column_name, value) = r#and.value(); + + let and_ = String::from(" OR ") + + column_name + + op.as_str() + + "$" + + &(self.query.params.len() + 1).to_string() + + " "; + + self.query.sql.push_str(&and_); + self.query.params.push(value); + } + + pub fn and_values_in(&mut self, r#and: Z, values: &'a [Q]) + where + Z: FieldIdentifier, + Q: QueryParameters<'a>, + { + if values.is_empty() { + return; } - if 
self.in_clause.is_empty() { - for value in self.in_clause { - self.query.sql.push_str(&value.to_string()) + self.query + .sql + .push_str(&format!(" AND {} IN (", r#and.as_str())); + + let mut counter = 1; + values.iter().for_each(|qp| { + if values.len() != counter { + self.query + .sql + .push_str(&format!("${}, ", self.query.params.len())); + counter += 1; + } else { + self.query + .sql + .push_str(&format!("${}", self.query.params.len())); } - } + self.query.params.push(qp) + }); - if self.order_by_clause != "" { - self.query.sql.push_str(&self.order_by_clause) + self.query.sql.push_str(") "); + } + + fn or_values_in(&mut self, r#or: Z, values: &'a [Q]) + where + Z: FieldIdentifier, + Q: QueryParameters<'a>, + { + if values.is_empty() { + return; } - self.query.sql.push(';'); + self.query + .sql + .push_str(&format!(" OR {} IN (", r#or.as_str())); - let result = T::query( - self.query.sql.clone(), - self.query.params, - self.datasource_name - ).await; + let mut counter = 1; + values.iter().for_each(|qp| { + if values.len() != counter { + self.query + .sql + .push_str(&format!("${}, ", self.query.params.len())); + counter += 1; + } else { + self.query + .sql + .push_str(&format!("${}", self.query.params.len())); + } + self.query.params.push(qp) + }); - if let Err(error) = result { - Err(error) - } else { Ok(result.ok().unwrap().get_entities::()) } + self.query.sql.push_str(") "); } - pub fn new(query: Query<'a, T>, datasource_name: &'a str) -> Self { + #[inline] + pub fn order_by>(&mut self, order_by: Z, desc: bool) { + self.query.sql.push_str( + &(format!( + " ORDER BY {}{}", + order_by.as_str(), + if desc { " DESC " } else { "" } + )), + ); + } +} + +#[derive(Debug, Clone)] +pub struct SelectQueryBuilder<'a, T> +where + T: CrudOperations + Transaction + RowMapper, +{ + _inner: QueryBuilder<'a, T>, +} + +impl<'a, T> SelectQueryBuilder<'a, T> +where + T: CrudOperations + Transaction + RowMapper, +{ + /// Generates a new public instance of the [`SelectQueryBuilder`] + pub fn new(table_schema_data: &str, datasource_name: &'a str) -> Self { Self { - query, - where_clause: String::new(), - and_clause: String::new(), - in_clause: &[], - order_by_clause: String::new(), - set_clause: String::new(), - datasource_name + _inner: QueryBuilder::::new( + Query::new(format!("SELECT * FROM {table_schema_data}")), + datasource_name, + ), } } - pub fn r#where>(mut self, r#where: Z, comp: Comp) -> Self { - let values = r#where.value() - .to_string() - .split(" ") - .map( |el| String::from(el)) - .collect::>(); + /// Launches the generated query to the database pointed by the + /// selected datasource + #[inline] + pub async fn query( + &'a mut self, + ) -> Result, Box<(dyn std::error::Error + Sync + Send + 'static)>> { + self._inner.query().await + } - let where_ = values.get(0).unwrap().to_string() + - &comp.as_string()[..] + "'" + - values.get(1).unwrap() + "'"; - - self.where_clause.push_str( - &*(String::from(" WHERE ") + where_.as_str()) - ); - - self - } - - pub fn and>(mut self, r#and: Z, comp: Comp) -> Self { - let values = r#and.value() - .to_string() - .split(" ") - .map( |el| String::from(el)) - .collect::>(); - - let where_ = values.get(0).unwrap().to_string() + - &comp.as_string()[..] 
+ "'" + - values.get(1).unwrap() + "'"; - - self.where_clause.push_str( - &*(String::from(" AND ") + where_.as_str()) - ); + /// Adds a *LEFT JOIN* SQL statement to the underlying + /// [`Query`] held by the [`QueryBuilder`], where: + /// + /// * `join_table` - The table target of the join operation + /// * `col1` - The left side of the ON operator for the join + /// * `col2` - The right side of the ON operator for the join + /// + /// > Note: The order of the column parameters is irrelevant + pub fn left_join(&mut self, join_table: &str, col1: &str, col2: &str) -> &mut Self { + self._inner + .query + .sql + .push_str(&format!(" LEFT JOIN {join_table} ON {col1} = {col2}")); + self + } + /// Adds an *INNER JOIN* SQL statement to the underlying + /// [`Query`] held by the [`QueryBuilder`], where: + /// + /// * `join_table` - The table target of the join operation + /// * `col1` - The left side of the ON operator for the join + /// * `col2` - The right side of the ON operator for the join + /// + /// > Note: The order of the column parameters is irrelevant + pub fn inner_join(&mut self, join_table: &str, col1: &str, col2: &str) -> &mut Self { + self._inner + .query + .sql + .push_str(&format!(" INNER JOIN {join_table} ON {col1} = {col2}")); self - } - pub fn r#in(mut self, in_values: &'a[Box<dyn InClauseValues>]) -> Self { - self.in_clause = in_values; self - } - pub fn order_by<Z: FieldIdentifier<T>>(mut self, order_by: Z, desc: bool) -> Self { - let desc = if desc { String::from(" DESC ") - } else { "".to_owned() }; - self.order_by_clause.push_str( - &*( - String::from(" ORDER BY ") + - order_by.field_name_as_str().as_str() + - &desc - ) - ); + /// Adds a *RIGHT JOIN* SQL statement to the underlying + /// [`Query`] held by the [`QueryBuilder`], where: + /// + /// * `join_table` - The table target of the join operation + /// * `col1` - The left side of the ON operator for the join + /// * `col2` - The right side of the ON operator for the join + /// + /// > Note: The order of the column parameters is irrelevant + pub fn right_join(&mut self, join_table: &str, col1: &str, col2: &str) -> &mut Self { + self._inner + .query + .sql + .push_str(&format!(" RIGHT JOIN {join_table} ON {col1} = {col2}")); self - } + /// Adds a *FULL JOIN* SQL statement to the underlying + /// [`Query`] held by the [`QueryBuilder`], where: + /// + /// * `join_table` - The table target of the join operation + /// * `col1` - The left side of the ON operator for the join + /// * `col2` - The right side of the ON operator for the join + /// + /// > Note: The order of the column parameters is irrelevant + pub fn full_join(&mut self, join_table: &str, col1: &str, col2: &str) -> &mut Self { + self._inner + .query + .sql + .push_str(&format!(" FULL JOIN {join_table} ON {col1} = {col2}")); + self + } +} impl<'a, T> ops::QueryBuilder<'a, T> for SelectQueryBuilder<'a, T> where + T: Debug + CrudOperations<T> + Transaction<T> + RowMapper<T> + Send, +{ + #[inline] + fn read_sql(&'a self) -> &'a str { + self._inner.query.sql.as_str() + } + + #[inline(always)] + fn push_sql(&mut self, sql: &str) { + self._inner.query.sql.push_str(sql); + } + + #[inline] + fn r#where<Z: FieldValueIdentifier<'a, T>>( + &mut self, + r#where: Z, + op: impl Operator, + ) -> &mut Self { + self._inner.r#where(r#where, op); + self + } + + #[inline] + fn and<Z: FieldValueIdentifier<'a, T>>(&mut self, column: Z, op: impl Operator) -> &mut Self { + self._inner.and(column, op); + self + } + + #[inline] + fn and_values_in<Z, Q>(&mut self, r#and: Z, values: &'a [Q]) -> &mut Self + where + Z: FieldIdentifier<T>, + Q: QueryParameters<'a>, + { + self._inner.and_values_in(and,
values); + self + } + + #[inline] + fn or>(&mut self, column: Z, op: impl Operator) -> &mut Self { + self._inner.or(column, op); self } - /// The SQL `SET` clause to especify the columns that must be updated in the sentence - pub fn set(mut self, columns: &'a[(Z, S)]) -> Self - where - Z: FieldIdentifier + Clone, - S: ToString + #[inline] + fn or_values_in(&mut self, r#and: Z, values: &'a [Q]) -> &mut Self + where + Z: FieldIdentifier, + Q: QueryParameters<'a>, + { + self._inner.or_values_in(and, values); + self + } + + #[inline] + fn order_by>(&mut self, order_by: Z, desc: bool) -> &mut Self { + self._inner.order_by(order_by, desc); + self + } +} + +/// Contains the specific database operations of the *UPDATE* SQL statements. +/// +/// * `set` - To construct a new `SET` clause to determine the columns to +/// update with the provided values +#[derive(Debug, Clone)] +pub struct UpdateQueryBuilder<'a, T> +where + T: CrudOperations + Transaction + RowMapper, +{ + _inner: QueryBuilder<'a, T>, +} + +impl<'a, T> UpdateQueryBuilder<'a, T> +where + T: CrudOperations + Transaction + RowMapper, +{ + /// Generates a new public instance of the [`UpdateQueryBuilder`] + pub fn new(table_schema_data: &str, datasource_name: &'a str) -> Self { + Self { + _inner: QueryBuilder::::new( + Query::new(format!("UPDATE {table_schema_data}")), + datasource_name, + ), + } + } + + /// Launches the generated query to the database pointed by the + /// selected datasource + #[inline] + pub async fn query( + &'a mut self, + ) -> Result, Box<(dyn std::error::Error + Sync + Send + 'static)>> { + self._inner.query().await + } + + /// Creates an SQL `SET` clause to especify the columns that must be updated in the sentence + pub fn set(&mut self, columns: &'a [(Z, Q)]) -> &mut Self + where + Z: FieldIdentifier + Clone, + Q: QueryParameters<'a>, { - if columns.len() == 0 { + if columns.is_empty() { return self; - } else if columns.len() > 0 { - self.set_clause.push_str(" SET ") } + if self._inner.query.sql.contains("SET") { + panic!( + "\n{}", + String::from("\t[PANIC!] - Don't use chained calls of the .set(...) method. 
") + + "\n\tPass all the values in a unique call within the 'columns' " + + "array of tuples parameter\n" + ) + } + + let cap = columns.len() * 50; // Reserving an enought initial capacity per set clause + let mut set_clause = String::with_capacity(cap); + set_clause.push_str(" SET "); for (idx, column) in columns.iter().enumerate() { - if idx + 1 == columns.len() { - self.set_clause.push_str( - &(column.0.clone().field_name_as_str().to_owned() + "=" + "'" + column.1.to_string().as_str() + "'") - ) - } else { - self.set_clause.push_str( - &(column.0.clone().field_name_as_str().to_owned() + "=" + "'" + column.1.to_string().as_str() + "', ") - ) + set_clause.push_str(&format!( + "{} = ${}", + column.0.as_str(), + self._inner.query.params.len() + 1 + )); + + if idx < columns.len() - 1 { + set_clause.push_str(", "); } + self._inner.query.params.push(&column.1); } + + self._inner.query.sql.push_str(&set_clause); + self + } +} + +impl<'a, T> ops::QueryBuilder<'a, T> for UpdateQueryBuilder<'a, T> +where + T: Debug + CrudOperations + Transaction + RowMapper + Send, +{ + #[inline] + fn read_sql(&'a self) -> &'a str { + self._inner.query.sql.as_str() + } + + #[inline(always)] + fn push_sql(&mut self, sql: &str) { + self._inner.query.sql.push_str(sql); + } + + #[inline] + fn r#where>( + &mut self, + r#where: Z, + op: impl Operator, + ) -> &mut Self { + self._inner.r#where(r#where, op); self } -} \ No newline at end of file + + #[inline] + fn and>(&mut self, column: Z, op: impl Operator) -> &mut Self { + self._inner.and(column, op); + self + } + + #[inline] + fn and_values_in(&mut self, r#and: Z, values: &'a [Q]) -> &mut Self + where + Z: FieldIdentifier, + Q: QueryParameters<'a>, + { + self._inner.and_values_in(and, values); + self + } + + #[inline] + fn or>(&mut self, column: Z, op: impl Operator) -> &mut Self { + self._inner.or(column, op); + self + } + + #[inline] + fn or_values_in(&mut self, r#or: Z, values: &'a [Q]) -> &mut Self + where + Z: FieldIdentifier, + Q: QueryParameters<'a>, + { + self._inner.or_values_in(or, values); + self + } + + #[inline] + fn order_by>(&mut self, order_by: Z, desc: bool) -> &mut Self { + self._inner.order_by(order_by, desc); + self + } +} + +/// Contains the specific database operations associated with the +/// *DELETE* SQL statements. 
+#[derive(Debug, Clone)]
+pub struct DeleteQueryBuilder<'a, T>
+where
+    T: CrudOperations<T> + Transaction<T> + RowMapper<T>,
+{
+    _inner: QueryBuilder<'a, T>,
+}
+
+impl<'a, T> DeleteQueryBuilder<'a, T>
+where
+    T: CrudOperations<T> + Transaction<T> + RowMapper<T>,
+{
+    /// Generates a new public instance of the [`DeleteQueryBuilder`]
+    pub fn new(table_schema_data: &str, datasource_name: &'a str) -> Self {
+        Self {
+            _inner: QueryBuilder::<T>::new(
+                Query::new(format!("DELETE FROM {table_schema_data}")),
+                datasource_name,
+            ),
+        }
+    }
+
+    /// Launches the generated query against the database pointed to by the
+    /// selected datasource
+    #[inline]
+    pub async fn query(
+        &'a mut self,
+    ) -> Result<Vec<T>, Box<(dyn std::error::Error + Sync + Send + 'static)>> {
+        self._inner.query().await
+    }
+}
+
+impl<'a, T> ops::QueryBuilder<'a, T> for DeleteQueryBuilder<'a, T>
+where
+    T: Debug + CrudOperations<T> + Transaction<T> + RowMapper<T> + Send,
+{
+    #[inline]
+    fn read_sql(&'a self) -> &'a str {
+        self._inner.query.sql.as_str()
+    }
+
+    #[inline(always)]
+    fn push_sql(&mut self, sql: &str) {
+        self._inner.query.sql.push_str(sql);
+    }
+
+    #[inline]
+    fn r#where<Z: FieldValueIdentifier<'a, T>>(
+        &mut self,
+        r#where: Z,
+        op: impl Operator,
+    ) -> &mut Self {
+        self._inner.r#where(r#where, op);
+        self
+    }
+
+    #[inline]
+    fn and<Z: FieldValueIdentifier<'a, T>>(&mut self, column: Z, op: impl Operator) -> &mut Self {
+        self._inner.and(column, op);
+        self
+    }
+
+    #[inline]
+    fn and_values_in<Z, Q>(&mut self, r#and: Z, values: &'a [Q]) -> &mut Self
+    where
+        Z: FieldIdentifier<T>,
+        Q: QueryParameters<'a>,
+    {
+        self._inner.and_values_in(and, values);
+        self
+    }
+
+    #[inline]
+    fn or<Z: FieldValueIdentifier<'a, T>>(&mut self, column: Z, op: impl Operator) -> &mut Self {
+        self._inner.or(column, op);
+        self
+    }
+
+    #[inline]
+    fn or_values_in<Z, Q>(&mut self, r#or: Z, values: &'a [Q]) -> &mut Self
+    where
+        Z: FieldIdentifier<T>,
+        Q: QueryParameters<'a>,
+    {
+        self._inner.or_values_in(or, values);
+        self
+    }
+
+    #[inline]
+    fn order_by<Z: FieldIdentifier<T>>(&mut self, order_by: Z, desc: bool) -> &mut Self {
+        self._inner.order_by(order_by, desc);
+        self
+    }
+}
diff --git a/canyon_crud/src/result.rs b/canyon_crud/src/result.rs
index c93fd101..a8e0ba45 100644
--- a/canyon_crud/src/result.rs
+++ b/canyon_crud/src/result.rs
@@ -1,74 +1,94 @@
-use std::{marker::PhantomData, fmt::Debug};
-use canyon_connection::{tokio_postgres, tiberius, canyon_database_connector::DatabaseType};
-use crate::{mapper::RowMapper, crud::Transaction};
-
+use crate::{bounds::Row, crud::Transaction, mapper::RowMapper};
+use canyon_connection::{canyon_database_connector::DatabaseType, tiberius, tokio_postgres};
+use std::{fmt::Debug, marker::PhantomData};

/// Represents a database result after a query, by wrapping the `Vec` types that comes with the
/// results after the query.
/// and providing methods to deserialize this result into a **user defined struct** #[derive(Debug)] -pub struct DatabaseResult { - pub wrapper: Vec, +pub struct DatabaseResult { + pub postgres: Vec, pub sqlserver: Vec, pub active_ds: DatabaseType, - _phantom_data: std::marker::PhantomData + _phantom_data: std::marker::PhantomData, } -impl DatabaseResult { - +impl DatabaseResult { pub fn new_postgresql(result: Vec) -> Self { Self { - wrapper: result, - sqlserver: vec![], + postgres: result, + sqlserver: Vec::with_capacity(0), active_ds: DatabaseType::PostgreSql, - _phantom_data: PhantomData + _phantom_data: PhantomData, } } pub fn new_sqlserver(results: Vec) -> Self { Self { - wrapper: vec![], + postgres: Vec::with_capacity(0), sqlserver: results, active_ds: DatabaseType::SqlServer, - _phantom_data: PhantomData + _phantom_data: PhantomData, } } - /// Returns a Vec filled with instances of the type T. + /// Returns a [`Vec`] filled with instances of the type T. /// Z param it's used to constrait the types that can call this method. - /// + /// /// Also, provides a way to statically call `Z::deserialize_` method, - /// which it's a complex implementation used by the macros to automatically + /// which it's the implementation used by the macros to automatically /// map database columns into the fields for T. - pub fn get_entities + Debug>(&self) -> Vec - where T: Transaction + pub fn get_entities>(&self) -> Vec + where + T: Transaction, { match self.active_ds { - DatabaseType::PostgreSql => self.from_postgresql::(), - DatabaseType::SqlServer => self.from_sql_server::(), + DatabaseType::PostgreSql => self.map_from_postgresql::(), + DatabaseType::SqlServer => self.map_from_sql_server::(), } } - fn from_postgresql + Debug>(&self) -> Vec - where T: Transaction + fn map_from_postgresql>(&self) -> Vec + where + T: Transaction, { let mut results = Vec::new(); - - self.wrapper.iter().for_each( |row| { - results.push( Z::deserialize_postgresql( row ) ) - }); + + self.postgres + .iter() + .for_each(|row| results.push(Z::deserialize_postgresql(row))); results } - fn from_sql_server + Debug>(&self) -> Vec - where T: Transaction + fn map_from_sql_server>(&self) -> Vec + where + T: Transaction, { let mut results = Vec::new(); - - self.sqlserver.iter().for_each( |row| { - results.push( Z::deserialize_sqlserver( row ) ) - }); + + self.sqlserver + .iter() + .for_each(|row| results.push(Z::deserialize_sqlserver(row))); + + results + } + + pub fn as_canyon_rows(&self) -> Vec<&dyn Row> { + let mut results = Vec::new(); + + match self.active_ds { + DatabaseType::PostgreSql => { + self.postgres + .iter() + .for_each(|row| results.push(row as &dyn Row)); + } + DatabaseType::SqlServer => { + self.sqlserver + .iter() + .for_each(|row| results.push(row as &dyn Row)); + } + }; results } @@ -81,8 +101,8 @@ impl DatabaseResult { /// Returns how many rows contains the result of the query pub fn number_of_results(&self) -> usize { match self.active_ds { - DatabaseType::PostgreSql => self.wrapper.len(), + DatabaseType::PostgreSql => self.postgres.len(), DatabaseType::SqlServer => self.sqlserver.len(), } } -} \ No newline at end of file +} diff --git a/canyon_macros/Cargo.toml b/canyon_macros/Cargo.toml index 021b7ac7..1af2a4f4 100755 --- a/canyon_macros/Cargo.toml +++ b/canyon_macros/Cargo.toml @@ -1,8 +1,11 @@ [package] name = "canyon_macros" -version = "1.0.0" +version = "0.0.1" edition = "2021" +[lib] +proc-macro = true + [dependencies] syn = { version = "1.0.86", features = ["full"] } quote = "1.0.9" @@ -10,10 +13,6 @@ 
proc-macro2 = "1.0.27" futures = "0.3.21" tokio = { version = "1.9.0", features = ["full"] } - -canyon_observer = { path = "../canyon_observer" } -canyon_manager = { path = "../canyon_manager" } -canyon_crud = { path = "../canyon_crud" } - -[lib] -proc-macro = true +canyon_observer = { version = "0.0.1", path = "../canyon_observer" } +canyon_crud = { version = "0.0.1", path = "../canyon_crud" } +canyon_connection = { version = "0.0.1", path = "../canyon_connection" } diff --git a/canyon_macros/src/canyon_macro.rs b/canyon_macros/src/canyon_macro.rs index 88979249..ebc02629 100644 --- a/canyon_macros/src/canyon_macro.rs +++ b/canyon_macros/src/canyon_macro.rs @@ -1,4 +1,4 @@ -//! Provides helpers to build the #[canyon] procedural like attribute macro +//! Provides helpers to build the `#[canyon_macros::canyon]` procedural like attribute macro use proc_macro::TokenStream as TokenStream1; use proc_macro2::{Ident, TokenStream}; @@ -12,58 +12,78 @@ use syn::{Lit, NestedMeta}; /// Utilery struct for wrapping the content and result of parsing the attributes on the `canyon` macro pub struct CanyonMacroAttributes { pub allowed_migrations: bool, - pub error: Option + pub error: Option, } /// Parses the [`syn::NestedMeta::Meta`] or [`syn::NestedMeta::Lit`] attached to the `canyon` macro pub fn parse_canyon_macro_attributes(_meta: &Vec) -> CanyonMacroAttributes { - let mut res = CanyonMacroAttributes { - allowed_migrations: false, - error: None + let mut res = CanyonMacroAttributes { + allowed_migrations: false, + error: None, }; for nested_meta in _meta { match nested_meta { syn::NestedMeta::Meta(m) => determine_allowed_attributes(m, &mut res), syn::NestedMeta::Lit(lit) => match lit { - syn::Lit::Str(ref l) => res.error = Some(report_literals_not_allowed(&l.value(), &lit)), - syn::Lit::ByteStr(ref l) => res.error = Some(report_literals_not_allowed(&String::from_utf8_lossy(&l.value()), &lit)), - syn::Lit::Byte(ref l) => res.error = Some(report_literals_not_allowed(&l.value().to_string(), &lit)), - syn::Lit::Char(ref l) => res.error = Some(report_literals_not_allowed(&l.value().to_string(), &lit)), - syn::Lit::Int(ref l) => res.error = Some(report_literals_not_allowed(&l.to_string(), &lit)), - syn::Lit::Float(ref l) => res.error = Some(report_literals_not_allowed(&l.to_string(), &lit)), - syn::Lit::Bool(ref l) => res.error = Some(report_literals_not_allowed(&l.value().to_string(), &lit)) , - syn::Lit::Verbatim(ref l) => res.error = Some(report_literals_not_allowed(&l.to_string(), &lit)) - } + syn::Lit::Str(ref l) => { + res.error = Some(report_literals_not_allowed(&l.value(), lit)) + } + syn::Lit::ByteStr(ref l) => { + res.error = Some(report_literals_not_allowed( + &String::from_utf8_lossy(&l.value()), + lit, + )) + } + syn::Lit::Byte(ref l) => { + res.error = Some(report_literals_not_allowed(&l.value().to_string(), lit)) + } + syn::Lit::Char(ref l) => { + res.error = Some(report_literals_not_allowed(&l.value().to_string(), lit)) + } + syn::Lit::Int(ref l) => { + res.error = Some(report_literals_not_allowed(&l.to_string(), lit)) + } + syn::Lit::Float(ref l) => { + res.error = Some(report_literals_not_allowed(&l.to_string(), lit)) + } + syn::Lit::Bool(ref l) => { + res.error = Some(report_literals_not_allowed(&l.value().to_string(), lit)) + } + syn::Lit::Verbatim(ref l) => { + res.error = Some(report_literals_not_allowed(&l.to_string(), lit)) + } + }, } - }; + } res } - /// Determines whenever a [`syn::NestedMeta::Meta`] it's classified as a valid argument of the `canyon` macro fn 
determine_allowed_attributes(meta: &syn::Meta, cma: &mut CanyonMacroAttributes) { - const ALLOWED_ATTRS: [&'static str; 1] = ["enable_migrations"]; - + const ALLOWED_ATTRS: [&str; 1] = ["enable_migrations"]; + let attr_ident = meta.path().get_ident().unwrap(); let attr_ident_str = attr_ident.to_string(); - + if attr_ident_str.as_str() == "enable_migrations" { cma.allowed_migrations = true; } else { let error = syn::Error::new_spanned( - Ident::new(&attr_ident_str, attr_ident.span().into()), - format!( + Ident::new(&attr_ident_str, attr_ident.span()), + format!( "No `{attr_ident_str}` arguments allowed in the `Canyon` macro attributes.\n\ - Allowed ones are: {:?}", ALLOWED_ATTRS - ) - ).into_compile_error(); + Allowed ones are: {ALLOWED_ATTRS:?}" + ), + ) + .into_compile_error(); cma.error = Some( - quote! { - #error - fn main() {} - }.into() + quote! { + #error + fn main() {} + } + .into(), ) } } @@ -71,48 +91,35 @@ fn determine_allowed_attributes(meta: &syn::Meta, cma: &mut CanyonMacroAttribute /// Creates a custom error for report not allowed literals on the attribute /// args of the `canyon` proc macro fn report_literals_not_allowed(ident: &str, s: &Lit) -> TokenStream1 { - let error = syn::Error::new_spanned(Ident::new(ident, s.span().into()), - "No literals allowed in the `Canyon` macro" - ).into_compile_error(); - + let error = syn::Error::new_spanned( + Ident::new(ident, s.span()), + "No literals allowed in the `Canyon` macro", + ) + .into_compile_error(); + quote! { - #error - fn main() {} - }.into() + #error + fn main() {} + } + .into() } - /// Creates a TokenScream that is used to load the data generated at compile-time /// by the `CanyonManaged` macros again on the queries register pub fn wire_queries_to_execute(canyon_manager_tokens: &mut Vec) { - let mut queries = String::new(); + let data = QUERIES_TO_EXECUTE.lock().unwrap(); + let data_to_wire = data.iter().map(|(key, value)| { + quote! { hm.insert(#key, vec![#(#value),*]); } + }); - for query in QUERIES_TO_EXECUTE.lock().unwrap().iter() { - queries.push_str(&(query.to_owned() + "->")); - } - let tokens = quote! 
/// Creates a TokenStream that is used to load the data generated at compile-time /// by the `CanyonManaged` macros again on the queries register pub fn wire_queries_to_execute(canyon_manager_tokens: &mut Vec) { - let mut queries = String::new(); + let data = QUERIES_TO_EXECUTE.lock().unwrap(); + let data_to_wire = data.iter().map(|(key, value)| { + quote! { hm.insert(#key, vec![#(#value),*]); } + }); - for query in QUERIES_TO_EXECUTE.lock().unwrap().iter() { - queries.push_str(&(query.to_owned() + "->")); - } - let tokens = quote! { - use canyon_sql::canyon_observer::{ - QUERIES_TO_EXECUTE, - postgresql::migrations::DatabaseSyncOperations - }; - - - *QUERIES_TO_EXECUTE.lock().unwrap() = #queries - .split("->") - .map(str::to_string) - .collect::>(); - - - if QUERIES_TO_EXECUTE.lock().unwrap().len() > 1 { - // > 1 because there's an [""] entry - for element in QUERIES_TO_EXECUTE.lock().unwrap().iter() { - println!("\t{}", element) - } - } + use std::collections::HashMap; + use canyon_sql::migrations::processor::MigrationsProcessor; - DatabaseSyncOperations::from_query_register().await; + let mut hm: HashMap<&str, Vec<&str>> = HashMap::new(); + #(#data_to_wire)*; + MigrationsProcessor::from_query_register(&hm).await; }; - - canyon_manager_tokens.push(tokens) + + canyon_manager_tokens.push(tokens) }
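// Hedged sketch of the block this function interpolates into main(); the table
// name and SQL statement below are illustrative placeholders, not from this diff:
#[allow(dead_code)]
async fn wired_queries_shape() {
    use std::collections::HashMap;
    let mut hm: HashMap<&str, Vec<&str>> = HashMap::new();
    hm.insert("league", vec!["ALTER TABLE league ADD COLUMN slug TEXT"]);
    // MigrationsProcessor::from_query_register(&hm).await; // executed inside main()
}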
diff --git a/canyon_macros/src/lib.rs b/canyon_macros/src/lib.rs index a5ef886b..3b7a1f02 100755 --- a/canyon_macros/src/lib.rs +++ b/canyon_macros/src/lib.rs @@ -4,134 +4,179 @@ mod canyon_macro; mod query_operations; mod utils; -use proc_macro::{TokenStream as CompilerTokenStream, Span}; +use canyon_connection::CANYON_TOKIO_RUNTIME; +use proc_macro::{Span, TokenStream as CompilerTokenStream}; use proc_macro2::{Ident, TokenStream}; use quote::{quote, ToTokens}; -use syn::{ - DeriveInput, Fields, Visibility, Type -}; +use syn::{DeriveInput, Fields, Type, Visibility}; use query_operations::{ + delete::{generate_delete_query_tokens, generate_delete_tokens}, + insert::{generate_insert_tokens, generate_multiple_insert_tokens}, select::{ - generate_find_all_unchecked_tokens, - generate_find_all_tokens, - generate_find_all_query_tokens, - generate_count_tokens, - generate_find_by_pk_tokens, - generate_find_by_foreign_key_tokens, - generate_find_by_reverse_foreign_key_tokens - - }, - insert::{ - generate_insert_tokens, - generate_multiple_insert_tokens - }, - update::{ - generate_update_tokens, - generate_update_query_tokens + generate_count_tokens, generate_find_all_query_tokens, generate_find_all_tokens, + generate_find_all_unchecked_tokens, generate_find_by_foreign_key_tokens, + generate_find_by_pk_tokens, generate_find_by_reverse_foreign_key_tokens, }, - delete::{ - generate_delete_tokens, - generate_delete_query_tokens - } + update::{generate_update_query_tokens, generate_update_tokens}, }; -use utils::{ - function_parser::FunctionParser, - macro_tokens::MacroTokens, helpers -}; -use canyon_macro::{wire_queries_to_execute, parse_canyon_macro_attributes}; - -use canyon_manager::manager::{ - manager_builder::{ - generate_user_struct, - generate_enum_with_fields, - generate_enum_with_fields_values - }, - entity::CanyonEntity +use canyon_macro::{parse_canyon_macro_attributes, wire_queries_to_execute}; +use utils::{function_parser::FunctionParser, helpers, macro_tokens::MacroTokens}; + +use canyon_observer::{ + manager::{ + entity::CanyonEntity, + manager_builder::{ + generate_enum_with_fields, generate_enum_with_fields_values, generate_user_struct, + }, + }, + migrations::handler::Migrations, }; use canyon_observer::{ + migrations::register_types::{CanyonRegisterEntity, CanyonRegisterEntityField}, CANYON_REGISTER_ENTITIES, - handler::CanyonHandler, - postgresql::register_types::{ - CanyonRegisterEntity, - CanyonRegisterEntityField - }, }; - -/// Macro for handling the entry point to the program. -/// +/// Macro for handling the entry point to the program. +/// /// Saves the user from writing the tokio proc-attribute and /// the async modifier on the main fn() -/// +/// /// Also, takes care of wiring the code that Canyon needs /// to run in order to check the provided code and to perform /// the necessary operations for the migrations #[proc_macro_attribute] -pub fn canyon(_meta: CompilerTokenStream, input: CompilerTokenStream) -> CompilerTokenStream { +pub fn main(_meta: CompilerTokenStream, input: CompilerTokenStream) -> CompilerTokenStream { let attrs = syn::parse_macro_input!(_meta as syn::AttributeArgs); // Parses the attributes declared in the arguments of this proc macro let attrs_parse_result = parse_canyon_macro_attributes(&attrs); if attrs_parse_result.error.is_some() { - return attrs_parse_result.error.unwrap() + return attrs_parse_result.error.unwrap(); } // Parses the function items that this attribute is attached to let func_res = syn::parse::(input); if func_res.is_err() { - return quote! { fn main() {} }.into() + return quote! { fn main() {} }.into(); } - - // TODO check if the `canyon` macro it's attached only to main? + + // TODO check if the `canyon` macro is attached only to main? let func = func_res.ok().unwrap(); - let sign = func.clone().sig; - let body = func.clone().block.stmts; + let sign = func.sig; + let body = func.block.stmts; if attrs_parse_result.allowed_migrations { - // The migrations - let rt = tokio::runtime::Runtime::new().unwrap(); - rt.block_on(async { - CanyonHandler::run().await; + CANYON_TOKIO_RUNTIME.block_on(async { + canyon_connection::init_connections_cache().await; + Migrations::migrate().await; }); // The queries to execute at runtime in the managed state let mut queries_tokens: Vec = Vec::new(); wire_queries_to_execute(&mut queries_tokens); - + // The final code wired in main() quote! { - use canyon_sql::tokio; - #[tokio::main] - async #sign { - { - #(#queries_tokens)* - } - #(#body)* + #sign { + canyon_sql::runtime::CANYON_TOKIO_RUNTIME + .handle() + .block_on( async { + canyon_sql::runtime::init_connections_cache().await; + { + #(#queries_tokens)* + } + #(#body)* + } + ) } - }.into() + } + .into() } else { quote! { - use canyon_sql::tokio; - #[tokio::main] - async #sign { - #(#body)* + #sign { + canyon_sql::runtime::CANYON_TOKIO_RUNTIME + .handle() + .block_on( async { + canyon_sql::runtime::init_connections_cache().await; + #(#body)* + } + ) } - }.into() + } + .into() } +}
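// Hedged usage sketch of the renamed entry-point macro (`canyon` -> `main` in
// this diff); the `canyon_sql::main` re-export path is an assumption:
//
//   #[canyon_sql::main(enable_migrations)]
//   fn main() {
//       // the body is executed inside CANYON_TOKIO_RUNTIME.block_on(async { ... }),
//       // right after init_connections_cache().await completes
//   }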
+ +#[proc_macro_attribute] +/// Wraps the [`test`] proc macro in a convenient way to run tests within +/// tokio's current reactor +pub fn canyon_tokio_test( + _meta: CompilerTokenStream, + input: CompilerTokenStream, +) -> CompilerTokenStream { + let func_res = syn::parse::(input); + if func_res.is_err() { + quote! { fn non_valid_test_fn() {} }.into() + } else { + let func = func_res.ok().unwrap(); + let sign = func.sig; + let body = func.block.stmts; + let attrs = func.attrs; + quote! { + #[test] + #(#attrs)* + #sign { + canyon_sql::runtime::CANYON_TOKIO_RUNTIME + .handle() + .block_on( async { + canyon_sql::runtime::init_connections_cache().await; + #(#body)* + }); + } + } + .into() + } }
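// Hedged usage sketch for the wrapper above; `League::find_all` and the
// `canyon_sql::macros` re-export path are illustrative assumptions:
//
//   #[canyon_sql::macros::canyon_tokio_test]
//   fn find_all_works() {
//       // `.await` is fine here: the body runs inside block_on
//       let leagues = League::find_all().await;
//       assert!(leagues.is_ok());
//   }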
+/// Generates the enums that contain the `TypeFields` and `TypeFieldsValues` +/// that the querybuilder requires to construct its queries +#[proc_macro_derive(Fields)] +pub fn querybuilder_fields(input: CompilerTokenStream) -> CompilerTokenStream { + let entity_res = syn::parse::(input); + + if entity_res.is_err() { + return entity_res + .expect_err("Unexpected error parsing the struct") + .into_compile_error() + .into(); + } + + // No errors detected on the parsing, so we can safely unwrap the parse result + let entity = entity_res.expect("Unexpected error parsing the struct"); + let _generated_enum_type_for_fields = generate_enum_with_fields(&entity); + let _generated_enum_type_for_fields_values = generate_enum_with_fields_values(&entity); + quote! { + use canyon_sql::crud::bounds::QueryParameters; + #_generated_enum_type_for_fields + #_generated_enum_type_for_fields_values + } + .into() +} /// Takes data from the struct annotated with the `canyon_entity` macro to fill the Canyon Register /// where the data that Canyon needs to work lives. -/// +/// /// Also, it's responsible for generating the tokens for all the `Crud` methods available over /// your type #[proc_macro_attribute] -pub fn canyon_entity(_meta: CompilerTokenStream, input: CompilerTokenStream) -> CompilerTokenStream { +pub fn canyon_entity( + _meta: CompilerTokenStream, + input: CompilerTokenStream, +) -> CompilerTokenStream { let attrs = syn::parse_macro_input!(_meta as syn::AttributeArgs); let mut table_name: Option<&str> = None; @@ -145,30 +190,38 @@ pub fn canyon_entity(_meta: CompilerTokenStream, input: CompilerTokenStream) -> syn::NestedMeta::Meta(m) => { match m { syn::Meta::NameValue(nv) => { - // println!("Found meta nv: {:?}", nv.path.get_ident()); - // println!("Found meta nv: {:?}", nv.lit); - let attr_arg_ident = nv.path.get_ident() + let attr_arg_ident = nv + .path + .get_ident() .expect("Something went wrong parsing the `table_name` argument") .to_string(); - + if attr_arg_ident == "table_name" || attr_arg_ident == "schema" { table_name = Some(Box::leak(attr_arg_ident.into_boxed_str())); match nv.lit { - syn::Lit::Str(ref l) => schema_name = Some(Box::leak(l.value().into_boxed_str())), + syn::Lit::Str(ref l) => { + schema_name = Some(Box::leak(l.value().into_boxed_str())) + } _ => { parsing_attribute_error = Some(syn::Error::new( Span::call_site().into(), - format!("Only string literals are valid values for the attributes") + "Only string literals are valid values for the attributes" ).into_compile_error()); } } } else { - parsing_attribute_error = Some(syn::Error::new( - Span::call_site().into(), - format!("Argument: `{:?}` are not allowed in the canyon_macro attr", &attr_arg_ident) - ).into_compile_error()); + parsing_attribute_error = Some( + syn::Error::new( + Span::call_site().into(), + format!( + "Argument: `{:?}` is not allowed in the canyon_macro attr", + &attr_arg_ident + ), + ) + .into_compile_error(), + ); } - }, + } _ => { parsing_attribute_error = Some(syn::Error::new( Span::call_site().into(), @@ -176,76 +229,72 @@ pub fn canyon_entity(_meta: CompilerTokenStream, input: CompilerTokenStream) -> ).into_compile_error()); } } - }, + } syn::NestedMeta::Lit(_) => { parsing_attribute_error = Some(syn::Error::new( Span::call_site().into(), "No literal values allowed on the `canyon_macros::canyon_entity` proc macro" ).into_compile_error()); - }, + } } } let entity_res = syn::parse::(input); if entity_res.is_err() { - return entity_res.err() - .expect("Unexpected error parsing the struct") + return entity_res + .expect_err("Unexpected error parsing the struct") .into_compile_error() - .into() + .into(); } // No errors detected on the parsing, so we can safely unwrap the parse result - let entity = entity_res.ok().expect("Unexpected error parsing the struct"); - + let entity = entity_res.expect("Unexpected error parsing the struct"); // Generate the bits of code that we should give back to the compiler let generated_user_struct = generate_user_struct(&entity); - let _generated_enum_type_for_fields = generate_enum_with_fields(&entity); - let _generated_enum_type_for_fields_values = generate_enum_with_fields_values(&entity); // The identifier of the entities - let mut new_entity = CanyonRegisterEntity::new(); - let e = Box::leak( - entity.struct_name.to_string() - .into_boxed_str() - ); + let mut new_entity = CanyonRegisterEntity::default(); + let e = Box::leak(entity.struct_name.to_string().into_boxed_str()); new_entity.entity_name = e; new_entity.user_table_name = table_name; new_entity.user_schema_name = schema_name; // The entity fields for field in entity.fields.iter() { - let mut new_entity_field = CanyonRegisterEntityField::new(); - new_entity_field.field_name = field.name.to_string(); - new_entity_field.field_type = field.get_field_type_as_string().replace(" ", ""); - - field.attributes.iter().for_each( - |attr| - new_entity_field.annotations.push(attr.get_as_string()) - ); + let mut new_entity_field = CanyonRegisterEntityField { + field_name: field.name.to_string(), + field_type: field.get_field_type_as_string().replace(' ', ""), + ..Default::default() + }; + + field + .attributes + .iter() + .for_each(|attr| new_entity_field.annotations.push(attr.get_as_string())); new_entity.entity_fields.push(new_entity_field); } // Fill the register with the data of the attached struct - CANYON_REGISTER_ENTITIES.lock() + CANYON_REGISTER_ENTITIES + .lock() .expect("Error acquiring Mutex guard on Canyon Entity macro") .push(new_entity); // Assemble everything let tokens = quote! { #generated_user_struct - #_generated_enum_type_for_fields - #_generated_enum_type_for_fields_values }; - + // Pass the result back to the compiler if let Some(macro_error) = parsing_attribute_error { - quote! { + quote! { #macro_error - #generated_user_struct - }.into() - } else{ + #generated_user_struct + } + .into() + } else { tokens.into() } }
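// Hedged usage sketch of the attribute implemented above (identifiers are
// illustrative; both argument values must be string literals, anything else
// hits the parsing_attribute_error branches):
//
//   #[canyon_entity(table_name = "league", schema = "public")]
//   pub struct League {
//       #[primary_key]
//       id: i32,
//       name: String,
//   }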
@@ -259,14 +308,14 @@ pub fn crud_operations(input: proc_macro::TokenStream) -> proc_macro::TokenStrea // that we can manipulate // Calls the helper struct to build the tokens that generate the final CRUD methods - let ast: DeriveInput = syn::parse(input) - .expect("Error parsing `Canyon Entity for generate the CRUD methods"); + let ast: DeriveInput = + syn::parse(input).expect("Error parsing `Canyon Entity` to generate the CRUD methods"); let macro_data = MacroTokens::new(&ast); let table_name_res = helpers::table_schema_parser(&macro_data); - + let table_schema_data = if let Err(err) = table_name_res { - return err.into() + return err.into(); } else { table_name_res.ok().unwrap() }; @@ -275,68 +324,66 @@ pub fn crud_operations(input: proc_macro::TokenStream) -> proc_macro::TokenStrea impl_crud_operations_trait_for_struct(&macro_data, table_schema_data) } - -fn impl_crud_operations_trait_for_struct(macro_data: &MacroTokens<'_>, table_schema_data: String) -> proc_macro::TokenStream { +fn impl_crud_operations_trait_for_struct( + macro_data: &MacroTokens<'_>, + table_schema_data: String, +) -> proc_macro::TokenStream { let ty = macro_data.ty; // Builds the find_all() query - let _find_all_unchecked_tokens = generate_find_all_unchecked_tokens(&macro_data, &table_schema_data); + let _find_all_unchecked_tokens = + generate_find_all_unchecked_tokens(macro_data, &table_schema_data); // Builds the find_all_result() query - let _find_all_tokens = generate_find_all_tokens(&macro_data, &table_schema_data); + let _find_all_tokens = generate_find_all_tokens(macro_data, &table_schema_data); // Builds the find_all_query() query as a QueryBuilder - let _find_all_query_tokens = generate_find_all_query_tokens(&macro_data, &table_schema_data); - + let _find_all_query_tokens = generate_find_all_query_tokens(macro_data, &table_schema_data); + // Builds a COUNT(*) query over some table - let _count_tokens = generate_count_tokens(&macro_data, &table_schema_data); - + let _count_tokens = generate_count_tokens(macro_data, &table_schema_data); + // Builds the find_by_pk() query - let _find_by_pk_tokens = generate_find_by_pk_tokens(&macro_data, &table_schema_data); - + let _find_by_pk_tokens = generate_find_by_pk_tokens(macro_data, &table_schema_data); + // Builds the insert() query - let _insert_tokens = generate_insert_tokens(&macro_data, &table_schema_data); + let _insert_tokens = generate_insert_tokens(macro_data, &table_schema_data); // Builds the insert_multi() query - let _insert_multi_tokens = generate_multiple_insert_tokens(&macro_data, &table_schema_data); - + let _insert_multi_tokens = generate_multiple_insert_tokens(macro_data, &table_schema_data); + // Builds the update() queries - let _update_tokens = generate_update_tokens(&macro_data, &table_schema_data); + let _update_tokens = generate_update_tokens(macro_data, &table_schema_data); // Builds the update() query as a QueryBuilder - let _update_query_tokens = generate_update_query_tokens(&macro_data, &table_schema_data); + let _update_query_tokens = generate_update_query_tokens(macro_data, &table_schema_data); // Builds the delete() queries - let _delete_tokens = generate_delete_tokens(&macro_data, &table_schema_data); + let _delete_tokens = generate_delete_tokens(macro_data, &table_schema_data); // Builds the delete() query as a QueryBuilder - let _delete_query_tokens = generate_delete_query_tokens(&macro_data, &table_schema_data); - + let _delete_query_tokens = 
generate_delete_query_tokens(macro_data, &table_schema_data); + // Search by foreign key as Vec, since Canyon supports multiple fields having the FK annotation - let _search_by_fk_tokens: Vec<(TokenStream, TokenStream)> = generate_find_by_foreign_key_tokens(&macro_data); - let fk_method_signatures = _search_by_fk_tokens - .iter() - .map( |(sign, _)| { sign }); - let fk_method_implementations = _search_by_fk_tokens - .iter() - .map( |(_, m_impl)| { m_impl }); + let _search_by_fk_tokens: Vec<(TokenStream, TokenStream)> = + generate_find_by_foreign_key_tokens(macro_data); + let fk_method_signatures = _search_by_fk_tokens.iter().map(|(sign, _)| sign); + let fk_method_implementations = _search_by_fk_tokens.iter().map(|(_, m_impl)| m_impl); // The tokens for generating the methods that enable Canyon to retrieve the child entities that are of T type // given a parent entity U: ForeignKeyable, as an associated function for the child type (T) - let _search_by_revese_fk_tokens: Vec<(TokenStream, TokenStream)> = generate_find_by_reverse_foreign_key_tokens(&macro_data, &table_schema_data); - let rev_fk_method_signatures = _search_by_revese_fk_tokens - .iter() - .map( |(sign, _)| { sign }); - let rev_fk_method_implementations = _search_by_revese_fk_tokens - .iter() - .map( |(_, m_impl)| { m_impl }); - + let _search_by_reverse_fk_tokens: Vec<(TokenStream, TokenStream)> = + generate_find_by_reverse_foreign_key_tokens(macro_data, &table_schema_data); + let rev_fk_method_signatures = _search_by_reverse_fk_tokens.iter().map(|(sign, _)| sign); + let rev_fk_method_implementations = + _search_by_reverse_fk_tokens.iter().map(|(_, m_impl)| m_impl); + // The autogenerated name for the trait that holds the fk and rev fk searches let fk_trait_ident = proc_macro2::Ident::new( - &format!("{}FkOperations", &ty.to_string()), - proc_macro2::Span::call_site() + &format!("{}FkOperations", &ty.to_string()), + proc_macro2::Span::call_site(), ); let crud_operations_tokens = quote! { // The find_all_result impl #_find_all_tokens - + // The find_all impl #_find_all_unchecked_tokens @@ -368,99 +415,96 @@ fn impl_crud_operations_trait_for_struct(macro_data: &MacroTokens<'_>, table_sch #_delete_query_tokens }; - let tokens = if _search_by_fk_tokens.len() > 0 { + let tokens = if !_search_by_fk_tokens.is_empty() { quote! 
{ - #[async_trait] - impl canyon_crud::crud::CrudOperations<#ty> for #ty { + #[canyon_sql::macros::async_trait] + impl canyon_sql::crud::CrudOperations<#ty> for #ty { #crud_operations_tokens } - - impl canyon_crud::crud::Transaction<#ty> for #ty {} - + + impl canyon_sql::crud::Transaction<#ty> for #ty {} + /// Hidden trait for generating the foreign key operations available /// in Canyon without having to define them beforehand in CrudOperations, /// because it's just impossible with the actual system (where the methods /// are generated dynamically based on some properties of the `foreign_key` /// annotation) - #[async_trait] - pub trait FkTestFkOperations - where T: - std::fmt::Debug + - canyon_sql::canyon_crud::crud::CrudOperations + - canyon_sql::canyon_crud::mapper::RowMapper - { + #[canyon_sql::macros::async_trait] + pub trait #fk_trait_ident<#ty> { #(#fk_method_signatures)* #(#rev_fk_method_signatures)* } - #[async_trait] - impl #fk_trait_ident<#ty> for #ty { + #[canyon_sql::macros::async_trait] + impl #fk_trait_ident<#ty> for #ty + where #ty: + std::fmt::Debug + + canyon_sql::crud::CrudOperations<#ty> + + canyon_sql::crud::RowMapper<#ty> + { #(#fk_method_implementations)* #(#rev_fk_method_implementations)* } } } else { quote! { - #[async_trait] - impl canyon_crud::crud::CrudOperations<#ty> for #ty { + #[canyon_sql::macros::async_trait] + impl canyon_sql::crud::CrudOperations<#ty> for #ty { #crud_operations_tokens } - - impl canyon_crud::crud::Transaction<#ty> for #ty {} - } + + impl canyon_sql::crud::Transaction<#ty> for #ty {} + } }; - tokens.into() } /// proc-macro to annotate struct fields that hold a foreign key relation. -/// +/// /// So basically, if you have some `ForeignKey` attribute, annotate the parent /// struct (where the ForeignKey table property points) with this macro /// to make it able to work with compound table relations #[proc_macro_derive(ForeignKeyable)] -pub fn implement_foreignkeyable_for_type(input: proc_macro::TokenStream) -> proc_macro::TokenStream { +pub fn implement_foreignkeyable_for_type( + input: proc_macro::TokenStream, +) -> proc_macro::TokenStream { // Gets the data from the AST let ast: DeriveInput = syn::parse(input).unwrap(); let ty = ast.ident; // Recovers the identifiers of the struct's members - let fields = filter_fields( - match ast.data { - syn::Data::Struct(ref s) => &s.fields, - _ => return syn::Error::new( - ty.span(), - "ForeignKeyable only works with Structs" - ).to_compile_error().into() + let fields = filter_fields(match ast.data { + syn::Data::Struct(ref s) => &s.fields, + _ => { + return syn::Error::new(ty.span(), "ForeignKeyable only works with Structs") + .to_compile_error() + .into() } - ); + }); - let field_idents = fields.iter() - .map( |(_vis, ident)| - { - let i = ident.to_string(); - quote! { - #i => Some(self.#ident.to_string()) - } - } - ); + let field_idents = fields.iter().map(|(_vis, ident)| { + let i = ident.to_string(); + quote! { + #i => Some(&self.#ident as &dyn canyon_sql::crud::bounds::QueryParameters<'_>) + } + }); let field_idents_cloned = field_idents.clone(); - - quote!{ - /// Implementation of the trait `ForeignKeyable` for the type + + quote! { + /// Implementation of the trait `ForeignKeyable` for the type /// calling this derive proc macro - impl canyon_sql::canyon_crud::bounds::ForeignKeyable for #ty { - fn get_fk_column<'a>(&self, column: &'a str) -> Option { + impl canyon_sql::crud::bounds::ForeignKeyable for #ty { + fn get_fk_column(&self, column: &str) -> Option<&dyn canyon_sql::crud::bounds::QueryParameters<'_>> { match column { #(#field_idents),*, _ => None } } } - /// Implementation of the trait `ForeignKeyable` for a reference of this type + /// Implementation of the trait `ForeignKeyable` for a reference of this type /// calling this derive proc macro - impl canyon_sql::canyon_crud::bounds::ForeignKeyable<&Self> for &#ty { - fn get_fk_column<'a>(&self, column: &'a str) -> Option { + impl canyon_sql::crud::bounds::ForeignKeyable<&Self> for &#ty { + fn get_fk_column<'a>(&self, column: &'a str) -> Option<&dyn canyon_sql::crud::bounds::QueryParameters<'_>> { match column { #(#field_idents_cloned),*, _ => None
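// Hedged usage sketch of the derive wired above (field set illustrative; the
// `canyon_sql::macros` re-export path is an assumption):
//
//   #[derive(canyon_sql::macros::ForeignKeyable)]
//   pub struct League { id: i32, name: String }
//
//   // league.get_fk_column("id") then yields
//   // Some(&self.id as &dyn canyon_sql::crud::bounds::QueryParameters<'_>)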
@@ -476,21 +520,20 @@ pub fn implement_row_mapper_for_type(input: proc_macro::TokenStream) -> proc_mac let ast: DeriveInput = syn::parse(input).unwrap(); // Recovers the identifiers of the struct's members - let fields = fields_with_types( - match ast.data { - syn::Data::Struct(ref s) => &s.fields, - _ => return syn::Error::new( - ast.ident.span(), - "CanyonMapper only works with Structs" - ).to_compile_error().into(), + let fields = fields_with_types(match ast.data { + syn::Data::Struct(ref s) => &s.fields, + _ => { + return syn::Error::new(ast.ident.span(), "CanyonMapper only works with Structs") + .to_compile_error() + .into() } - ); + }); // Here is where the incoming values of the DatabaseResult are wired into a new // instance, mapping the fields of the type against the columns let init_field_values = fields.iter().map(|(_vis, ident, _ty)| { let ident_name = ident.to_string(); quote! { #ident: row.try_get(#ident_name) .expect(format!("Failed to retrieve the {} field", #ident_name).as_ref()) } @@ -498,88 +541,87 @@ pub fn implement_row_mapper_for_type(input: proc_macro::TokenStream) -> proc_mac let init_field_values_sqlserver = fields.iter().map(|(_vis, ident, ty)| { let ident_name = ident.to_string(); - let quote = if get_field_type_as_string(ty) == "String" { - quote! { + + if get_field_type_as_string(ty) == "String" { + quote! { #ident: row.get::<&str, &str>(#ident_name) .expect(format!("Failed to retrieve the `{}` field", #ident_name).as_ref()) .to_string() } } else if get_field_type_as_string(ty).replace(' ', "") == "Option" { - quote! { + quote! { #ident: row.get::(#ident_name) } } else if get_field_type_as_string(ty).replace(' ', "") == "Option" { - quote! { + quote! { #ident: row.get::(#ident_name) } } else if get_field_type_as_string(ty).replace(' ', "") == "Option" { - quote! { + quote! { #ident: row.get::(#ident_name) } } else if get_field_type_as_string(ty).replace(' ', "") == "Option" { - quote! { + quote! { #ident: row.get::<&str, &str>(#ident_name) .map( |x| x.to_owned() ) } } else if get_field_type_as_string(ty) == "NaiveDate" { - quote! { - #ident: row.get::(#ident_name) + quote! { + #ident: row.get::(#ident_name) .expect(format!("Failed to retrieve the `{}` field", #ident_name).as_ref()) } } else if get_field_type_as_string(ty).replace(' ', "") == "Option" { - quote! { - #ident: row.get::(#ident_name) + quote! { + #ident: row.get::(#ident_name) } - } else if get_field_type_as_string(ty)== "NaiveTime" { - quote! 
{ - #ident: row.get::(#ident_name) + } else if get_field_type_as_string(ty) == "NaiveTime" { + quote! { + #ident: row.get::(#ident_name) .expect(format!("Failed to retrieve the `{}` field", #ident_name).as_ref()) } } else if get_field_type_as_string(ty).replace(' ', "") == "Option" { - quote! { - #ident: row.get::(#ident_name) + quote! { + #ident: row.get::(#ident_name) } } else if get_field_type_as_string(ty) == "NaiveDateTime" { - quote! { - #ident: row.get::(#ident_name) + quote! { + #ident: row.get::(#ident_name) .expect(format!("Failed to retrieve the `{}` field", #ident_name).as_ref()) } } else if get_field_type_as_string(ty).replace(' ', "") == "Option" { - quote! { - #ident: row.get::(#ident_name) + quote! { + #ident: row.get::(#ident_name) } } else if get_field_type_as_string(ty) == "DateTime" { - quote! { - #ident: row.get::(#ident_name) + quote! { + #ident: row.get::(#ident_name) .expect(format!("Failed to retrieve the `{}` field", #ident_name).as_ref()) } } else if get_field_type_as_string(ty).replace(' ', "") == "Option" { - quote! { + quote! { #ident: row.get::(#ident_name) } } else { - quote! { + quote! { #ident: row.get::<#ty, &str>(#ident_name) .expect(format!("Failed to retrieve the `{}` field", #ident_name).as_ref()) } - }; - - quote + } }); // The type of the Struct let ty = ast.ident; let tokens = quote! { - impl canyon_sql::canyon_crud::mapper::RowMapper for #ty + impl canyon_sql::crud::RowMapper for #ty { - fn deserialize_postgresql(row: &canyon_sql::canyon_connection::tokio_postgres::Row) -> #ty { + fn deserialize_postgresql(row: &canyon_sql::db_clients::tokio_postgres::Row) -> #ty { Self { #(#init_field_values),* } } - fn deserialize_sqlserver(row: &canyon_sql::canyon_connection::tiberius::Row) -> #ty { + fn deserialize_sqlserver(row: &canyon_sql::db_clients::tiberius::Row) -> #ty { Self { #(#init_field_values_sqlserver),* } @@ -594,22 +636,20 @@ pub fn implement_row_mapper_for_type(input: proc_macro::TokenStream) -> proc_mac fn filter_fields(fields: &Fields) -> Vec<(Visibility, Ident)> { fields .iter() - .map(|field| - (field.vis.clone(), field.ident.as_ref().unwrap().clone()) - ) + .map(|field| (field.vis.clone(), field.ident.as_ref().unwrap().clone())) .collect::>() } fn fields_with_types(fields: &Fields) -> Vec<(Visibility, Ident, Type)> { fields .iter() - .map(|field| + .map(|field| { ( field.vis.clone(), field.ident.as_ref().unwrap().clone(), - field.ty.clone() - ) - ) + field.ty.clone(), + ) + }) .collect::>() } @@ -632,4 +672,4 @@ fn get_field_type_as_string(typ: &Type) -> String { Type::Verbatim(type_) => type_.to_token_stream().to_string(), _ => "".to_owned(), } -} \ No newline at end of file +}
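// Hedged sketch of the postgres arm the derive above generates for a simple
// two-field struct (field list illustrative; the sqlserver arm is built the
// same way from init_field_values_sqlserver):
//
//   impl canyon_sql::crud::RowMapper<League> for League {
//       fn deserialize_postgresql(row: &canyon_sql::db_clients::tokio_postgres::Row) -> League {
//           Self {
//               id: row.try_get("id").expect("Failed to retrieve the id field"),
//               name: row.try_get("name").expect("Failed to retrieve the name field"),
//           }
//       }
//       // fn deserialize_sqlserver(...) -> League { ... }
//   }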
diff --git a/canyon_macros/src/query_operations/delete.rs b/canyon_macros/src/query_operations/delete.rs index 627d0402..16ef1ec4 100644 --- a/canyon_macros/src/query_operations/delete.rs +++ b/canyon_macros/src/query_operations/delete.rs @@ -3,7 +3,6 @@ use quote::quote; use crate::utils::macro_tokens::MacroTokens; - /// Generates the TokenStream for the __delete() CRUD operation /// returning a result, indicating a possible failure querying the database pub fn generate_delete_tokens(macro_data: &MacroTokens, table_schema_data: &String) -> TokenStream { let ty = macro_data.ty; let fields = macro_data.get_struct_fields(); let pk = macro_data.get_primary_key_annotation(); if let Some(primary_key) = pk { - let pk_field = fields.iter() - .find( |f| *f.to_string() == primary_key) - .expect("Something really bad happened finding the syn::Ident for the pk field of the delete"); - let pk_field_value = quote! { &self.#pk_field as &dyn canyon_sql::bounds::QueryParameters<'_> }; + let pk_field = fields + .iter() + .find(|f| *f.to_string() == primary_key) + .expect( + "Something really bad happened finding the Ident for the pk field on the delete", + ); + let pk_field_value = + quote! { &self.#pk_field as &dyn canyon_sql::crud::bounds::QueryParameters<'_> }; quote! { /// Deletes from a database entity the row that matches /// the current instance of a T type, returning a result /// indicating a possible failure querying the database. async fn delete(&self) -> Result<(), Box<(dyn std::error::Error + Send + Sync + 'static)>> { let stmt = format!("DELETE FROM {} WHERE {:?} = $1", #table_schema_data, #primary_key); - let result = <#ty as canyon_sql::canyon_crud::crud::Transaction<#ty>>::query( - stmt, + let result = <#ty as canyon_sql::crud::Transaction<#ty>>::query( + stmt, &[#pk_field_value], "" ).await; if let Err(error) = result { Err(error) } else { Ok(()) } } - + /// Deletes from a database entity the row that matches /// the current instance of a T type, returning a result /// indicating a possible failure querying the database with the specified datasource. async fn delete_datasource<'a>(&self, datasource_name: &'a str) -> Result<(), Box<(dyn std::error::Error + Send + Sync + 'static)>> { let stmt = format!("DELETE FROM {} WHERE {:?} = $1", #table_schema_data, #primary_key); - let result = <#ty as canyon_sql::canyon_crud::crud::Transaction<#ty>>::query( - stmt, + let result = <#ty as canyon_sql::crud::Transaction<#ty>>::query( + stmt, &[#pk_field_value], datasource_name ).await; if let Err(error) = result { Err(error) } else { Ok(()) } } } } else { // Delete operation over an instance isn't available without declaring a primary key. // The delete querybuilder variant must be used for the case when there's no pk declared quote! { - async fn delete(&self) + async fn delete(&self) -> Result<(), Box> { Err(std::io::Error::new( std::io::ErrorKind::Other, "You can't use the `delete` method on a \ CanyonEntity that does not have a #[primary_key] annotation. \ If you need to perform an specific search, use the Querybuilder instead." ).into_inner().unwrap()) } - async fn delete_datasource<'a>(&self, datasource_name: &'a str) + async fn delete_datasource<'a>(&self, datasource_name: &'a str) -> Result<(), Box> { Err(std::io::Error::new( std::io::ErrorKind::Other, "You can't use the `delete_datasource` method on a \ CanyonEntity that does not have a #[primary_key] annotation. \ If you need to perform an specific search, use the Querybuilder instead." ).into_inner().unwrap()) } } } } -/// Generates the TokenStream for the __delete() CRUD operation as a +/// Generates the TokenStream for the __delete() CRUD operation as a /// [`query_elements::query_builder::QueryBuilder<'a, #ty>`] -pub fn generate_delete_query_tokens(macro_data: &MacroTokens, table_schema_data: &String) -> TokenStream { +pub fn generate_delete_query_tokens( + macro_data: &MacroTokens, + table_schema_data: &String, +) -> TokenStream { let ty = macro_data.ty; quote! { - /// Deletes a record on a table for the target database that matches the value - /// of the primary key of the instance - fn delete_query<'a>() -> query_elements::query_builder::QueryBuilder<'a, #ty> { - query_elements::query::Query::new(format!("DELETE FROM {}", #table_schema_data), "") + /// Generates a [`canyon_sql::query::DeleteQueryBuilder`] + /// that allows you to customize the query by adding parameters and constraints dynamically. 
+ /// + /// It performs a `DELETE FROM table_name`, where `table_name` is the name of your + /// entity but converted to the corresponding database convention, + /// unless concrete values are set on the available parameters of the + /// `canyon_macro(table_name = "table_name", schema = "schema")` + fn delete_query<'a>() -> canyon_sql::query::DeleteQueryBuilder<'a, #ty> { + canyon_sql::query::DeleteQueryBuilder::new(#table_schema_data, "") } - /// Deletes a record on a table for the target database with the specified - /// values generated with the [`Querybuilder`] and with the - fn delete_query_datasource<'a>(datasource_name: &'a str) -> query_elements::query_builder::QueryBuilder<'a, #ty> { - query_elements::query::Query::new(format!("DELETE FROM {}", #table_schema_data), datasource_name) + /// Generates a [`canyon_sql::query::DeleteQueryBuilder`] + /// that allows you to customize the query by adding parameters and constraints dynamically. + /// + /// It performs a `DELETE FROM table_name`, where `table_name` is the name of your + /// entity but converted to the corresponding database convention, + /// unless concrete values are set on the available parameters of the + /// `canyon_macro(table_name = "table_name", schema = "schema")` + /// + /// The query it's made against the database with the configured datasource + /// described in the configuration file, and selected with the [`&str`] + /// passed as parameter. + fn delete_query_datasource<'a>(datasource_name: &'a str) -> canyon_sql::query::DeleteQueryBuilder<'a, #ty> { + canyon_sql::query::DeleteQueryBuilder::new(#table_schema_data, datasource_name) } } -} \ No newline at end of file +}
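// Hedged usage sketch of the builder entry points generated above (datasource
// name illustrative; the constraint/execution API is not shown in this diff):
//
//   let qb = League::delete_query();                                // default datasource
//   let qb_ds = League::delete_query_datasource("postgres_docker"); // named datasource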
 diff --git a/canyon_macros/src/query_operations/insert.rs b/canyon_macros/src/query_operations/insert.rs index fe70e074..8c0ca6f8 100644 --- a/canyon_macros/src/query_operations/insert.rs +++ b/canyon_macros/src/query_operations/insert.rs @@ -9,8 +9,7 @@ pub fn generate_insert_tokens(macro_data: &MacroTokens, table_schema_data: &Stri // Retrieves the fields of the Struct as a collection of Strings, already parsed with // the condition of removing the primary key if it's present and autoincremental - let insert_columns = macro_data.get_column_names_pk_parsed() - .join(", "); + let insert_columns = macro_data.get_column_names_pk_parsed().join(", "); // Returns a String with the generic $x placeholder for the query parameters. let placeholders = macro_data.placeholders_generator(); // Retrieves the fields of the Struct let fields = macro_data.get_struct_fields(); - let insert_values = fields.iter().map( |ident| { + let insert_values = fields.iter().map(|ident| { quote! { &self.#ident } }); let insert_values_cloned = insert_values.clone(); let remove_pk_value_from_fn_entry = if let Some(pk_index) = macro_data.get_pk_index() { quote! { values.remove(#pk_index) } - } else { quote! {} }; - + } else { + quote! {} + }; + + let pk_ident_type = macro_data + ._fields_with_types() .into_iter() - .find( |(i, _t)| Some(i.to_string()) == primary_key); - + .find(|(i, _t)| Some(i.to_string()) == primary_key); + let insert_transaction = if let Some(pk_data) = &pk_ident_type { let pk_ident = &pk_data.0; let pk_type = &pk_data.1; quote! { #remove_pk_value_from_fn_entry; let stmt = format!( - "INSERT INTO {} ({}) VALUES ({}) RETURNING {}", - #table_schema_data, - #insert_columns, + "INSERT INTO {} ({}) VALUES ({}) RETURNING {}", + #table_schema_data, + #insert_columns, #placeholders, #primary_key ); - let result = <#ty as canyon_sql::canyon_crud::crud::Transaction<#ty>>::query( + let result = <#ty as canyon_sql::crud::Transaction<#ty>>::query( stmt, values, datasource_name ).await; match result { Ok(res) => { match res.get_active_ds() { - canyon_sql::canyon_crud::DatabaseType::PostgreSql => { - self.#pk_ident = res.wrapper.get(0) + canyon_sql::crud::DatabaseType::PostgreSql => { + self.#pk_ident = res.postgres.get(0) .expect("No value found on the returning clause") .get::<&str, #pk_type>(#primary_key) .to_owned(); Ok(()) }, - canyon_sql::canyon_crud::DatabaseType::SqlServer => { + canyon_sql::crud::DatabaseType::SqlServer => { self.#pk_ident = res.sqlserver.get(0) .expect("No value found on the returning clause") .get::<#pk_type, &str>(#primary_key) .expect("SQL Server primary key type failed to be set on the instance") .to_owned(); Ok(()) } } }, Err(e) => Err(e) } } } else { quote! { let stmt = format!( - "INSERT INTO {} ({}) VALUES ({})", - #table_schema_data, - #insert_columns, + "INSERT INTO {} ({}) VALUES ({})", + #table_schema_data, + #insert_columns, #placeholders, #primary_key ); - let result = <#ty as canyon_sql::canyon_crud::crud::Transaction<#ty>>::query( + let result = <#ty as canyon_sql::crud::Transaction<#ty>>::query( stmt, values, datasource_name ).await; if let Err(error) = result { Err(error) } else { Ok(()) } } }; quote! { /// Inserts into a database entity the current data in `self`, generating a new /// entry (row), returning the `PRIMARY KEY` = `self.` with the specified /// datasource by its `datasource name`, defined in the configuration file. - /// - /// This `insert` operation needs a `&mut` reference. That's because typically, + /// + /// This `insert` operation needs a `&mut` reference. That's because typically, /// an insert operation represents *new* data stored in the database, so, when - /// inserted, the database will generate a unique new value for the + /// inserted, the database will generate a unique new value for the /// `pk` field, having a unique identifier for every record, and it will /// automatically assign that returned pk to `self.`. So, after the `insert` /// operation, your instance will have the correct value that is the *PRIMARY KEY* /// of the database row that it represents. - /// + /// /// This operation returns a result type, indicating a possible failure querying the database. 
- /// + /// /// ## *Examples* ///``` /// let mut lec: League = League { /// id: Default::default(), /// ext_id: 134524353253, /// slug: "LEC".to_string(), /// name: "League Europe Champions".to_string(), /// region: "EU West".to_string(), /// image_url: "https://lec.eu".to_string(), /// }; /// /// println!("LEC before: {:?}", &lec); /// /// let ins_result = lec.insert().await; /// /// // Now, we can handle the result returned, because it can contains a /// // database error, or, if no errors happened, we can print the inserted /// // instance, which now has the PRIMARY KEY generated by the database /// if let Ok(_) = ins_result { /// println!("LEC after: {:?}", &lec); /// } else { /// eprintln!("{:?}", ins_result.err()) /// } /// ``` - /// - async fn insert<'a>(&mut self) - -> Result<(), Box> + /// + async fn insert<'a>(&mut self) + -> Result<(), Box> { let datasource_name = ""; - let mut values: Vec<&dyn canyon_sql::canyon_crud::bounds::QueryParameters<'_>> = vec![#(#insert_values),*]; + let mut values: Vec<&dyn canyon_sql::crud::bounds::QueryParameters<'_>> = vec![#(#insert_values),*]; #insert_transaction } /// Inserts into a database entity the current data in `self`, generating a new /// entry (row), returning the `PRIMARY KEY` = `self.` with the specified /// datasource by its `datasource name`, defined in the configuration file. - /// - /// This `insert` operation needs a `&mut` reference. That's because typically, + /// + /// This `insert` operation needs a `&mut` reference. That's because typically, /// an insert operation represents *new* data stored in the database, so, when - /// inserted, the database will generate a unique new value for the + /// inserted, the database will generate a unique new value for the /// `pk` field, having a unique identifier for every record, and it will /// automatically assign that returned pk to `self.`. So, after the `insert` /// operation, your instance will have the correct value that is the *PRIMARY KEY* /// of the database row that it represents. - /// + /// /// This operation returns a result type, indicating a possible failure querying the database. - /// + /// /// ## *Examples* ///``` /// let mut lec: League = League { /// id: Default::default(), /// ext_id: 134524353253, /// slug: "LEC".to_string(), /// name: "League Europe Champions".to_string(), /// region: "EU West".to_string(), /// image_url: "https://lec.eu".to_string(), /// }; /// /// println!("LEC before: {:?}", &lec); /// /// let ins_result = lec.insert_datasource("postgres_docker").await; /// /// // Now, we can handle the result returned, because it can contains a /// // database error, or, if no errors happened, we can print the inserted /// // instance, which now has the PRIMARY KEY generated by the database /// if let Ok(_) = ins_result { /// println!("LEC after: {:?}", &lec); /// } else { /// eprintln!("{:?}", ins_result.err()) /// } /// ``` /// - async fn insert_datasource<'a>(&mut self, datasource_name: &'a str) - -> Result<(), Box> + async fn insert_datasource<'a>(&mut self, datasource_name: &'a str) + -> Result<(), Box> { - let mut values: Vec<&dyn canyon_sql::canyon_crud::bounds::QueryParameters<'_>> = vec![#(#insert_values_cloned),*]; + let mut values: Vec<&dyn canyon_sql::crud::bounds::QueryParameters<'_>> = vec![#(#insert_values_cloned),*]; #insert_transaction } - + } } /// Generates the TokenStream for the __insert() CRUD operation, but being available -/// as a [`QueryBuilder`] object, and instead of being a method over some [`T`] type, +/// as a [`QueryBuilder`] object, and instead of being a method over some [`T`] type, /// as an associated function for [`T`] -/// +/// /// This also gives the user the option to insert multiple /// [`T`] objects in only one query -pub fn generate_multiple_insert_tokens(macro_data: &MacroTokens, table_schema_data: &String) -> TokenStream { +pub fn generate_multiple_insert_tokens( + macro_data: &MacroTokens, + table_schema_data: &String, +) -> TokenStream { let ty = macro_data.ty; // Retrieves the fields of the Struct as a continuous String let column_names = macro_data.get_struct_fields_as_strings(); - // Retrieves the fields of the Struct let fields = macro_data.get_struct_fields(); - - let macro_fields = fields.iter().map( |field| - quote! { &instance.#field } - ); + + let macro_fields = fields.iter().map(|field| quote! 
{ &instance.#field }); let macro_fields_cloned = macro_fields.clone(); - let pk = macro_data.get_primary_key_annotation() - .unwrap_or_default(); - - let pk_ident_type = macro_data._fields_with_types() + let pk = macro_data.get_primary_key_annotation().unwrap_or_default(); + + let pk_ident_type = macro_data + ._fields_with_types() .into_iter() - .find( |(i, _t)| i.to_string() == pk); + .find(|(i, _t)| *i == pk); let multi_insert_transaction = if let Some(pk_data) = &pk_ident_type { let pk_ident = &pk_data.0; let pk_type = &pk_data.1; quote! { let mut final_values = final_values; let mut mapped_fields = #column_names.to_string(); let mut splitted = mapped_fields.split(", ") .collect::>(); let pk_value_index = splitted.iter() .position(|pk| *pk == format!("\"{}\"", #pk).as_str()) .expect("Error. No primary key found when it should be there"); splitted.retain(|pk| *pk != format!("\"{}\"", #pk).as_str()); mapped_fields = splitted.join(", ").to_string(); let mut fields_placeholders = String::new(); let mut values_counter = 1; for vector in final_values.iter_mut() { let mut inner_counter = 0; fields_placeholders.push('('); vector.remove(pk_value_index); for _value in vector.iter() { if inner_counter < vector.len() - 1 { fields_placeholders.push_str(&("$".to_owned() + &values_counter.to_string() + ",")); } else { fields_placeholders.push_str(&("$".to_owned() + &values_counter.to_string())); } inner_counter += 1; values_counter += 1; } fields_placeholders.push(')'); if values_counter < final_values.len() * vector.len() { fields_placeholders.push_str(", "); } } let stmt = format!( - "INSERT INTO {} ({}) VALUES {} RETURNING {}", - #table_schema_data, + "INSERT INTO {} ({}) VALUES {} RETURNING {}", + #table_schema_data, mapped_fields, fields_placeholders, #pk ); let mut v_arr = Vec::new(); for arr in final_values.iter() { for value in arr.iter() { v_arr.push(*value) } } - let result = <#ty as canyon_sql::canyon_crud::crud::Transaction<#ty>>::query( - stmt, + let result = <#ty as canyon_sql::crud::Transaction<#ty>>::query( + stmt, v_arr, datasource_name ).await; match result { Ok(res) => { match res.get_active_ds() { - canyon_sql::canyon_crud::DatabaseType::PostgreSql => { + canyon_sql::crud::DatabaseType::PostgreSql => { for (idx, instance) in instances.iter_mut().enumerate() { instance.#pk_ident = res - .wrapper + .postgres .get(idx) .expect("Failed getting the returned IDs for a multi insert") .get::<&str, #pk_type>(#pk); } Ok(()) }, - canyon_sql::canyon_crud::DatabaseType::SqlServer => { + canyon_sql::crud::DatabaseType::SqlServer => { for (idx, instance) in instances.iter_mut().enumerate() { instance.#pk_ident = res .sqlserver @@ -348,7 +351,7 @@ pub fn generate_multiple_insert_tokens(macro_data: &MacroTokens, table_schema_da for vector in final_values.iter_mut() { let mut inner_counter = 0; fields_placeholders.push('('); - + for _value in vector.iter() { if inner_counter < vector.len() - 1 { fields_placeholders.push_str(&("$".to_owned() + &values_counter.to_string() + ",")); } else { fields_placeholders.push_str(&("$".to_owned() + &values_counter.to_string())); } inner_counter += 1; values_counter += 1; } fields_placeholders.push(')'); if values_counter < final_values.len() * vector.len() { fields_placeholders.push_str(", "); } } let stmt = format!( - "INSERT INTO {} ({}) VALUES {}", - #table_schema_data, + "INSERT INTO {} ({}) VALUES {}", + #table_schema_data, mapped_fields, fields_placeholders ); let mut v_arr = Vec::new(); for arr in final_values.iter() { for value in arr.iter() { v_arr.push(*value) } } - let result = <#ty as canyon_sql::canyon_crud::crud::Transaction<#ty>>::query( - stmt, + let result = <#ty as canyon_sql::crud::Transaction<#ty>>::query( + stmt, v_arr, datasource_name ).await; if let Err(error) = result { Err(error) } else { Ok(()) } } }; @@ -398,7 +401,7 @@ pub fn 
generate_multiple_insert_tokens(macro_data, table_schema_da quote! { /// Inserts multiple instances of some type `T` into its related table. - /// + /// /// ``` /// let mut new_league = League { /// id: Default::default(), /// ext_id: 392489032, /// slug: "DKC".to_owned(), /// name: "Denmark Competitive".to_owned(), /// region: "Denmark".to_owned(), /// image_url: "https://www.an_url.com".to_owned() /// }; /// let mut new_league2 = League { /// id: Default::default(), /// ext_id: 392489032, /// slug: "IreL".to_owned(), /// name: "Ireland ERL".to_owned(), /// region: "Ireland".to_owned(), /// image_url: "https://www.yet_another_url.com".to_owned() /// }; /// let mut new_league3 = League { /// id: Default::default(), /// ext_id: 9687392489032, /// slug: "LEC".to_owned(), /// name: "League Europe Champions".to_owned(), /// region: "EU".to_owned(), /// image_url: "https://www.lag.com".to_owned() /// }; /// /// League::multi_insert(&mut [&mut new_league, &mut new_league2, &mut new_league3]).await /// .ok(); /// ``` async fn multi_insert<'a>(instances: &'a mut [&'a mut #ty]) -> ( - Result<(), Box> + Result<(), Box> ) { - use crate::bounds::QueryParameters; + use canyon_sql::crud::bounds::QueryParameters; let datasource_name = ""; - + let mut final_values: Vec>> = Vec::new(); for instance in instances.iter() { let intermediate: &[&dyn QueryParameters<'_>] = &[#(#macro_fields),*]; - + let mut longer_lived: Vec<&dyn QueryParameters<'_>> = Vec::new(); for value in intermediate.into_iter() { longer_lived.push(*value) } final_values.push(longer_lived) } let mut mapped_fields: String = String::new(); - + #multi_insert_transaction } /// Inserts multiple instances of some type `T` into its related table with the specified /// datasource by its `datasource name`, defined in the configuration file. - /// + /// /// ``` /// let mut new_league = League { /// id: Default::default(), /// ext_id: 392489032, /// slug: "DKC".to_owned(), /// name: "Denmark Competitive".to_owned(), /// region: "Denmark".to_owned(), /// image_url: "https://www.an_url.com".to_owned() /// }; /// let mut new_league2 = League { /// id: Default::default(), /// ext_id: 392489032, /// slug: "IreL".to_owned(), /// name: "Ireland ERL".to_owned(), /// region: "Ireland".to_owned(), /// image_url: "https://www.yet_another_url.com".to_owned() /// }; /// let mut new_league3 = League { /// id: Default::default(), /// ext_id: 9687392489032, /// slug: "LEC".to_owned(), /// name: "League Europe Champions".to_owned(), /// region: "EU".to_owned(), /// image_url: "https://www.lag.com".to_owned() /// }; /// /// League::multi_insert_datasource(&mut [&mut new_league, &mut new_league2, &mut new_league3], "postgres_docker").await /// .ok(); /// ``` async fn multi_insert_datasource<'a>(instances: &'a mut [&'a mut #ty], datasource_name: &'a str) -> ( - Result<(), Box> + Result<(), Box> ) { - use crate::bounds::QueryParameters; - + use canyon_sql::crud::bounds::QueryParameters; + let mut final_values: Vec>> = Vec::new(); for instance in instances.iter() { let intermediate: &[&dyn QueryParameters<'_>] = &[#(#macro_fields_cloned),*]; - + let mut longer_lived: Vec<&dyn QueryParameters<'_>> = Vec::new(); for value in intermediate.into_iter() { longer_lived.push(*value) } final_values.push(longer_lived) } let mut mapped_fields: String = String::new(); - + #multi_insert_transaction } } -} \ No newline at end of file +} diff --git a/canyon_macros/src/query_operations/mod.rs b/canyon_macros/src/query_operations/mod.rs index ddade768..dbba723f 100644 --- a/canyon_macros/src/query_operations/mod.rs +++ b/canyon_macros/src/query_operations/mod.rs @@ -1,4 +1,4 @@ -pub mod select; -pub mod insert; pub mod delete; -pub mod update; \ No newline at end of file +pub mod insert; +pub mod select; +pub mod update;
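// Self-contained sketch of the placeholder layout the multi-insert above
// builds: one counter runs across all instances, so two rows of three columns
// yield "($1,$2,$3), ($4,$5,$6)".
#[allow(dead_code)]
fn multi_insert_placeholders(rows: usize, cols: usize) -> String {
    let mut n = 0usize;
    (0..rows)
        .map(|_| {
            // one "(...)" group per instance, continuing the $n numbering
            let group: Vec<String> = (0..cols)
                .map(|_| {
                    n += 1;
                    format!("${n}")
                })
                .collect();
            format!("({})", group.join(","))
        })
        .collect::<Vec<_>>()
        .join(", ")
}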
diff --git a/canyon_macros/src/query_operations/select.rs b/canyon_macros/src/query_operations/select.rs index 8d044b1c..63344086 100644 --- a/canyon_macros/src/query_operations/select.rs +++ b/canyon_macros/src/query_operations/select.rs @@ -1,4 +1,4 @@ -use canyon_manager::manager::field_annotation::EntityFieldAnnotation; +use canyon_observer::manager::field_annotation::EntityFieldAnnotation; use proc_macro2::TokenStream; use quote::quote; @@ -6,11 +6,14 @@ use crate::utils::helpers::*; use crate::utils::macro_tokens::MacroTokens; -/// Generates the TokenStream for building the __find_all() CRUD +/// Generates the TokenStream for building the __find_all() CRUD /// associated function -pub fn generate_find_all_unchecked_tokens(macro_data: &MacroTokens<'_>, table_schema_data: &String) -> TokenStream { +pub fn generate_find_all_unchecked_tokens( + macro_data: &MacroTokens<'_>, + table_schema_data: &String, +) -> TokenStream { let ty = macro_data.ty; - let stmt = format!("SELECT * FROM {}", table_schema_data); + let stmt = format!("SELECT * FROM {table_schema_data}"); quote! { /// Performs a `SELECT * FROM table_name`, where `table_name` is /// the name of your entity but converted to the corresponding /// database convention. E.g., PostgreSQL prefers table names declared /// with snake_case identifiers. async fn find_all_unchecked<'a>() -> Vec<#ty> { - <#ty as canyon_sql::canyon_crud::crud::Transaction<#ty>>::query( + <#ty as canyon_sql::crud::Transaction<#ty>>::query( #stmt, &[], "" ).await .unwrap() .get_entities::<#ty>() } /// Performs a `SELECT * FROM table_name`, where `table_name` is /// the name of your entity but converted to the corresponding /// database convention. E.g., PostgreSQL prefers table names declared /// with snake_case identifiers. - /// + /// /// The query it's made against the database with the configured datasource - /// described in the configuration file, and selected with the [`&str`] + /// described in the configuration file, and selected with the [`&str`] /// passed as parameter. async fn find_all_unchecked_datasource<'a>(datasource_name: &'a str) -> Vec<#ty> { - <#ty as canyon_sql::canyon_crud::crud::Transaction<#ty>>::query( + <#ty as canyon_sql::crud::Transaction<#ty>>::query( #stmt, &[], datasource_name ).await .unwrap() .get_entities::<#ty>() } - } + } } -/// Generates the TokenStream for building the __find_all_result() CRUD +/// Generates the TokenStream for building the __find_all_result() CRUD /// associated function -pub fn generate_find_all_tokens(macro_data: &MacroTokens<'_>, table_schema_data: &String) -> TokenStream { +pub fn generate_find_all_tokens( + macro_data: &MacroTokens<'_>, + table_schema_data: &String, +) -> TokenStream { let ty = macro_data.ty; - let stmt = format!("SELECT * FROM {}", table_schema_data); + let stmt = format!("SELECT * FROM {table_schema_data}"); quote! { /// Performs a `SELECT * FROM table_name`, where `table_name` is /// the name of your entity but converted to the corresponding /// database convention. E.g., PostgreSQL prefers table names declared /// with snake_case identifiers. - async fn find_all<'a>() -> - Result, Box<(dyn std::error::Error + Send + Sync + 'static)>> + async fn find_all<'a>() -> + Result, Box<(dyn std::error::Error + Send + Sync + 'static)>> { - println!("Find all STMT: {:?}", &#stmt); - let result = <#ty as canyon_sql::canyon_crud::crud::Transaction<#ty>>::query( + let result = <#ty as canyon_sql::crud::Transaction<#ty>>::query( #stmt, &[], "" ).await; if let Err(error) = result { Err(error) } else { Ok(result.ok().unwrap().get_entities::<#ty>()) } } /// Performs a `SELECT * FROM table_name`, where `table_name` is /// the name of your entity but converted to the corresponding /// database convention. E.g., PostgreSQL prefers table names declared /// with snake_case identifiers. - /// + /// /// The query it's made against the database with the configured datasource - /// described in the configuration file, and selected with the [`&str`] + /// described in the configuration file, and selected with the [`&str`] /// passed as parameter. - /// + /// /// Also, returns a [`Vec, Error>`], wrapping a possible failure /// querying the database, or, if no error happens, a Vec containing /// the data found. 
- async fn find_all_datasource<'a>(datasource_name: &'a str) -> - Result, Box<(dyn std::error::Error + Send + Sync + 'static)>> + async fn find_all_datasource<'a>(datasource_name: &'a str) -> + Result, Box<(dyn std::error::Error + Send + Sync + 'static)>> { - let result = <#ty as canyon_sql::canyon_crud::crud::Transaction<#ty>>::query( + let result = <#ty as canyon_sql::crud::Transaction<#ty>>::query( #stmt, &[], datasource_name ).await; if let Err(error) = result { Err(error) } else { Ok(result.ok().unwrap().get_entities::<#ty>()) } } } } -/// Same as above, but with a [`query_elements::query_builder::QueryBuilder`] -pub fn generate_find_all_query_tokens(macro_data: &MacroTokens<'_>, table_schema_data: &String) -> TokenStream { +/// Same as above, but with a [`canyon_sql::query::QueryBuilder`] +pub fn generate_find_all_query_tokens( + macro_data: &MacroTokens<'_>, + table_schema_data: &String, +) -> TokenStream { let ty = macro_data.ty; quote! { - /// Generates a [`canyon_sql::canyon_crud::query_elements::query_builder::QueryBuilder`] + /// Generates a [`canyon_sql::query::SelectQueryBuilder`] /// that allows you to customize the query by adding parameters and constraints dynamically. - /// + /// /// It performs a `SELECT * FROM table_name`, where `table_name` is the name of your - /// entity but converted to the corresponding database convention. - fn find_all_query<'a>() -> query_elements::query_builder::QueryBuilder<'a, #ty> { - query_elements::query::Query::new(format!("SELECT * FROM {}", #table_schema_data), "") + /// entity but converted to the corresponding database convention, + /// unless concrete values are set on the available parameters of the + /// `canyon_macro(table_name = "table_name", schema = "schema")` + fn select_query<'a>() -> canyon_sql::query::SelectQueryBuilder<'a, #ty> { + canyon_sql::query::SelectQueryBuilder::new(#table_schema_data, "") } - /// Generates a [`canyon_sql::canyon_crud::query_elements::query_builder::QueryBuilder`] + /// Generates a [`canyon_sql::query::SelectQueryBuilder`] /// that allows you to customize the query by adding parameters and constraints dynamically. - /// + /// /// It performs a `SELECT * FROM table_name`, where `table_name` is the name of your - /// entity but converted to the corresponding database convention. - /// + /// entity but converted to the corresponding database convention, + /// unless concrete values are set on the available parameters of the + /// `canyon_macro(table_name = "table_name", schema = "schema")` + /// /// The query it's made against the database with the configured datasource - /// described in the configuration file, and selected with the [`&str`] + /// described in the configuration file, and selected with the [`&str`] /// passed as parameter. 
- fn find_all_query_datasource<'a>(datasource_name: &'a str) -> - query_elements::query_builder::QueryBuilder<'a, #ty> - { - query_elements::query::Query::new(format!("SELECT * FROM {}", #table_schema_data), datasource_name) + fn select_query_datasource<'a>(datasource_name: &'a str) -> canyon_sql::query::SelectQueryBuilder<'a, #ty> { + canyon_sql::query::SelectQueryBuilder::new(#table_schema_data, datasource_name) } } } /// Performs a COUNT(*) query over some table, returning a [`Result`] wrapping /// a possible success or error coming from the database -pub fn generate_count_tokens(macro_data: &MacroTokens<'_>, table_schema_data: &String) -> TokenStream { +pub fn generate_count_tokens( + macro_data: &MacroTokens<'_>, + table_schema_data: &String, +) -> TokenStream { let ty = macro_data.ty; let ty_str = &ty.to_string(); - let stmt = format!("SELECT COUNT (*) FROM {}", table_schema_data); + let stmt = format!("SELECT COUNT (*) FROM {table_schema_data}"); let result_handling = quote! { if let Err(error) = count { Err(error) } else { let c = count.ok().unwrap(); match c.get_active_ds() { - canyon_sql::canyon_crud::DatabaseType::PostgreSql => { + canyon_sql::crud::DatabaseType::PostgreSql => { Ok( - c.wrapper.get(0) + c.postgres.get(0) .expect(&format!("Count operation failed for {:?}", #ty_str)) .get::<&str, i64>("count") .to_owned() ) }, - canyon_sql::canyon_crud::DatabaseType::SqlServer => { + canyon_sql::crud::DatabaseType::SqlServer => { Ok( c.sqlserver.get(0) .expect(&format!("Count operation failed for {:?}", #ty_str)) .get::("count") .expect(&format!("Count operation failed for {:?}", #ty_str)) .to_owned() ) } } } }; quote! { /// Performs a COUNT(*) query over some table, returning a [`Result`] rather than panicking, /// wrapping a possible success or error coming from the database async fn count() -> Result> { - let count = <#ty as canyon_sql::canyon_crud::crud::Transaction<#ty>>::query( + let count = <#ty as canyon_sql::crud::Transaction<#ty>>::query( #stmt, &[], "" ).await; #result_handling } /// Performs a COUNT(*) query over some table, returning a [`Result`] rather than panicking, /// wrapping a possible success or error coming from the database with the specified datasource async fn count_datasource<'a>(datasource_name: &'a str) -> Result> { - let count = <#ty as canyon_sql::canyon_crud::crud::Transaction<#ty>>::query( + let count = <#ty as canyon_sql::crud::Transaction<#ty>>::query( #stmt, &[], datasource_name ).await; #result_handling } } }
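// Hedged usage sketch of the generated count operations (datasource name
// illustrative); per the postgres arm above, the wrapped value is an i64:
//
//   let total = League::count().await?;
//   let total_ds = League::count_datasource("sqlserver_docker").await?;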
 /// Generates the TokenStream to build the __find_by_pk() CRUD operation
-pub fn generate_find_by_pk_tokens(macro_data: &MacroTokens<'_>, table_schema_data: &String) -> TokenStream {
+pub fn generate_find_by_pk_tokens(
+    macro_data: &MacroTokens<'_>,
+    table_schema_data: &String,
+) -> TokenStream {
     let ty = macro_data.ty;
-    let pk = macro_data.get_primary_key_annotation()
-        .unwrap_or_default();
-    let stmt = format!("SELECT * FROM {} WHERE {} = $1", table_schema_data, pk);
+    let pk = macro_data.get_primary_key_annotation().unwrap_or_default();
+    let stmt = format!("SELECT * FROM {table_schema_data} WHERE {pk} = $1");
 
     // Disabled if there's no `primary_key` annotation
-    if pk == "" {
+    if pk.is_empty() {
         return quote! {
-            async fn find_by_pk<'a>(value: &'a dyn canyon_sql::canyon_crud::bounds::QueryParameters<'a>)
+            async fn find_by_pk<'a>(value: &'a dyn canyon_sql::crud::bounds::QueryParameters<'a>)
                 -> Result<Option<#ty>, Box<(dyn std::error::Error + Send + Sync + 'static)>>
             {
                 Err(
@@ -223,7 +238,7 @@ pub fn generate_find_by_pk_tokens(macro_data: &MacroTokens<'_>, table_schema_dat
             }
 
             async fn find_by_pk_datasource<'a>(
-                value: &'a dyn canyon_sql::canyon_crud::bounds::QueryParameters<'a>,
+                value: &'a dyn canyon_sql::crud::bounds::QueryParameters<'a>,
                 datasource_name: &'a str
             ) -> Result<Option<#ty>, Box<(dyn std::error::Error + Send + Sync + 'static)>> {
                 Err(
@@ -235,91 +250,90 @@ pub fn generate_find_by_pk_tokens(macro_data: &MacroTokens<'_>, table_schema_dat
                     ).into_inner().unwrap()
                 )
             }
-        }; 
+        };
     }
 
     let result_handling = quote! {
         if let Err(error) = result {
             Err(error)
-        } else { 
+        } else {
             match result.as_ref().ok().unwrap() {
                 n if n.number_of_results() == 0 => Ok(None),
                 _ => Ok(
                     Some(
-                        result
-                            .ok()
-                            .unwrap()
-                            .get_entities::<#ty>()[0]
-                            .clone()
+                        result.unwrap()
+                            .get_entities::<#ty>()
+                            .remove(0)
                     )
                 )
-            } 
+            }
         }
     };
 
     quote! {
-        /// Finds an element on the queried table that matches the 
-        /// value of the field annotated with the `primary_key` attribute, 
-        /// filtering by the column that it's declared as the primary 
+        /// Finds an element on the queried table that matches the
+        /// value of the field annotated with the `primary_key` attribute,
+        /// filtering by the column that is declared as the primary
         /// key on the database.
-        /// 
+        ///
         /// This operation is only available if the [`CanyonEntity`] contains
         /// some field declared as primary key.
-        /// 
+        ///
         /// Also, returns a [`Result<Option<T>, Error>`], wrapping a possible failure
         /// querying the database, or, if no errors happen, a success containing
         /// an Option with the data found wrapped in the Some(T) variant,
        /// or None if the value isn't found on the table.
-        async fn find_by_pk<'a>(value: &'a dyn canyon_sql::canyon_crud::bounds::QueryParameters<'a>) ->
+        async fn find_by_pk<'a>(value: &'a dyn canyon_sql::crud::bounds::QueryParameters<'a>) ->
            Result<Option<#ty>, Box<(dyn std::error::Error + Send + Sync + 'static)>>
        {
-            let result = <#ty as canyon_sql::canyon_crud::crud::Transaction<#ty>>::query(
+            let result = <#ty as canyon_sql::crud::Transaction<#ty>>::query(
                #stmt,
                vec![value],
                ""
            ).await;
-            
+
            #result_handling
        }
 
-        /// Finds an element on the queried table that matches the 
-        /// value of the field annotated with the `primary_key` attribute, 
-        /// filtering by the column that it's declared as the primary 
+        /// Finds an element on the queried table that matches the
+        /// value of the field annotated with the `primary_key` attribute,
+        /// filtering by the column that is declared as the primary
        /// key on the database.
-        /// 
+        ///
        /// The query is made against the database with the configured datasource
-        /// described in the configuration file, and selected with the [`&str`] 
+        /// described in the configuration file, and selected with the [`&str`]
        /// passed as parameter.
-        /// 
+        ///
        /// This operation is only available if the [`CanyonEntity`] contains
        /// some field declared as primary key.
-        /// 
+        ///
        /// Also, returns a [`Result<Option<T>, Error>`], wrapping a possible failure
        /// querying the database, or, if no errors happen, a success containing
        /// an Option with the data found wrapped in the Some(T) variant,
        /// or None if the value isn't found on the table.
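Before the datasource overload whose body follows, a call-site sketch of both generated lookups (again reusing the hypothetical `League` entity; the datasource name is illustrative, and the `&i32` argument is assumed to satisfy `QueryParameters`):

    async fn league_by_id() -> Result<(), Box<dyn std::error::Error + Send + Sync>> {
        // The argument is any `&dyn QueryParameters<'_>`; a plain i32 id here.
        let by_id: Option<League> = League::find_by_pk(&1).await?;

        // Same lookup, executed against a named datasource from the config file.
        let by_id_ds: Option<League> = League::find_by_pk_datasource(&1, "postgres_docker").await?;

        println!("{by_id:?} {by_id_ds:?}");
        Ok(())
    }
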
async fn find_by_pk_datasource<'a>( - value: &'a dyn canyon_sql::canyon_crud::bounds::QueryParameters<'a>, + value: &'a dyn canyon_sql::crud::bounds::QueryParameters<'a>, datasource_name: &'a str ) -> Result, Box<(dyn std::error::Error + Send + Sync + 'static)>> { - let result = <#ty as canyon_sql::canyon_crud::crud::Transaction<#ty>>::query( - #stmt, + let result = <#ty as canyon_sql::crud::Transaction<#ty>>::query( + #stmt, vec![value], datasource_name ).await; - + #result_handling } } } - /// Generates the TokenStream for build the search by foreign key feature, also as a method instance /// of a T type of as an associated function of same T type, but wrapped as a Result, representing /// a posible failure querying the database, a bad or missing FK annotation or a missed ForeignKeyable /// derive macro on the parent side of the relation -pub fn generate_find_by_foreign_key_tokens(macro_data: &MacroTokens<'_>) -> Vec<(TokenStream, TokenStream)>{ +pub fn generate_find_by_foreign_key_tokens( + macro_data: &MacroTokens<'_>, +) -> Vec<(TokenStream, TokenStream)> { let mut fk_quotes: Vec<(TokenStream, TokenStream)> = Vec::new(); for (field_ident, fk_annot) in macro_data.get_fk_annotations().iter() { @@ -330,69 +344,68 @@ pub fn generate_find_by_foreign_key_tokens(macro_data: &MacroTokens<'_>) -> Vec< // related entity in some way, and compare it with something else let fk_ty = database_table_name_to_struct_ident(table); - // Generate and identifier for the method based on the convention of "search_related_types" + // Generate and identifier for the method based on the convention of "search_related_types" // where types is a placeholder for the plural name of the type referenced - let method_name_ident = proc_macro2::Ident::new( - &method_name, proc_macro2::Span::call_site() - ); + let method_name_ident = + proc_macro2::Ident::new(&method_name, proc_macro2::Span::call_site()); let method_name_ident_ds = proc_macro2::Ident::new( - &format!("{}_datasource", &method_name), proc_macro2::Span::call_site() + &format!("{}_datasource", &method_name), + proc_macro2::Span::call_site(), ); - let quoted_method_signature: TokenStream = quote! { - async fn #method_name_ident(&self) -> + let quoted_method_signature: TokenStream = quote! { + async fn #method_name_ident(&self) -> Result, Box<(dyn std::error::Error + Send + Sync + 'static)>> }; - let quoted_datasource_method_signature: TokenStream = quote! { - async fn #method_name_ident_ds<'a>(&self, datasource_name: &'a str) -> + let quoted_datasource_method_signature: TokenStream = quote! { + async fn #method_name_ident_ds<'a>(&self, datasource_name: &'a str) -> Result, Box<(dyn std::error::Error + Send + Sync + 'static)>> }; let stmt = format!( - "SELECT * FROM {} WHERE {} = $1", + "SELECT * FROM {} WHERE {} = $1", table, - format!("\"{}\"", column).as_str(), + format!("\"{column}\"").as_str(), ); let result_handler = quote! { if let Err(error) = result { Err(error) - } else { + } else { match result.as_ref().ok().unwrap() { n if n.number_of_results() == 0 => Ok(None), _ => Ok(Some( result - .ok() .unwrap() - .get_entities::<#fk_ty>()[0] - .clone() + .get_entities::<#fk_ty>() + .remove(0) )) - } + } } }; - + fk_quotes.push(( quote!{ #quoted_method_signature; }, quote! 
{ /// Searches the parent entity (if exists) for this type #quoted_method_signature { - let result = <#fk_ty as canyon_sql::canyon_crud::crud::Transaction<#fk_ty>>::query( - #stmt, - &[&self.#field_ident as &dyn canyon_sql::bounds::QueryParameters<'_>], + let result = <#fk_ty as canyon_sql::crud::Transaction<#fk_ty>>::query( + #stmt, + &[&self.#field_ident as &dyn canyon_sql::crud::bounds::QueryParameters<'_>], "" ).await; - + #result_handler } } )); - + fk_quotes.push(( quote! { #quoted_datasource_method_signature; }, quote! { /// Searches the parent entity (if exists) for this type with the specified datasource #quoted_datasource_method_signature { - let result = <#fk_ty as canyon_sql::canyon_crud::crud::Transaction<#fk_ty>>::query( - #stmt, - &[&self.#field_ident as &dyn canyon_sql::bounds::QueryParameters<'_>], + let result = <#fk_ty as canyon_sql::crud::Transaction<#fk_ty>>::query( + #stmt, + &[&self.#field_ident as &dyn canyon_sql::crud::bounds::QueryParameters<'_>], datasource_name ).await; @@ -402,15 +415,18 @@ pub fn generate_find_by_foreign_key_tokens(macro_data: &MacroTokens<'_>) -> Vec< )); } } - + fk_quotes } -/// Generates the TokenStream for build the __search_by_foreign_key() CRUD +/// Generates the TokenStream for build the __search_by_foreign_key() CRUD /// associated function, but wrapped as a Result, representing /// a posible failure querying the database, a bad or missing FK annotation or a missed ForeignKeyable /// derive macro on the parent side of the relation -pub fn generate_find_by_reverse_foreign_key_tokens(macro_data: &MacroTokens<'_>, table_schema_data: &String) -> Vec<(TokenStream, TokenStream)> { +pub fn generate_find_by_reverse_foreign_key_tokens( + macro_data: &MacroTokens<'_>, + table_schema_data: &String, +) -> Vec<(TokenStream, TokenStream)> { let mut rev_fk_quotes: Vec<(TokenStream, TokenStream)> = Vec::new(); let ty = macro_data.ty; @@ -420,39 +436,37 @@ pub fn generate_find_by_reverse_foreign_key_tokens(macro_data: &MacroTokens<'_>, // Generate and identifier for the method based on the convention of "search_by__" (note the double underscore) // plus the 'table_name' property of the ForeignKey annotation - let method_name_ident = proc_macro2::Ident::new( - &method_name, proc_macro2::Span::call_site() - ); + let method_name_ident = + proc_macro2::Ident::new(&method_name, proc_macro2::Span::call_site()); let method_name_ident_ds = proc_macro2::Ident::new( - &format!("{}_datasource", &method_name), proc_macro2::Span::call_site() + &format!("{}_datasource", &method_name), + proc_macro2::Span::call_site(), ); - let quoted_method_signature: TokenStream = quote! { - async fn #method_name_ident<'a, F: canyon_sql::bounds::ForeignKeyable + Sync + Send>(value: &F) -> - Result, Box<(dyn std::error::Error + Send + Sync + 'static)>> - // where >::Output: canyon_sql::bounds::QueryParameters<'a> + let quoted_method_signature: TokenStream = quote! { + async fn #method_name_ident<'a, F: canyon_sql::crud::bounds::ForeignKeyable + Sync + Send>(value: &F) -> + Result, Box<(dyn std::error::Error + Send + Sync + 'static)>> }; - let quoted_datasource_method_signature: TokenStream = quote! { - async fn #method_name_ident_ds<'a, F: canyon_sql::bounds::ForeignKeyable + Sync + Send> - (value: &F, datasource_name: &'a str) -> - Result, Box<(dyn std::error::Error + Send + Sync + 'static)>> - // where >::Output: canyon_sql::bounds::QueryParameters<'a> + let quoted_datasource_method_signature: TokenStream = quote! 
{ + async fn #method_name_ident_ds<'a, F: canyon_sql::crud::bounds::ForeignKeyable + Sync + Send> + (value: &F, datasource_name: &'a str) -> + Result, Box<(dyn std::error::Error + Send + Sync + 'static)>> }; let result_handler = quote! { if let Err(error) = result { Err(error) - } else { + } else { Ok(result.ok().unwrap().get_entities::<#ty>()) } }; let f_ident = field_ident.to_string(); rev_fk_quotes.push(( - quote!{ #quoted_method_signature; }, + quote! { #quoted_method_signature; }, quote! { /// Given a parent entity T annotated with the derive proc macro `ForeignKeyable`, /// performns a search to find the childs that belong to that concrete parent. - #quoted_method_signature + #quoted_method_signature { let lookage_value = value.get_fk_column(#column) .expect(format!( @@ -460,21 +474,20 @@ pub fn generate_find_by_reverse_foreign_key_tokens(macro_data: &MacroTokens<'_>, ).as_str()); let stmt = format!( - "SELECT * FROM {} WHERE {} = {}", + "SELECT * FROM {} WHERE {} = $1", #table_schema_data, - format!("\"{}\"", #f_ident).as_str(), - lookage_value + format!("\"{}\"", #f_ident).as_str() ); - - let result = <#ty as canyon_sql::canyon_crud::crud::Transaction<#ty>>::query( - stmt, - &[], + + let result = <#ty as canyon_sql::crud::Transaction<#ty>>::query( + stmt, + &[lookage_value], "" ).await; - + #result_handler } - } + }, )); rev_fk_quotes.push(( @@ -491,25 +504,23 @@ pub fn generate_find_by_reverse_foreign_key_tokens(macro_data: &MacroTokens<'_>, ).as_str()); let stmt = format!( - "SELECT * FROM {} WHERE {} = {}", + "SELECT * FROM {} WHERE {} = $1", #table_schema_data, - format!("\"{}\"", #f_ident).as_str(), - lookage_value // TODO The unique non-parametrized query parameter - // We must convert it to QueryParameters + format!("\"{}\"", #f_ident).as_str() ); - let result = <#ty as canyon_sql::canyon_crud::crud::Transaction<#ty>>::query( - stmt, - &[], + let result = <#ty as canyon_sql::crud::Transaction<#ty>>::query( + stmt, + &[lookage_value], datasource_name ).await; - + #result_handler } - } + }, )); } } - + rev_fk_quotes -} \ No newline at end of file +} diff --git a/canyon_macros/src/query_operations/update.rs b/canyon_macros/src/query_operations/update.rs index 45e7ea1e..d4496a2d 100644 --- a/canyon_macros/src/query_operations/update.rs +++ b/canyon_macros/src/query_operations/update.rs @@ -12,25 +12,24 @@ pub fn generate_update_tokens(macro_data: &MacroTokens, table_schema_data: &Stri // Retrives the fields of the Struct let fields = macro_data.get_struct_fields(); - let mut vec_columns_values:Vec = Vec::new(); + let mut vec_columns_values: Vec = Vec::new(); for (i, column_name) in update_columns.iter().enumerate() { - let column_equal_value = format!( - "{} = ${}", column_name.to_owned(), i + 2 - ); + let column_equal_value = format!("{} = ${}", column_name.to_owned(), i + 2); vec_columns_values.push(column_equal_value) } let str_columns_values = vec_columns_values.join(", "); - let update_values = fields.iter().map( |ident| { + let update_values = fields.iter().map(|ident| { quote! { &self.#ident } }); let update_values_cloned = update_values.clone(); if let Some(primary_key) = macro_data.get_primary_key_annotation() { - let pk_index = macro_data.get_pk_index() + let pk_index = macro_data + .get_pk_index() .expect("Update method failed to retrieve the index of the primary key"); - + quote! 
 {
         /// Updates a database record that matches
         /// the current instance of a T type, returning a result
@@ -40,9 +39,9 @@ pub fn generate_update_tokens(macro_data: &MacroTokens, table_schema_data: &Stri
                 "UPDATE {} SET {} WHERE {} = ${:?}",
                 #table_schema_data, #str_columns_values, #primary_key, #pk_index + 1
             );
-            let update_values: &[&dyn canyon_sql::bounds::QueryParameters<'_>] = &[#(#update_values),*];
+            let update_values: &[&dyn canyon_sql::crud::bounds::QueryParameters<'_>] = &[#(#update_values),*];
 
-            let result = <#ty as canyon_sql::canyon_crud::crud::Transaction<#ty>>::query(
+            let result = <#ty as canyon_sql::crud::Transaction<#ty>>::query(
                 stmt, update_values, ""
             ).await;
@@ -56,16 +55,16 @@ pub fn generate_update_tokens(macro_data: &MacroTokens, table_schema_data: &Stri
             /// the current instance of a T type, returning a result
             /// indicating a possible failure querying the database with the
             /// specified datasource
-            async fn update_datasource<'a>(&self, datasource_name: &'a str) 
+            async fn update_datasource<'a>(&self, datasource_name: &'a str)
                 -> Result<(), Box<(dyn std::error::Error + Send + Sync + 'static)>>
             {
                 let stmt = format!(
                     "UPDATE {} SET {} WHERE {} = ${:?}",
                     #table_schema_data, #str_columns_values, #primary_key, #pk_index + 1
                 );
-                let update_values: &[&dyn canyon_sql::bounds::QueryParameters<'_>] = &[#(#update_values_cloned),*];
+                let update_values: &[&dyn canyon_sql::crud::bounds::QueryParameters<'_>] = &[#(#update_values_cloned),*];
 
-                let result = <#ty as canyon_sql::canyon_crud::crud::Transaction<#ty>>::query(
+                let result = <#ty as canyon_sql::crud::Transaction<#ty>>::query(
                     stmt, update_values, datasource_name
                 ).await;
@@ -80,7 +79,7 @@ pub fn generate_update_tokens(macro_data: &MacroTokens, table_schema_data: &Stri
 
     // TODO Returning an error should be a provisional way of doing this
     quote! {
-        async fn update(&self) 
+        async fn update(&self)
             -> Result<(), Box<(dyn std::error::Error + Send + Sync + 'static)>>
         {
             Err(
@@ -93,7 +92,7 @@ pub fn generate_update_tokens(macro_data: &MacroTokens, table_schema_data: &Stri
             )
         }
 
-        async fn update_datasource<'a>(&self, datasource_name: &'a str) 
+        async fn update_datasource<'a>(&self, datasource_name: &'a str)
             -> Result<(), Box<(dyn std::error::Error + Send + Sync + 'static)>>
         {
             Err(
@@ -107,25 +106,41 @@ pub fn generate_update_tokens(macro_data: &MacroTokens, table_schema_data: &Stri
         }
     }
 }
-
 }
 
 /// Generates the TokenStream for the __update() CRUD operation
 /// being the query generated with the [`QueryBuilder`]
-pub fn generate_update_query_tokens(macro_data: &MacroTokens, table_schema_data: &String) -> TokenStream {
+pub fn generate_update_query_tokens(
+    macro_data: &MacroTokens,
+    table_schema_data: &String,
+) -> TokenStream {
     let ty = macro_data.ty;
 
     quote! {
-        /// TODO docs
-        fn update_query<'a>() -> query_elements::query_builder::QueryBuilder<'a, #ty> {
-            query_elements::query::Query::new(format!("UPDATE {}", #table_schema_data), "")
+        /// Generates a [`canyon_sql::query::UpdateQueryBuilder`]
+        /// that allows you to customize the query by adding parameters and constraints dynamically.
+        ///
+        /// It performs an `UPDATE table_name`, where `table_name` is the name of your
+        /// entity but converted to the corresponding database convention,
+        /// unless concrete values are set through the available parameters of the
+        /// `canyon_macro(table_name = "table_name", schema = "schema")` annotation
+        fn update_query<'a>() -> canyon_sql::query::UpdateQueryBuilder<'a, #ty> {
+            canyon_sql::query::UpdateQueryBuilder::new(#table_schema_data, "")
         }
 
-        /// TODO docs
-        fn update_query_datasource<'a>(datasource_name: &'a str)
-            -> query_elements::query_builder::QueryBuilder<'a, #ty>
-        {
-            query_elements::query::Query::new(format!("UPDATE {}", #table_schema_data), datasource_name)
+        /// Generates a [`canyon_sql::query::UpdateQueryBuilder`]
+        /// that allows you to customize the query by adding parameters and constraints dynamically.
+        ///
+        /// It performs an `UPDATE table_name`, where `table_name` is the name of your
+        /// entity but converted to the corresponding database convention,
+        /// unless concrete values are set through the available parameters of the
+        /// `canyon_macro(table_name = "table_name", schema = "schema")` annotation
+        ///
+        /// The query is made against the database with the configured datasource
+        /// described in the configuration file, and selected with the [`&str`]
+        /// passed as parameter.
+        fn update_query_datasource<'a>(datasource_name: &'a str) -> canyon_sql::query::UpdateQueryBuilder<'a, #ty> {
+            canyon_sql::query::UpdateQueryBuilder::new(#table_schema_data, datasource_name)
        }
    }
-} \ No newline at end of file
+}
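A call-site sketch covering the instance-level methods generated above together with the builder introduced next (same hypothetical `League` entity as before; only construction of the builder is shown, since its chaining methods are defined elsewhere in the crate):

    async fn rename_league(mut league: League) -> Result<(), Box<dyn std::error::Error + Send + Sync>> {
        league.name = "LEC".to_owned();

        // "UPDATE league SET ... WHERE id = $N", bound to the instance's field values.
        league.update().await?;

        // The same statement, routed to a named datasource from the config file.
        league.update_datasource("postgres_docker").await?;

        // Lazy alternative: an UpdateQueryBuilder seeded with "UPDATE league".
        let _builder = League::update_query();
        Ok(())
    }
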
diff --git a/canyon_macros/src/utils/function_parser.rs b/canyon_macros/src/utils/function_parser.rs
index 1e07a3d9..4ab62025 100644
--- a/canyon_macros/src/utils/function_parser.rs
+++ b/canyon_macros/src/utils/function_parser.rs
@@ -1,4 +1,7 @@
-use syn::{parse::{Parse, ParseBuffer}, ItemFn, Attribute, Visibility, Signature, Block};
+use syn::{
+    parse::{Parse, ParseBuffer},
+    Attribute, Block, ItemFn, Signature, Visibility,
+};
 
 /// Implementation of syn::Parse for the `#[canyon]` proc-macro
 #[derive(Clone)]
@@ -6,7 +9,7 @@ pub struct FunctionParser {
     pub attrs: Vec<Attribute>,
     pub vis: Visibility,
     pub sig: Signature,
-    pub block: Box<Block>
+    pub block: Box<Block>,
 }
 
 impl Parse for FunctionParser {
@@ -14,22 +17,18 @@ impl Parse for FunctionParser {
         let func = input.parse::<ItemFn>();
 
         if func.is_err() {
-            return Err(
-                syn::Error::new(
-                    input.cursor().span(), "Error on `fn main()`"
-                )
-            )
+            return Err(syn::Error::new(
+                input.cursor().span(),
+                "Error on `fn main()`",
+            ));
         }
 
         let func_ok = func.ok().unwrap();
-        Ok(
-            Self {
-                attrs: func_ok.attrs,
-                vis: func_ok.vis,
-                sig: func_ok.sig,
-                block: func_ok.block
-            }
-        )
-
+        Ok(Self {
+            attrs: func_ok.attrs,
+            vis: func_ok.vis,
+            sig: func_ok.sig,
+            block: func_ok.block,
+        })
     }
-}
\ No newline at end of file
+}
diff --git a/canyon_macros/src/utils/helpers.rs b/canyon_macros/src/utils/helpers.rs
index 477edf0b..9ad14792 100644
--- a/canyon_macros/src/utils/helpers.rs
+++ b/canyon_macros/src/utils/helpers.rs
@@ -1,14 +1,10 @@
-use proc_macro2::{Ident, TokenStream, Span};
-use syn::{
-    Token,
-    punctuated::Punctuated,
-    MetaNameValue
-};
+use proc_macro2::{Ident, Span, TokenStream};
+use syn::{punctuated::Punctuated, MetaNameValue, Token};
 
 use super::macro_tokens::MacroTokens;
 
 /// If the `canyon_entity` macro has valid attributes attached, and those attrs are the
-/// user's desired `table_name` and/or the `schema_name`, this method returns its 
+/// user's desired `table_name` and/or the `schema_name`, this method returns its
+/// correct form to be wired
as the table name that the CRUD methods requires for generate /// the queries pub fn table_schema_parser(macro_data: &MacroTokens<'_>) -> Result { @@ -16,12 +12,14 @@ pub fn table_schema_parser(macro_data: &MacroTokens<'_>) -> Result = None; for attr in macro_data.attrs { - if attr.path.segments.iter().any( - |seg| - seg.ident.to_string() == "canyon_macros" || seg.ident.to_string() == "canyon_entity" - ) { - let name_values_result: Result, syn::Error> = - attr.parse_args_with(Punctuated::parse_terminated); + if attr + .path + .segments + .iter() + .any(|seg| seg.ident == "canyon_macros" || seg.ident == "canyon_entity") + { + let name_values_result: Result, syn::Error> = + attr.parse_args_with(Punctuated::parse_terminated); match name_values_result { Ok(meta_name_values) => { @@ -38,60 +36,56 @@ pub fn table_schema_parser(macro_data: &MacroTokens<'_>) -> Result + _ => return Err( syn::Error::new_spanned( - Ident::new(&identifier, i.span().into()), - "Only string literals are valid values for the attribute arguments" - ).into_compile_error() + Ident::new(&identifier, i.span()), + "Only string literals are valid values for the attribute arguments" + ).into_compile_error() ), } } else { - return Err( - syn::Error::new( - Span::call_site(), - "Only string literals are valid values for the attribute arguments" - ).into_compile_error() - ); + return Err(syn::Error::new( + Span::call_site(), + "Only string literals are valid values for the attribute arguments", + ) + .into_compile_error()); } } - }, + } Err(_) => return Ok(macro_data.ty.to_string()), } let mut final_table_name = String::new(); - if schema.is_some() { - final_table_name.push_str( - format!("{}.", schema.unwrap()).as_str() - ) - } - if table_name.is_some() { - final_table_name.push_str(table_name.unwrap().as_str()) - } else { - final_table_name.push_str(macro_data.ty.to_string().as_str()) - } - - return Ok(final_table_name); + if schema.is_some() { + final_table_name.push_str(format!("{}.", schema.unwrap()).as_str()) + } + + if let Some(t_name) = table_name { + final_table_name.push_str(t_name.as_str()) + } else { + final_table_name.push_str(macro_data.ty.to_string().as_str()) + } + + return Ok(final_table_name); } } Ok(macro_data.ty.to_string()) } - /// Parses a syn::Identifier to get a snake case database name from the type identifier -/// TODO: #[macro(table_name = 'user_defined_db_table_name)]' +/// TODO: #[macro(table_name = 'user_defined_db_table_name)]' pub fn _database_table_name_from_struct(ty: &Ident) -> String { - - let struct_name: String = String::from(ty.to_string()); + let struct_name: String = ty.to_string(); let mut table_name: String = String::new(); - + let mut index = 0; for char in struct_name.chars() { if index < 1 { @@ -101,23 +95,22 @@ pub fn _database_table_name_from_struct(ty: &Ident) -> String { match char { n if n.is_ascii_uppercase() => { table_name.push('_'); - table_name.push(n.to_ascii_lowercase()); + table_name.push(n.to_ascii_lowercase()); } - _ => table_name.push(char) + _ => table_name.push(char), } - } + } } table_name } /// Parses a syn::Identifier to get a snake case database name from the type identifier -/// TODO: #[macro(table_name = 'user_defined_db_table_name)]' +/// TODO: #[macro(table_name = 'user_defined_db_table_name)]' pub fn _database_table_name_from_entity_name(ty: &str) -> String { - - let struct_name: String = String::from(ty.to_string()); + let struct_name: String = ty.to_string(); let mut table_name: String = String::new(); - + let mut index = 0; for char in struct_name.chars() 
{ if index < 1 { @@ -127,11 +120,11 @@ pub fn _database_table_name_from_entity_name(ty: &str) -> String { match char { n if n.is_ascii_uppercase() => { table_name.push('_'); - table_name.push(n.to_ascii_lowercase()); + table_name.push(n.to_ascii_lowercase()); } - _ => table_name.push(char) + _ => table_name.push(char), } - } + } } table_name @@ -140,7 +133,7 @@ pub fn _database_table_name_from_entity_name(ty: &str) -> String { /// Parses the content of an &str to get the related identifier of a type pub fn database_table_name_to_struct_ident(name: &str) -> Ident { let mut struct_name: String = String::new(); - + let mut first_iteration = true; let mut previous_was_underscore = false; @@ -152,19 +145,18 @@ pub fn database_table_name_to_struct_ident(name: &str) -> Ident { match char { n if n == '_' => { previous_was_underscore = true; - }, + } char if char.is_ascii_lowercase() => { if previous_was_underscore { struct_name.push(char.to_ascii_lowercase()) - } else { struct_name.push(char) } - }, - _ => panic!("Detected wrong format or broken convention for database table names") + } else { + struct_name.push(char) + } + } + _ => panic!("Detected wrong format or broken convention for database table names"), } - } + } } - Ident::new( - &struct_name, - proc_macro2::Span::call_site() - ) + Ident::new(&struct_name, proc_macro2::Span::call_site()) } diff --git a/canyon_macros/src/utils/macro_tokens.rs b/canyon_macros/src/utils/macro_tokens.rs index f35eef3c..e00c5424 100644 --- a/canyon_macros/src/utils/macro_tokens.rs +++ b/canyon_macros/src/utils/macro_tokens.rs @@ -1,24 +1,17 @@ use std::convert::TryFrom; -use canyon_manager::manager::field_annotation::EntityFieldAnnotation; +use canyon_observer::manager::field_annotation::EntityFieldAnnotation; use proc_macro2::Ident; -use syn::{ - Visibility, - Generics, - DeriveInput, - Fields, - Type, - Attribute -}; - -/// Provides a convenient way of store the data for the TokenStream +use syn::{Attribute, DeriveInput, Fields, Generics, Type, Visibility}; + +/// Provides a convenient way of store the data for the TokenStream /// received on a macro pub struct MacroTokens<'a> { pub vis: &'a Visibility, pub ty: &'a Ident, pub generics: &'a Generics, pub attrs: &'a Vec, - pub fields: &'a Fields + pub fields: &'a Fields, } impl<'a> MacroTokens<'a> { @@ -31,7 +24,7 @@ impl<'a> MacroTokens<'a> { fields: match &ast.data { syn::Data::Struct(ref s) => &s.fields, _ => panic!("This derive macro can only be automatically derived for structs"), - } + }, } } @@ -40,107 +33,93 @@ impl<'a> MacroTokens<'a> { pub fn _fields_with_visibility_and_types(&self) -> Vec<(Visibility, Ident, Type)> { self.fields .iter() - .map( |field| + .map(|field| { ( - field.vis.clone(), + field.vis.clone(), field.ident.as_ref().unwrap().clone(), - field.ty.clone() - ) - ) - .collect::>() + field.ty.clone(), + ) + }) + .collect::>() } - /// Gives a Vec ot tuples that contains the name and /// the type of every field on a Struct pub fn _fields_with_types(&self) -> Vec<(Ident, Type)> { self.fields .iter() - .map( |field| - ( - field.ident.as_ref().unwrap().clone(), - field.ty.clone() - ) - ) - .collect::>() + .map(|field| (field.ident.as_ref().unwrap().clone(), field.ty.clone())) + .collect::>() } - /// Gives a Vec of Ident with the fields of a Struct pub fn get_struct_fields(&self) -> Vec { self.fields .iter() - .map( |field| - field.ident.as_ref().unwrap().clone(), - ) - .collect::>() + .map(|field| field.ident.as_ref().unwrap().clone()) + .collect::>() } - /// Gives a Vec populated with the name 
of the fields of the struct + /// Gives a Vec populated with the name of the fields of the struct pub fn _get_struct_fields_as_collection_strings(&self) -> Vec { self.get_struct_fields() .iter() - .map( |ident| { - ident.to_owned().to_string() - } - ).collect::>() + .map(|ident| ident.to_owned().to_string()) + .collect::>() } /// Returns a Vec populated with the name of the fields of the struct /// already quote scaped for avoid the upper case column name mangling. - /// + /// /// If the type contains a `#[primary_key]` annotation (and), returns the /// name of the columns without the fields that maps against the column designed as /// primary key (if its present and its autoincremental attribute is setted to true) /// (autoincremental = true) or its without the autoincremental attribute, which leads /// to the same behaviour. - /// - /// Returns every field if there's no PK, or if it's present but autoincremental = false + /// + /// Returns every field if there's no PK, or if it's present but autoincremental = false pub fn get_column_names_pk_parsed(&self) -> Vec { self.fields .iter() - .filter( |field| { - if field.attrs.len() > 0 { - field.attrs.iter().any( |attr| - { - let a = attr.path.segments[0].clone().ident; - let b = attr.tokens.to_string(); - if a.to_string() == "primary_key" || b.to_string().contains("false") { - false - } else { true } - } - ) - } else { true } + .filter(|field| { + if !field.attrs.is_empty() { + field.attrs.iter().any(|attr| { + let a = attr.path.segments[0].clone().ident; + let b = attr.tokens.to_string(); + !(a == "primary_key" || b.contains("false")) + }) + } else { + true } - ).map( |c| - format!( "\"{}\"", c.ident.as_ref().unwrap().to_string() ) - ).collect::>() + }) + .map(|c| format!("\"{}\"", c.ident.as_ref().unwrap())) + .collect::>() } /// Retrieves the fields of the Struct as continuous String, comma separated pub fn get_struct_fields_as_strings(&self) -> String { - let column_names: String = self.get_struct_fields() + let column_names: String = self + .get_struct_fields() .iter() - .map( |ident| { - ident.to_owned().to_string() - }).collect::>() - .iter() - .map( |column| column.to_owned() + ", ") + .map(|ident| ident.to_owned().to_string()) + .collect::>() + .iter() + .map(|column| column.to_owned() + ", ") .collect::(); - + let mut column_names_as_chars = column_names.chars(); column_names_as_chars.next_back(); column_names_as_chars.next_back(); - + column_names_as_chars.as_str().to_owned() } - /// + /// pub fn get_pk_index(&self) -> Option { let mut pk_index = None; for (idx, field) in self.fields.iter().enumerate() { for attr in &field.attrs { - if attr.path.segments[0].clone().ident.to_string() == "primary_key" { + if attr.path.segments[0].clone().ident == "primary_key" { pk_index = Some(idx); } } @@ -151,45 +130,35 @@ impl<'a> MacroTokens<'a> { /// Utility for find the primary key attribute (if exists) and the /// column name (field) which belongs pub fn get_primary_key_annotation(&self) -> Option { - let f = self.fields - .iter() - .find( |field| - field.attrs.iter() - .map( |attr| - attr.path.segments[0].clone().ident - ).map( |ident| - ident.to_string() - ).find( |a| - a == "primary_key" - ) == Some("primary_key".to_string()) - ); - - f.map( |v| v.ident.clone().unwrap().to_string()) + let f = self.fields.iter().find(|field| { + field + .attrs + .iter() + .map(|attr| attr.path.segments[0].clone().ident) + .map(|ident| ident.to_string()) + .find(|a| a == "primary_key") + == Some("primary_key".to_string()) + }); + + f.map(|v| 
v.ident.clone().unwrap().to_string()) } - /// Utility for find the `foreign_key` attributes (if exists) + /// Utility for find the `foreign_key` attributes (if exists) pub fn get_fk_annotations(&self) -> Vec<(&Ident, EntityFieldAnnotation)> { let mut foreign_key_annotations = Vec::new(); - self.fields - .iter() - .for_each( |field| { - let attrs = field.attrs.iter() - .filter( |attr| - attr.path.segments[0].clone().ident.to_string() == "foreign_key" - ); - attrs.for_each( - |attr| { - let fk_parse = EntityFieldAnnotation::try_from(&attr); - if let Ok(fk_annotation) = fk_parse { - foreign_key_annotations.push( - (field.ident.as_ref().unwrap(), fk_annotation) - ) - } - } - ); + self.fields.iter().for_each(|field| { + let attrs = field + .attrs + .iter() + .filter(|attr| attr.path.segments[0].clone().ident == "foreign_key"); + attrs.for_each(|attr| { + let fk_parse = EntityFieldAnnotation::try_from(&attr); + if let Ok(fk_annotation) = fk_parse { + foreign_key_annotations.push((field.ident.as_ref().unwrap(), fk_annotation)) } - ); + }); + }); foreign_key_annotations } @@ -197,22 +166,20 @@ impl<'a> MacroTokens<'a> { /// Boolean that returns true if the type contains a `#[primary_key]` /// annotation. False otherwise. pub fn type_has_primary_key(&self) -> bool { - self.fields.iter() - .any( |field| - field.attrs.iter() - .map( |attr| - attr.path.segments[0].clone().ident - ).map( |ident| - ident.to_string() - ).find ( |a| - a == "primary_key" - ) == Some("primary_key".to_string()) - ) + self.fields.iter().any(|field| { + field + .attrs + .iter() + .map(|attr| attr.path.segments[0].clone().ident) + .map(|ident| ident.to_string()) + .find(|a| a == "primary_key") + == Some("primary_key".to_string()) + }) } /// Returns an String ready to be inserted on the VALUES Sql clause /// representing generic query parameters ($x). 
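As a plain-Rust illustration of the string this helper produces (the primary-key skipping rule is spelled out in the doc lines that follow), a minimal standalone sketch, not the macro code itself:

    // Builds the "$1, $2, ..." fragment for a VALUES clause with n_columns slots.
    fn placeholders(n_columns: usize) -> String {
        (1..=n_columns)
            .map(|i| format!("${i}"))
            .collect::<Vec<String>>()
            .join(", ")
    }

    fn main() {
        // With an autoincremental #[primary_key], one field is skipped upstream,
        // so a 4-field entity yields placeholders for only 3 columns:
        assert_eq!(placeholders(3), "$1, $2, $3");
    }
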
- /// + /// /// Already returns the correct number of placeholders, skipping one /// entry in the type contains a `#[primary_key]` pub fn placeholders_generator(&self) -> String { @@ -237,4 +204,4 @@ impl<'a> MacroTokens<'a> { placeholders } -} \ No newline at end of file +} diff --git a/canyon_macros/src/utils/mod.rs b/canyon_macros/src/utils/mod.rs index c03faabf..be2269df 100644 --- a/canyon_macros/src/utils/mod.rs +++ b/canyon_macros/src/utils/mod.rs @@ -1,3 +1,3 @@ -pub mod macro_tokens; pub mod function_parser; -pub mod helpers; \ No newline at end of file +pub mod helpers; +pub mod macro_tokens; diff --git a/canyon_manager/Cargo.toml b/canyon_manager/Cargo.toml deleted file mode 100644 index 4f15b661..00000000 --- a/canyon_manager/Cargo.toml +++ /dev/null @@ -1,14 +0,0 @@ -[package] -name = "canyon_manager" -version = "1.0.0" -edition = "2021" - -# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html - -[dependencies] -syn = { version = "1.0.86", features = ["full", "parsing"] } -quote = "1.0.9" -proc-macro2 = "1.0.27" - -# Debug -partialdebug = "0.2.0" \ No newline at end of file diff --git a/canyon_manager/src/lib.rs b/canyon_manager/src/lib.rs deleted file mode 100644 index 057531ad..00000000 --- a/canyon_manager/src/lib.rs +++ /dev/null @@ -1 +0,0 @@ -pub mod manager; \ No newline at end of file diff --git a/canyon_manager/src/manager/field_annotation.rs b/canyon_manager/src/manager/field_annotation.rs deleted file mode 100644 index 63c8c127..00000000 --- a/canyon_manager/src/manager/field_annotation.rs +++ /dev/null @@ -1,157 +0,0 @@ -use std::{convert::TryFrom, collections::HashMap}; -use proc_macro2::Ident; -use syn::{Attribute, Token, punctuated::Punctuated, MetaNameValue}; - -/// The available annotations for a field that belongs to any struct -/// annotaded with `#[canyon_entity]` -#[derive(Debug, Clone)] -pub enum EntityFieldAnnotation { - PrimaryKey(bool), - ForeignKey(String, String) -} - -impl EntityFieldAnnotation { - /// Returns the data of the [`EntityFieldAnnotation`] in a understandable format for `Canyon` - pub fn get_as_string(&self) -> String { - match &*self { - Self::PrimaryKey(autoincremental) => - format!("Annotation: PrimaryKey, Autoincremental: {}", autoincremental), - Self::ForeignKey(table, column) => - format!("Annotation: ForeignKey, Table: {}, Column: {}", table, column), - } - } - - fn primary_key_parser(ident: &Ident, attr_args: &Result, syn::Error>) -> syn::Result { - match attr_args { - Ok(name_value) => { - let mut data: HashMap = HashMap::new(); - for nv in name_value { - // The identifier - let attr_value_ident = nv.path.get_ident().unwrap().to_string(); - // The value after the Token[=] - let attr_value = match &nv.lit { - // Error if the token is not a boolean literal - syn::Lit::Bool(v) => v.value(), - _ => { - return Err( - syn::Error::new_spanned( - nv.path.clone(), - format!("Only bool literals are supported for the `{}` attribute", &attr_value_ident) - ) - ) - } - }; - data.insert(attr_value_ident, attr_value); - } - - Ok( - EntityFieldAnnotation::PrimaryKey( - match data.get("autoincremental") { - Some(aut) => aut.to_owned(), - None => { // TODO En vez de error, false para default - return Err( - syn::Error::new_spanned( - ident, - "Missed `autoincremental` argument on the Primary Key annotation".to_string() - ) - ) - }, - }, - ) - ) - }, - Err(_) => Ok( - EntityFieldAnnotation::PrimaryKey(true) - ) - } - } - - fn foreign_key_parser(ident: &Ident, attr_args: &Result, syn::Error>) -> syn::Result 
{ - match attr_args { - Ok(name_value) => { - let mut data: HashMap = HashMap::new(); - - for nv in name_value { - // The identifier - let attr_value_ident = nv.path.get_ident().unwrap().to_string(); - // The value after the Token[=] - let attr_value = match &nv.lit { - // Error if the token is not a string literal - // TODO Implement the option (or change it to) to use a Rust Ident instead a Str Lit - syn::Lit::Str(v) => v.value(), - _ => { - return Err( - syn::Error::new_spanned( - nv.path.clone(), - format!("Only string literals are supported for the `{}` attribute", attr_value_ident) - ) - ) - } - }; - data.insert(attr_value_ident, attr_value); - } - - Ok( - EntityFieldAnnotation::ForeignKey( - match data.get("table") { - Some(table) => table.to_owned(), - None => { - return Err( - syn::Error::new_spanned( - ident, - "Missed `table` argument on the Foreign Key annotation".to_string() - ) - ) - }, - }, - match data.get("column") { - Some(table) => table.to_owned(), - None => { - return Err( - syn::Error::new_spanned( - ident, - "Missed `column` argument on the Foreign Key annotation".to_string() - ) - ) - }, - }, - ) - ) - }, - Err(_) => return Err( - syn::Error::new_spanned( - ident, - "Error generating the Foreign Key".to_string() - ) - ), - } - } -} - - -impl TryFrom<&&Attribute> for EntityFieldAnnotation { - type Error = syn::Error; - - fn try_from(attribute: &&Attribute) -> Result { - let ident = attribute.path.segments[0].ident.clone(); - let name_values: Result, syn::Error> = - attribute.parse_args_with(Punctuated::parse_terminated); - - Ok( - match ident.clone().to_string().as_str() { - "primary_key" => - EntityFieldAnnotation::primary_key_parser(&ident, &name_values)?, - "foreign_key" => - EntityFieldAnnotation::foreign_key_parser(&ident, &name_values)?, - _ => { - return Err( - syn::Error::new_spanned( - ident.clone(), - format!("Unknown attribute `{}`", &ident) - ) - ) - } - } - ) - } -} \ No newline at end of file diff --git a/canyon_observer/Cargo.toml b/canyon_observer/Cargo.toml index 132e1d88..a4c4b0f0 100644 --- a/canyon_observer/Cargo.toml +++ b/canyon_observer/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "canyon_observer" -version = "1.0.0" +version = "0.0.1" edition = "2021" # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html @@ -11,11 +11,14 @@ tokio-postgres = { version = "0.7.2" , features=["with-chrono-0_4"] } async-trait = { version = "0.1.50" } regex = "1.5" walkdir = "2" + proc-macro2 = "1.0.27" +syn = { version = "1.0.86", features = ["full", "parsing"] } +quote = "1.0.9" # Debug partialdebug = "0.2.0" -canyon_crud = { path = "../canyon_crud" } -canyon_manager = { path = "../canyon_manager" } -canyon_connection = { path = "../canyon_connection" } \ No newline at end of file +# Internal dependencies +canyon_crud = { version = "0.0.1", path = "../canyon_crud" } +canyon_connection = { version = "0.0.1", path = "../canyon_connection" } \ No newline at end of file diff --git a/canyon_observer/src/constants.rs b/canyon_observer/src/constants.rs index a9c2a9f5..5383a0f2 100644 --- a/canyon_observer/src/constants.rs +++ b/canyon_observer/src/constants.rs @@ -1,5 +1,13 @@ +pub mod queries {} + pub mod postgresql_queries { - pub static FETCH_PUBLIC_SCHEMA: &'static str = + pub static CANYON_MEMORY_TABLE: &str = "CREATE TABLE IF NOT EXISTS canyon_memory ( + id INTEGER PRIMARY KEY GENERATED ALWAYS AS IDENTITY, + filepath VARCHAR NOT NULL, + struct_name VARCHAR NOT NULL + )"; + + pub static FETCH_PUBLIC_SCHEMA: &str = "SELECT 
gi.table_name, gi.column_name, @@ -7,11 +15,6 @@ pub mod postgresql_queries { gi.character_maximum_length, gi.is_nullable, gi.column_default, - gi.numeric_precision, - gi.numeric_scale, - gi.numeric_precision_radix, - gi.datetime_precision, - gi.interval_type, CASE WHEN starts_with(CAST(pg_catalog.pg_get_constraintdef(oid) AS TEXT), 'FOREIGN KEY') THEN CAST(pg_catalog.pg_get_constraintdef(oid) AS TEXT) ELSE NULL END AS foreign_key_info, CASE WHEN starts_with(CAST(pg_catalog.pg_get_constraintdef(oid) AS TEXT), 'FOREIGN KEY') @@ -31,34 +34,238 @@ pub mod postgresql_queries { table_schema = 'public';"; } -/// TODO -pub mod regex { - // TODO @gbm25 +pub mod mssql_queries { + pub static CANYON_MEMORY_TABLE: &str = "IF OBJECT_ID(N'[dbo].[canyon_memory]', N'U') IS NULL + BEGIN + CREATE TABLE dbo.canyon_memory ( + id INT PRIMARY KEY IDENTITY, + filepath NVARCHAR(250) NOT NULL, + struct_name NVARCHAR(100) NOT NULL + ); + END"; + + pub static FETCH_PUBLIC_SCHEMA: &str = + "SELECT + gi.table_name, + gi.column_name, + gi.data_type, + CAST(gi.character_maximum_length AS int), + gi.is_nullable, + gi.column_default, + fk.foreign_key_info, + fk.foreign_key_name, + pk.CONSTRAINT_NAME as primary_key_info, + pk.CONSTRAINT_NAME as primary_key_name + FROM INFORMATION_SCHEMA.COLUMNS gi + LEFT JOIN ( + SELECT + SCHEMA_NAME(f.schema_id) schemaName, + OBJECT_NAME(f.parent_object_id) ConstrainedTable, + COL_NAME(fc.parent_object_id, fc.parent_column_id) ConstrainedColumn, + f.name foreign_key_name, + CONCAT('FOREIGN KEY (', + COL_NAME(fc.parent_object_id, fc.parent_column_id), ') REFERENCES ', + OBJECT_NAME(f.referenced_object_id), + '(', + COL_NAME(fc.referenced_object_id, fc.referenced_column_id) + , ')') AS foreign_key_info + FROM + sys.foreign_keys AS f + INNER JOIN + sys.foreign_key_columns AS fc + ON f.OBJECT_ID = fc.constraint_object_id + INNER JOIN + sys.tables t + ON t.OBJECT_ID = fc.referenced_object_id + ) AS fk + ON fk.ConstrainedTable = gi.TABLE_NAME AND fk.ConstrainedColumn = gi.COLUMN_NAME AND gi.TABLE_SCHEMA = fk.schemaName + LEFT JOIN ( + SELECT * + FROM INFORMATION_SCHEMA.KEY_COLUMN_USAGE kcu + WHERE OBJECTPROPERTY(OBJECT_ID(kcu.CONSTRAINT_SCHEMA + '.' 
+ QUOTENAME(kcu.CONSTRAINT_NAME)), 'IsPrimaryKey') = 1 + ) AS pk + ON pk.TABLE_NAME = gi.TABLE_NAME AND pk.CONSTRAINT_SCHEMA = gi.TABLE_SCHEMA AND pk.COLUMN_NAME = gi.COLUMN_NAME + WHERE gi.TABLE_SCHEMA = 'dbo'"; } -/// TODO +/// Constant string values that holds regex patterns +pub mod regex_patterns { + pub const EXTRACT_RUST_OPT_REGEX: &str = r"[Oo][Pp][Tt][Ii][Oo][Nn]<(?P[\w<>]+)>"; + pub const EXTRACT_FOREIGN_KEY_INFO: &str = + r"\w+\s\w+\s\((?P\w+)\)\s\w+\s(?P\w+)\((?P\w+)\)"; +} + +/// Constant values that maps the string representation of the Rust +/// built-in types +#[allow(unused)] pub mod rust_type { - pub const I32: &'static str = "i32"; - pub const OPT_I32: &'static str = "Option"; - pub const I64: &'static str = "i64"; - pub const OPT_I64: &'static str = "Option"; - pub const STRING: &'static str = "String"; - pub const OPT_STRING: &'static str = "Option"; - pub const BOOL: &'static str = "bool"; - pub const OPT_BOOL: &'static str = "Option"; - pub const NAIVE_DATE: &'static str = "NaiveDate"; - pub const OPT_NAIVE_DATE: &'static str = "Option"; + pub const I8: &str = "i8"; + pub const OPT_I8: &str = "Option"; + pub const U8: &str = "u8"; + pub const OPT_U8: &str = "Option"; + + pub const I16: &str = "i16"; + pub const OPT_U16: &str = "Option"; + pub const U16: &str = "u16"; + pub const OPT_I16: &str = "Option"; + + pub const I32: &str = "i32"; + pub const OPT_I32: &str = "Option"; + pub const U32: &str = "u32"; + pub const OPT_U32: &str = "Option"; + + pub const I64: &str = "i64"; + pub const OPT_I64: &str = "Option"; + pub const U64: &str = "u64"; + pub const OPT_U64: &str = "Option"; + + pub const F32: &str = "f32"; + pub const OPT_F32: &str = "Option"; + pub const F64: &str = "f64"; + pub const OPT_F64: &str = "Option"; + + pub const STRING: &str = "String"; + pub const OPT_STRING: &str = "Option"; + + pub const BOOL: &str = "bool"; + pub const OPT_BOOL: &str = "Option"; + + pub const NAIVE_DATE: &str = "NaiveDate"; + pub const OPT_NAIVE_DATE: &str = "Option"; + + pub const NAIVE_TIME: &str = "NaiveTime"; + pub const OPT_NAIVE_TIME: &str = "Option"; + + pub const NAIVE_DATE_TIME: &str = "NaiveDateTime"; + pub const OPT_NAIVE_DATE_TIME: &str = "Option"; } /// TODO pub mod postgresql_type { - // TODO @gbm25 + pub const INT_8: &str = "int8"; + pub const SMALL_INT: &str = "smallint"; + pub const INTEGER: &str = "integer"; + pub const BIGINT: &str = "bigint"; + pub const TEXT: &str = "text"; + pub const BOOLEAN: &str = "boolean"; + pub const DATE: &str = "date"; + pub const TIME: &str = "time"; + pub const DATETIME: &str = "timestamp without time zone"; +} + +pub mod sqlserver_type { + pub const TINY_INT: &str = "TINY INT"; + pub const SMALL_INT: &str = "SMALL INT"; + pub const INT: &str = "INT"; + pub const BIGINT: &str = "BIGINT"; + // TODO More information needed, the number of characters may need to be variable and user-defined + pub const NVARCHAR: &str = "nvarchar(max)"; + pub const BIT: &str = "BIT"; + pub const DATE: &str = "DATE"; + pub const TIME: &str = "TIME"; + pub const DATETIME: &str = "DATETIME2"; } /// Contains fragments queries to be invoked as const items and to be concatenated /// with dynamic data -/// +/// /// Ex: ` format!("{} PRIMARY KEY GENERATED ALWAYS AS IDENTITY", postgres_datatype_syntax)` pub mod query_chunk { // TODO @gbm25 -} \ No newline at end of file +} + +pub mod mocked_data { + use canyon_connection::lazy_static::lazy_static; + + use crate::migrations::information_schema::{ColumnMetadata, TableMetadata}; + + lazy_static! 
{ + pub static ref TABLE_METADATA_LEAGUE_EX: TableMetadata = TableMetadata { + table_name: "league".to_string(), + columns: vec![ + ColumnMetadata { + column_name: "id".to_owned(), + datatype: "int".to_owned(), + character_maximum_length: None, + is_nullable: false, + column_default: None, + foreign_key_info: None, + foreign_key_name: None, + primary_key_info: Some("PK__league__3213E83FBDA92571".to_owned()), + primary_key_name: Some("PK__league__3213E83FBDA92571".to_owned()), + is_identity: false, + identity_generation: None + }, + ColumnMetadata { + column_name: "ext_id".to_owned(), + datatype: "bigint".to_owned(), + character_maximum_length: None, + is_nullable: false, + column_default: None, + foreign_key_info: None, + foreign_key_name: None, + primary_key_info: None, + primary_key_name: None, + is_identity: false, + identity_generation: None + }, + ColumnMetadata { + column_name: "slug".to_owned(), + datatype: "nvarchar".to_owned(), + character_maximum_length: None, + is_nullable: false, + column_default: None, + foreign_key_info: None, + foreign_key_name: None, + primary_key_info: None, + primary_key_name: None, + is_identity: false, + identity_generation: None + }, + ColumnMetadata { + column_name: "name".to_owned(), + datatype: "nvarchar".to_owned(), + character_maximum_length: None, + is_nullable: false, + column_default: None, + foreign_key_info: None, + foreign_key_name: None, + primary_key_info: None, + primary_key_name: None, + is_identity: false, + identity_generation: None + }, + ColumnMetadata { + column_name: "region".to_owned(), + datatype: "nvarchar".to_owned(), + character_maximum_length: None, + is_nullable: false, + column_default: None, + foreign_key_info: None, + foreign_key_name: None, + primary_key_info: None, + primary_key_name: None, + is_identity: false, + identity_generation: None + }, + ColumnMetadata { + column_name: "image_url".to_owned(), + datatype: "nvarchar".to_owned(), + character_maximum_length: None, + is_nullable: false, + column_default: None, + foreign_key_info: None, + foreign_key_name: None, + primary_key_info: None, + primary_key_name: None, + is_identity: false, + identity_generation: None + } + ] + }; + pub static ref NON_MATCHING_TABLE_METADATA: TableMetadata = TableMetadata { + table_name: "random_name_to_assert_false".to_string(), + columns: vec![] + }; + } +} diff --git a/canyon_observer/src/handler.rs b/canyon_observer/src/handler.rs deleted file mode 100644 index f7af2e4e..00000000 --- a/canyon_observer/src/handler.rs +++ /dev/null @@ -1,260 +0,0 @@ -use tokio_postgres::{types::Type, Row}; -use partialdebug::placeholder::PartialDebug; - -use canyon_crud::crud::Transaction; - -use super::{ - CANYON_REGISTER_ENTITIES, - memory::CanyonMemory, - postgresql::{ - information_schema::{ - information_schema_row_mapper::{ - RowTable, - RelatedColumn, - ColumnTypeValue - }, - rows_to_table_mapper::{ - DatabaseTable, - DatabaseTableColumn - } - }, - migrations::DatabaseSyncOperations - } -}; - - -/// Provides the necessary entities to let Canyon perform and develop -/// it's full potential, completly managing all the entities written by -/// the user and annotated with the `#[canyon_entity]` -#[derive(PartialDebug)] -pub struct CanyonHandler; - -// Makes this structure able to make queries to the database -impl Transaction for CanyonHandler {} - -impl CanyonHandler { - /// Launches the mechanism to parse the Database schema, the Canyon register - /// and the database table with the memory of Canyon to perform the - /// Migrations to completly 
handle the necessary database actions - pub async fn run() { - let mut db_operation = DatabaseSyncOperations::new(); - db_operation.fill_operations( - CanyonMemory::remember().await, - CANYON_REGISTER_ENTITIES.lock().unwrap().clone(), - Self::fetch_postgres_database_status().await - ).await; - } - - /** - Fetches the *information schema* of the *public schema* of a `PostgreSQL` database, - in order to retrieve the relation between the tables and it's columns, constraints - configurations... - - ```ignore - table_name column_name data_type is_nullable - --------------------------------------------------------------- - canyon_memory filename character varying NO - canyon_memory id integer NO - canyon_memory struct_name character varying NO - league ext_id bigint YES - league id integer NO - league image_url text YES - league name text YES - league region text YES - league slug text YES - tournament end_date date YES - tournament ext_id bigint YES - tournament id integer NO - tournament league integer YES - tournament slug text YES - tournament start_date date YES - ``` - Not all columns included in the example table. - - Too see all the columns that will be mapeed, see the [`struct@RowTable`] - */ - async fn fetch_postgres_database_status<'b>() -> Vec> { - let results = Self::query( - super::constants::postgresql_queries::FETCH_PUBLIC_SCHEMA, - vec![], - "" - ).await.ok().unwrap().wrapper; - - let mut schema_info: Vec = Vec::new(); - - for res_row in results.iter() { - let unique_table = schema_info.iter_mut().find( |table| { - table.table_name == res_row.get::<&str, String>("table_name") - }); - match unique_table { - Some(table) => { - /* If a table entity it's already present on the collection, we add it - the founded columns related to the table */ - Self::get_row_postgres_columns_for_table(res_row, table); - } - None => { - /* If there's no table for a given "table_name" property on the - collection yet, we must create a new instance and attach it - the founded columns data in this iteration */ - let mut new_table = RowTable { - table_name: res_row.get::<&str, String>("table_name"), - columns: Vec::new(), - }; - Self::get_row_postgres_columns_for_table(res_row, &mut new_table); - schema_info.push(new_table); - } - }; - } - Self::generate_mapped_table_entities(schema_info) - } - - /// Groups all the [`RowTable`] entities that contains the info about a complete table into - /// a single entity of type [`DatabaseTable`] - fn generate_mapped_table_entities<'b>(schema_info: Vec) -> Vec> { - let mut database_tables = Vec::new(); - - for mapped_table in &schema_info { - let unique_database_table = database_tables.iter_mut().find(|table: &&mut DatabaseTable| { - table.table_name == mapped_table.table_name - }); - match unique_database_table { - Some(table) => { - Self::map_splitted_column_info_into_entity( - mapped_table, table, - ) - } - None => { - let mut new_unique_database_table = DatabaseTable { - table_name: mapped_table.table_name.clone(), - columns: Vec::new(), - }; - Self::map_splitted_column_info_into_entity( - mapped_table, &mut new_unique_database_table, - ); - database_tables.push(new_unique_database_table); - } - }; - } - - database_tables - } - - /// Generates the [`DatabaseTableColumn`] elements that represents the metadata and information of a table column - /// and belongs to a concrete [`DatabaseTable`], being them extracted from a [`RowTable`] element that - /// it's related to only one table - fn map_splitted_column_info_into_entity( - mapped_table: &RowTable, - 
table_entity: &mut DatabaseTable) - { - let mut entity_column = DatabaseTableColumn::new(); - for (idx, column) in mapped_table.columns.iter().enumerate() { - let column_identifier = &column.column_identifier; - if column_identifier == "column_name" { - if let ColumnTypeValue::StringValue(value) = &column.value { - entity_column.column_name = value.to_owned().unwrap() - } - } else if column_identifier == "data_type" { - if let ColumnTypeValue::StringValue(value) = &column.value { - entity_column.postgres_datatype = value.to_owned().unwrap() - } - } else if column_identifier == "character_maximum_length" { - if let ColumnTypeValue::IntValue(value) = &column.value { - entity_column.character_maximum_length = value.to_owned() - } - } else if column_identifier == "is_nullable" { - if let ColumnTypeValue::StringValue(value) = &column.value { - entity_column.is_nullable = matches!(value.as_ref().unwrap().as_str(), "YES") - } - } else if column_identifier == "column_default" { - if let ColumnTypeValue::StringValue(value) = &column.value { - entity_column.column_default = value.to_owned() - } - } else if column_identifier == "numeric_precision" { - if let ColumnTypeValue::IntValue(value) = &column.value { - entity_column.numeric_precision = value.to_owned() - } - } else if column_identifier == "numeric_scale" { - if let ColumnTypeValue::IntValue(value) = &column.value { - entity_column.numeric_scale = value.to_owned() - } - } else if column_identifier == "numeric_precision_radix" { - if let ColumnTypeValue::IntValue(value) = &column.value { - entity_column.numeric_precision_radix = value.to_owned() - } - } else if column_identifier == "datetime_precision" { - if let ColumnTypeValue::IntValue(value) = &column.value { - entity_column.datetime_precision = value.to_owned() - } - } else if column_identifier == "interval_type" { - if let ColumnTypeValue::StringValue(value) = &column.value { - entity_column.interval_type = value.to_owned() - } - } else if column_identifier == "foreign_key_info" { - if let ColumnTypeValue::StringValue(value) = &column.value { - entity_column.foreign_key_info = value.to_owned() - } - } else if column_identifier == "foreign_key_name" { - if let ColumnTypeValue::StringValue(value) = &column.value { - entity_column.foreign_key_name = value.to_owned() - } - }else if column_identifier == "primary_key_info" { - if let ColumnTypeValue::StringValue(value) = &column.value { - entity_column.primary_key_info = value.to_owned() - } - } else if column_identifier == "primary_key_name" { - if let ColumnTypeValue::StringValue(value) = &column.value { - entity_column.primary_key_name = value.to_owned() - } - } else if column_identifier == "is_identity" { - if let ColumnTypeValue::StringValue(value) = &column.value { - entity_column.is_identity = matches!(value.as_ref().unwrap().as_str(), "YES") - } - } else if column_identifier == "identity_generation" { - if let ColumnTypeValue::StringValue(value) = &column.value { - entity_column.identity_generation = value.to_owned() - } - }; - // Just for split the related column data into what will be the values for - // every DatabaseTableColumn. 
- // Every times that we find an &RelatedColumn which column identifier - // is == "identity_generation", we know that we finished to set the values - // for a new DatabaseTableColumn - if &column.column_identifier == "identity_generation" { - table_entity.columns.push(entity_column.clone()); - if idx == mapped_table.columns.len() - 1 { - entity_column = DatabaseTableColumn::new(); - } - } - } - } - - /// Maps a [`tokio_postgres`] [`Row`] from the `information_schema` table into a `Canyon's` [`RowTable`], - /// by extracting every single column in a Row and making a relation between the column's name, - /// the datatype of the value that it's holding, and the value itself. - fn get_row_postgres_columns_for_table(res_row: &Row, table: &mut RowTable) { - for postgre_column in res_row.columns().iter() { - if postgre_column.name() != "table_name" { // Discards the column "table_name" - let mut new_column = RelatedColumn { - column_identifier: postgre_column.name().to_string(), - datatype: postgre_column.type_().to_string(), - value: ColumnTypeValue::NoneValue, - }; - - match *postgre_column.type_() { - Type::NAME | Type::VARCHAR | Type::TEXT => { - new_column.value = ColumnTypeValue::StringValue( - res_row.get::<&str, Option>(postgre_column.name()) - ); - } - Type::INT4 => { - new_column.value = ColumnTypeValue::IntValue( - res_row.get::<&str, Option>(postgre_column.name()) - ); - } - _ => new_column.value = ColumnTypeValue::NoneValue - } - table.columns.push(new_column) - } - } - } -} \ No newline at end of file diff --git a/canyon_observer/src/lib.rs b/canyon_observer/src/lib.rs index f58a17d0..2af5a36f 100644 --- a/canyon_observer/src/lib.rs +++ b/canyon_observer/src/lib.rs @@ -1,32 +1,29 @@ /// Holds the data needed by Canyon when the user /// application it's running. -/// +/// /// Takes care about provide a namespace where retrieve the /// database credentials in only one place -/// +/// /// Takes care about track what data structures Canyon /// should be managing -/// +/// /// Takes care about the queries that Canyon has to execute /// in order to perform the migrations - - -// Database Engine related -pub mod postgresql; +pub mod migrations; extern crate canyon_crud; // The migrator tool -pub mod handler; -mod memory; mod constants; - -use std::sync::Mutex; - -use crate::postgresql::register_types::CanyonRegisterEntity; - - -// lazy_static! { // TODO implement an access control polity by number of times read the static refs - pub static CANYON_REGISTER_ENTITIES: Mutex>> = Mutex::new(Vec::new()); - pub static QUERIES_TO_EXECUTE: Mutex> = Mutex::new(Vec::new()); -// } +pub mod manager; + +use crate::migrations::register_types::CanyonRegisterEntity; +use canyon_connection::lazy_static::lazy_static; +use std::{collections::HashMap, sync::Mutex}; + +pub static CANYON_REGISTER_ENTITIES: Mutex>> = + Mutex::new(Vec::new()); +lazy_static! 
{ + pub static ref QUERIES_TO_EXECUTE: Mutex>> = + Mutex::new(HashMap::new()); +} diff --git a/canyon_manager/src/manager/entity.rs b/canyon_observer/src/manager/entity.rs similarity index 54% rename from canyon_manager/src/manager/entity.rs rename to canyon_observer/src/manager/entity.rs index 2509c6ee..c95c5836 100644 --- a/canyon_manager/src/manager/entity.rs +++ b/canyon_observer/src/manager/entity.rs @@ -1,8 +1,11 @@ -use std::convert::TryFrom; +use partialdebug::placeholder::PartialDebug; use proc_macro2::{Ident, TokenStream}; -use syn::{parse::{Parse, ParseBuffer}, ItemStruct, Visibility, Generics, Attribute}; use quote::quote; -use partialdebug::placeholder::PartialDebug; +use std::convert::TryFrom; +use syn::{ + parse::{Parse, ParseBuffer}, + Attribute, Generics, ItemStruct, Visibility, +}; use super::entity_fields::EntityField; @@ -16,7 +19,7 @@ pub struct CanyonEntity { pub vis: Visibility, pub generics: Generics, pub fields: Vec, - pub attrs: Vec + pub attrs: Vec, } unsafe impl Send for CanyonEntity {} @@ -30,73 +33,80 @@ impl CanyonEntity { pub fn get_fields_as_enum_variants(&self) -> Vec { self.fields .iter() - .map( |f| { + .map(|f| { let field_name = &f.name; - quote!{ #field_name } + quote! { #field_name } }) - .collect::>() + .collect::>() } /// Generates as many variants for the enum as fields has the type - /// which this enum is related to, and that type it's the entity - /// stored in [`CanyonEntity`] - /// - /// Makes a variant `#field_name(#ty)` where `#ty` it's the type - /// of the corresponding field - pub fn get_fields_as_enum_variants_with_type(&self) -> Vec { + /// which this enum is related to. + /// + /// Makes a variant `#field_name(#ty)` where `#ty` it's a trait object + /// of type [`canyon_crud::bounds::QueryParameters`] + pub fn get_fields_as_enum_variants_with_value(&self) -> Vec { self.fields .iter() - .map( |f| { + .map(|f| { let field_name = &f.name; - let ty = &f.field_type; - quote!{ #field_name(#ty) } + quote! { #field_name(&'a dyn canyon_sql::crud::bounds::QueryParameters<'a>) } + }) + .collect::>() + } + + pub fn create_match_arm_for_get_variant_as_str(&self, enum_name: &Ident) -> Vec { + self.fields + .iter() + .map(|f| { + let field_name = &f.name; + let field_name_as_str = f.name.to_string(); + + quote! { + #enum_name::#field_name => #field_name_as_str + } }) - .collect::>() + .collect::>() } /// Generates an implementation of the match pattern to find whatever variant /// is being requested when the method `.field_name_as_str(self)` it's invoked over some - /// instance that implements the `canyon_sql::bounds::FieldIdentifier` trait - pub fn create_match_arm_for_get_variant_as_string(&self, enum_name: &Ident) -> Vec { + /// instance that implements the `canyon_sql::crud::bounds::FieldIdentifier` trait + pub fn create_match_arm_for_get_variant_as_string( + &self, + enum_name: &Ident, + ) -> Vec { self.fields .iter() - .map( |f| { + .map(|f| { let field_name = &f.name; let field_name_as_string = f.name.to_string(); - quote! { + quote! 
{ #enum_name::#field_name => #field_name_as_string.to_string() } }) - .collect::>() + .collect::>() } /// Generates an implementation of the match pattern to find whatever variant /// is being requested when the method `.value()` it's invoked over some - /// instance that implements the `canyon_sql::bounds::FieldValueIdentifier` trait - pub fn create_match_arm_for_relate_fields_with_values(&self, enum_name: &Ident) -> Vec { + /// instance that implements the `canyon_sql::crud::bounds::FieldValueIdentifier` trait + pub fn create_match_arm_for_relate_fields_with_values( + &self, + enum_name: &Ident, + ) -> Vec { self.fields .iter() - .map( |f| { + .map(|f| { let field_name = &f.name; let field_name_as_string = f.name.to_string(); - let field_type_as_string = f.get_field_type_as_string(); - let quote = if field_type_as_string.contains("Option") { - quote! { - #enum_name::#field_name(v) => - format!("{} {}", #field_name_as_string.to_string(), v.unwrap().to_string()) - } - } else { - quote! { - #enum_name::#field_name(v) => - format!("{} {}", #field_name_as_string.clone().to_string(), v.to_string()) - } - }; - - quote + quote! { + #enum_name::#field_name(v) => (#field_name_as_string, v) + } }) - .collect::>() + .collect::>() } pub fn get_attrs_as_token_stream(&self) -> Vec { @@ -105,16 +115,16 @@ impl CanyonEntity { .map(|f| { let name = &f.name; let ty = &f.field_type; - quote!{ pub #name: #ty } + quote! { pub #name: #ty } }) - .collect::>() + .collect::>() } } impl Parse for CanyonEntity { fn parse(input: &ParseBuffer) -> syn::Result { let _struct = input.parse::()?; - + // Retrive the struct fields let mut parsed_fields: Vec = Vec::new(); for field in _struct.fields { @@ -122,16 +132,14 @@ impl Parse for CanyonEntity { parsed_fields.push(struct_attribute) } - Ok( - Self { - struct_name: _struct.ident, - user_table_name: None, - user_schema_name: None, - vis: _struct.vis, - generics: _struct.generics, - fields: parsed_fields, - attrs: _struct.attrs - } - ) + Ok(Self { + struct_name: _struct.ident, + user_table_name: None, + user_schema_name: None, + vis: _struct.vis, + generics: _struct.generics, + fields: parsed_fields, + attrs: _struct.attrs, + }) } } diff --git a/canyon_manager/src/manager/entity_fields.rs b/canyon_observer/src/manager/entity_fields.rs similarity index 78% rename from canyon_manager/src/manager/entity_fields.rs rename to canyon_observer/src/manager/entity_fields.rs index 2ad3c1dc..7dfd63f4 100644 --- a/canyon_manager/src/manager/entity_fields.rs +++ b/canyon_observer/src/manager/entity_fields.rs @@ -1,8 +1,8 @@ -use std::convert::TryFrom; use partialdebug::placeholder::PartialDebug; use proc_macro2::Ident; use quote::ToTokens; -use syn::{Type, Attribute, Field}; +use std::convert::TryFrom; +use syn::{Attribute, Field, Type}; use super::field_annotation::EntityFieldAnnotation; /// Represents any of the fields and annotations (if any valid annotation) found for an Rust struct @@ -35,42 +35,35 @@ impl EntityField { } } - pub fn new(name: &Ident, raw_helper_attributes: &[Attribute], ty: &Type) -> syn::Result { let mut attributes = Vec::new(); for attr in raw_helper_attributes { let result = Some(EntityFieldAnnotation::try_from(&attr)?); match result { Some(res) => attributes.push(res), - None => continue + None => continue, } } - Ok( - Self { - name: name.clone(), - field_type: ty.clone(), - attributes: attributes - } - ) + Ok(Self { + name: name.clone(), + field_type: ty.clone(), + attributes, + }) } } - impl TryFrom<&Field> for EntityField { type Error = syn::Error; fn 
try_from(field: &Field) -> Result { - let name = field - .ident - .as_ref() - .ok_or_else(|| { - syn::Error::new_spanned( - field.to_token_stream(), - "Expected a structure with named fields, unnamed field given" - ) - })?; - - Self::new(&name, &field.attrs, &field.ty) + let name = field.ident.as_ref().ok_or_else(|| { + syn::Error::new_spanned( + field.to_token_stream(), + "Expected a structure with named fields, unnamed field given", + ) + })?; + + Self::new(name, &field.attrs, &field.ty) } } diff --git a/canyon_observer/src/manager/field_annotation.rs b/canyon_observer/src/manager/field_annotation.rs new file mode 100644 index 00000000..8c01615d --- /dev/null +++ b/canyon_observer/src/manager/field_annotation.rs @@ -0,0 +1,150 @@ +use proc_macro2::Ident; +use std::{collections::HashMap, convert::TryFrom}; +use syn::{punctuated::Punctuated, Attribute, MetaNameValue, Token}; + +/// The available annotations for a field that belongs to any struct +/// annotaded with `#[canyon_entity]` +#[derive(Debug, Clone)] +pub enum EntityFieldAnnotation { + PrimaryKey(bool), + ForeignKey(String, String), +} + +impl EntityFieldAnnotation { + /// Returns the data of the [`EntityFieldAnnotation`] in a understandable format for + /// operations that requires character matching + pub fn get_as_string(&self) -> String { + match self { + Self::PrimaryKey(autoincremental) => { + format!("Annotation: PrimaryKey, Autoincremental: {autoincremental}") + } + Self::ForeignKey(table, column) => { + format!("Annotation: ForeignKey, Table: {table}, Column: {column}") + } + } + } + + /// Retrieves the user defined data in the #[primary_key] attribute + fn primary_key_parser( + ident: &Ident, + attr_args: &Result, syn::Error>, + ) -> syn::Result { + match attr_args { + Ok(name_value) => { + let mut data: HashMap = HashMap::new(); + for nv in name_value { + // The identifier + let attr_value_ident = nv.path.get_ident().unwrap().to_string(); + // The value after the Token[=] + let attr_value = match &nv.lit { + // Error if the token is not a boolean literal + syn::Lit::Bool(v) => v.value(), + _ => { + return Err(syn::Error::new_spanned( + nv.path.clone(), + format!( + "Only bool literals are supported for the `{}` attribute", + &attr_value_ident + ), + )) + } + }; + data.insert(attr_value_ident, attr_value); + } + + Ok(EntityFieldAnnotation::PrimaryKey( + match data.get("autoincremental") { + Some(aut) => aut.to_owned(), + None => { + // TODO En vez de error, false para default + return Err(syn::Error::new_spanned( + ident, + "Missed `autoincremental` argument on the Primary Key annotation" + .to_string(), + )); + } + }, + )) + } + Err(_) => Ok(EntityFieldAnnotation::PrimaryKey(true)), + } + } + + fn foreign_key_parser( + ident: &Ident, + attr_args: &Result, syn::Error>, + ) -> syn::Result { + match attr_args { + Ok(name_value) => { + let mut data: HashMap = HashMap::new(); + + for nv in name_value { + // The identifier + let attr_value_ident = nv.path.get_ident().unwrap().to_string(); + // The value after the Token[=] + let attr_value = match &nv.lit { + // Error if the token is not a string literal + // TODO Implement the option (or change it to) to use a Rust Ident instead a Str Lit + syn::Lit::Str(v) => v.value(), + _ => { + return Err( + syn::Error::new_spanned( + nv.path.clone(), + format!("Only string literals are supported for the `{attr_value_ident}` attribute") + ) + ) + } + }; + data.insert(attr_value_ident, attr_value); + } + + Ok(EntityFieldAnnotation::ForeignKey( + match data.get("table") { + Some(table) => 
table.to_owned(), + None => { + return Err(syn::Error::new_spanned( + ident, + "Missed `table` argument on the Foreign Key annotation".to_string(), + )) + } + }, + match data.get("column") { + Some(table) => table.to_owned(), + None => { + return Err(syn::Error::new_spanned( + ident, + "Missed `column` argument on the Foreign Key annotation" + .to_string(), + )) + } + }, + )) + } + Err(_) => Err(syn::Error::new_spanned( + ident, + "Error generating the Foreign Key".to_string(), + )), + } + } +} + +impl TryFrom<&&Attribute> for EntityFieldAnnotation { + type Error = syn::Error; + + fn try_from(attribute: &&Attribute) -> Result { + let ident = attribute.path.segments[0].ident.clone(); + let name_values: Result, syn::Error> = + attribute.parse_args_with(Punctuated::parse_terminated); + + Ok(match ident.to_string().as_str() { + "primary_key" => EntityFieldAnnotation::primary_key_parser(&ident, &name_values)?, + "foreign_key" => EntityFieldAnnotation::foreign_key_parser(&ident, &name_values)?, + _ => { + return Err(syn::Error::new_spanned( + ident.clone(), + format!("Unknown attribute `{}`", &ident), + )) + } + }) + } +} diff --git a/canyon_manager/src/manager/manager_builder.rs b/canyon_observer/src/manager/manager_builder.rs similarity index 72% rename from canyon_manager/src/manager/manager_builder.rs rename to canyon_observer/src/manager/manager_builder.rs index 90c2151f..ef592500 100644 --- a/canyon_manager/src/manager/manager_builder.rs +++ b/canyon_observer/src/manager/manager_builder.rs @@ -1,4 +1,4 @@ -use proc_macro2::{TokenStream, Ident, Span}; +use proc_macro2::{Ident, Span, TokenStream}; use quote::quote; use syn::{Attribute, Generics, Visibility}; @@ -24,7 +24,7 @@ pub fn generate_user_struct(canyon_entity: &CanyonEntity) -> TokenStream { /// Auto-generated enum to represent every field of the related type /// as a variant of an enum that it's named with the concatenation /// of the type identifier + Field -/// +/// /// The idea it's to have a representation of the field name as an enum /// variant, avoiding to let the user passing around Strings and instead, /// passing variants of a concrete enumeration type, that when required, @@ -33,15 +33,10 @@ pub fn generate_user_struct(canyon_entity: &CanyonEntity) -> TokenStream { pub fn generate_enum_with_fields(canyon_entity: &CanyonEntity) -> TokenStream { let ty = &canyon_entity.struct_name; let struct_name = canyon_entity.struct_name.to_string(); - let enum_name = Ident::new( - (struct_name + "Field").as_str(), - Span::call_site() - ); + let enum_name = Ident::new((struct_name + "Field").as_str(), Span::call_site()); - let fields_names = &canyon_entity - .get_fields_as_enum_variants(); - let match_arms = &canyon_entity - .create_match_arm_for_get_variant_as_string(&enum_name); + let fields_names = &canyon_entity.get_fields_as_enum_variants(); + let match_arms_str = &canyon_entity.create_match_arm_for_get_variant_as_str(&enum_name); let visibility = &canyon_entity.vis; let generics = &canyon_entity.generics; @@ -54,23 +49,23 @@ pub fn generate_enum_with_fields(canyon_entity: &CanyonEntity) -> TokenStream { /// Auto-generated enum to represent every field of the related type /// as a variant of an enum that it's named with the concatenation /// of the type identifier + Field - /// + /// /// The idea it's to have a representation of the field name as an enum /// variant, avoiding the user to have to pass around Strings and instead, /// passing variants of a concrete enumeration type, that when required, /// will be called though macro 
code to obtain the &str representation /// of the field name. - /// + /// /// That's particulary useful in Canyon when working with queries being constructed /// through the [`QueryBuilder`], when one of the methods requieres to get /// a column name (which is the name of some field of the type) as a parameter - /// + /// /// ``` /// pub struct League { /// id: i32, /// name: String /// } - /// + /// /// #[derive(Debug)] /// #[allow(non_camel_case_types)] /// pub enum LeagueField { @@ -82,58 +77,46 @@ pub fn generate_enum_with_fields(canyon_entity: &CanyonEntity) -> TokenStream { #(#fields_names),* } - impl #generics canyon_sql::bounds::FieldIdentifier<#ty> for #generics #enum_name #generics { - fn field_name_as_str(self) -> String { - match self { - #(#match_arms),* + impl #generics canyon_sql::crud::bounds::FieldIdentifier<#ty> for #generics #enum_name #generics { + fn as_str(&self) -> &'static str { + match *self { + #(#match_arms_str),* } } } - - impl #generics std::fmt::Display for #enum_name #generics { - fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { - write!(f, "") // TODO - } - } } } /// Autogenerated Rust Enum type that contains as many variants /// with inner value as fields has the structure to which it relates -/// +/// /// The type of the inner value `(Enum::Variant(SomeType))` is the same /// that the field that the variant represents pub fn generate_enum_with_fields_values(canyon_entity: &CanyonEntity) -> TokenStream { let ty = &canyon_entity.struct_name; let struct_name = canyon_entity.struct_name.to_string(); - let enum_name = Ident::new( - (struct_name + "FieldValue").as_str(), - Span::call_site() - ); + let enum_name = Ident::new((struct_name + "FieldValue").as_str(), Span::call_site()); - let fields_names = &canyon_entity - .get_fields_as_enum_variants_with_type(); - let match_arms = &canyon_entity - .create_match_arm_for_relate_fields_with_values(&enum_name); + let fields_names = &canyon_entity.get_fields_as_enum_variants_with_value(); + let match_arms = &canyon_entity.create_match_arm_for_relate_fields_with_values(&enum_name); let visibility = &canyon_entity.vis; - let generics = &canyon_entity.generics; quote! { #[derive(Debug)] #[allow(non_camel_case_types)] #[allow(unused_variables)] #[allow(dead_code)] - /// Auto-generated enumeration to represent each field of the related + /// Auto-generated enumeration to represent each field of the related /// type as a variant, which can support and contain a value of the field data type. 
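+        /// As a hedged sketch, for the hypothetical `League` entity used in these docs, each
+        /// variant carries a reference to the field's value as a trait object (assuming the
+        /// value's type implements `QueryParameters`), so a call like
+        /// `LeagueFieldValue::id(&1).value()` yields the tuple `("id", &1)`: the
+        /// column name plus the bindable query parameter.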
- /// + /// /// ``` /// pub struct League { /// id: i32, /// name: String, /// opt: Option /// } - /// + /// /// #[derive(Debug)] /// #[allow(non_camel_case_types)] /// pub enum LeagueFieldValue { @@ -142,22 +125,16 @@ pub fn generate_enum_with_fields_values(canyon_entity: &CanyonEntity) -> TokenSt /// opt(Option) /// } /// ``` - #visibility enum #enum_name #generics { + #visibility enum #enum_name<'a> { #(#fields_names),* } - impl #generics canyon_sql::bounds::FieldValueIdentifier<#ty> for #generics #enum_name #generics { - fn value(self) -> String { + impl<'a> canyon_sql::crud::bounds::FieldValueIdentifier<'a, #ty> for #enum_name<'a> { + fn value(self) -> (&'static str, &'a dyn QueryParameters<'a>) { match self { #(#match_arms),* } } } - - impl #generics std::fmt::Display for #enum_name #generics { - fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { - write!(f, "") // TODO - } - } } -} \ No newline at end of file +} diff --git a/canyon_manager/src/manager/mod.rs b/canyon_observer/src/manager/mod.rs similarity index 71% rename from canyon_manager/src/manager/mod.rs rename to canyon_observer/src/manager/mod.rs index be7b27ec..eca614b8 100644 --- a/canyon_manager/src/manager/mod.rs +++ b/canyon_observer/src/manager/mod.rs @@ -1,4 +1,4 @@ -pub mod manager_builder; pub mod entity; pub mod entity_fields; -pub mod field_annotation; \ No newline at end of file +pub mod field_annotation; +pub mod manager_builder; diff --git a/canyon_observer/src/memory.rs b/canyon_observer/src/memory.rs deleted file mode 100644 index 0d455c73..00000000 --- a/canyon_observer/src/memory.rs +++ /dev/null @@ -1,258 +0,0 @@ -use std::collections::HashMap; -use walkdir::WalkDir; -use std::fs; -use canyon_crud::crud::Transaction; - -use crate::QUERIES_TO_EXECUTE; - -/// Convenient struct that contains the necessary data and operations to implement -/// the `Canyon Memory`. -/// -/// Canyon Memory it's just a convenient way of relate the data of a Rust source -/// code file and the `CanyonEntity` (if so), helping Canyon to know what source -/// file contains a `#[canyon_entity]` annotation and restricting it to just one -/// annotated struct per file. -/// -/// This limitation it's imposed by desing. Canyon, when manages all the entities in -/// the user's source code, needs to know for future migrations the old data about a structure -/// and the new modified one. -/// -/// For example, let's say that you have a: -/// ``` -/// pub struct Person { -/// /* some fields */ -/// } -/// ``` -/// -/// and you decided to modify it's Ident and change it to `Human`. -/// -/// Canyon will take care about modifying the Database, and `ALTER TABLE` to edit the actual data for you, -/// but, if it's not able to get the data to know that the old one is `Person` and the new one it's `Human`. -/// it will simply drop the table (losing all your data) and creating a new table `Human`. -/// -/// So, we decised to follow the next approach: -/// Every entity annotated with a `#[canyon_entity]` annotation will be related to only unique Rust source -/// code file. If we find more, Canyon will raise and error saying that it does not allows to having more than -/// one managed entity per source file. -/// -/// Then, we will store the entities data in a special table only for Canyon, where we will create the relation -/// between the source file, the entity and it's fields and data. 
-/// -/// So, if the user wants or needs to modify the data of it's entity, Canyon can secure that will perform the -/// correct operations because we can't "remember" how that entity was, and how it should be now, avoiding -/// potencially dangerous operations due to lack of knowing what entity relates with new data. -/// -/// The `memory field` HashMap is made by the filename as a key, and the struct's name as value -#[derive(Debug)] -pub struct CanyonMemory { - pub memory: HashMap, - pub table_rename: HashMap -} - -// Makes this structure able to make queries to the database -impl Transaction for CanyonMemory {} - -impl CanyonMemory { - pub async fn remember() -> Self { - - // Creates the memory table if not exists - Self::create_memory().await; - // Check database for the "memory data" - let mem_results = Self::query( - "SELECT * FROM canyon_memory", - vec![], - "" - ).await - .ok() - .expect("Error querying Canyon Memory") - .wrapper; - - // Manually maps the results - let mut memory_db_rows = Vec::new(); - // let mut memory_rows_to_delete = Vec::new(); - // Cando non a encontres no parseo de archivos, acumulas no array - // Tremendísima query con WHERE IN (45) - for row in mem_results { - let db_row = CanyonMemoryDatabaseRow { - id: row.get::<&str, i32>("id"), - filename: row.get::<&str, String>("filename"), - struct_name: row.get::<&str, String>("struct_name"), - }; - memory_db_rows.push(db_row); - } - - - // Parses the source code files looking for the #[canyon_entity] annotated classes - let mut mem = Self { - memory: HashMap::new(), - table_rename: HashMap::new(), - }; - Self::find_canyon_entity_annotated_structs(&mut mem).await; - - - // Insert into the memory table the new discovered entities - // Care, insert the new ones, delete the olds - // Also, updates the registry when the fields changes - let mut values_to_insert = String::new(); - let mut updates = Vec::new(); - - for (filename, struct_name) in &mem.memory { - // When the filename and the struct hasn't been modified and are already on db - let already_in_db = memory_db_rows - .iter() - .any( |el| - { - (el.filename == *filename && el.struct_name == *struct_name) || - ( - (el.filename != *filename && el.struct_name == *struct_name) || - (el.filename == *filename && el.struct_name != *struct_name) - ) - } - ); - if !already_in_db { - values_to_insert.push_str( - format!("('{}', '{}'),", filename, struct_name).as_str() - ); - } - // When the struct or the filename it's already on db but one of the two has been modified - let need_to_update = memory_db_rows - .iter() - .filter( |el| - { - (el.filename == *filename || el.struct_name == *struct_name) && - !(el.filename == *filename && el.struct_name == *struct_name) - } - ).next(); - - if let Some(update) = need_to_update { - updates.push(struct_name); - QUERIES_TO_EXECUTE.lock().unwrap().push( - format!( - "UPDATE canyon_memory SET filename = '{}', struct_name = '{}' \ - WHERE id = {}", - filename, struct_name, update.id - ) - ); - - // if the updated element is the struct name, whe add it to the table_rename Hashmap - let rename_table = &update.struct_name != struct_name; - - if rename_table { - mem.table_rename.insert( struct_name.clone().to_lowercase(),update.struct_name.clone().to_lowercase()); - } - } - } - - - if values_to_insert != String::new() { - values_to_insert.pop(); - values_to_insert.push_str(";"); - - QUERIES_TO_EXECUTE.lock().unwrap().push( - format!( - "INSERT INTO canyon_memory (filename, struct_name) VALUES {}", - values_to_insert - ) - ); - } - - // 
Deletes the records when a table is dropped on the previous Canyon run - let in_memory = mem.memory.values() - .collect::>(); - memory_db_rows.into_iter() - .for_each( |db_row| - { - if !in_memory.contains(&&db_row.struct_name) && - !updates.contains(&&db_row.struct_name) - { - QUERIES_TO_EXECUTE.lock().unwrap().push( - format!( - "DELETE FROM canyon_memory WHERE struct_name = '{}'", - db_row.struct_name - ) - ); - } - } - ); - - mem - } - - /// Parses the Rust source code files to find the one who contains Canyon entities - /// ie -> annotated with `#{canyon_entity}` - async fn find_canyon_entity_annotated_structs(&mut self) { - for file in WalkDir::new("./src").into_iter().filter_map(|file| file.ok()) { - if file.metadata().unwrap().is_file() - && file.path().display().to_string().ends_with(".rs") - { - // Opening the source code file - let contents = fs::read_to_string(file.path()) - .expect("Something went wrong reading the file"); - - let mut canyon_entity_macro_counter = 0; - let mut struct_name = String::new(); - for line in contents.split("\n") { - if line.starts_with("pub struct") { - struct_name.push_str( - line.split_whitespace() - .collect::>() - .get(2) - .unwrap_or(&"FAILED") - ) - } - if line.contains("#[") && line.contains("canyon_entity") - && !line.starts_with("//") - { - canyon_entity_macro_counter += 1; - } - } - - // If more than two, we panic! - if canyon_entity_macro_counter > 1 { - panic!( - r"Canyon does not support having multiple structs annotated\ - with `#[canyon::entity]` on the same file when the `#[canyon]`\ - macro it's present on the program" - ) - } else if canyon_entity_macro_counter == 1 { - self.memory.insert( - file.path() - .display() - .to_string() - .replace("\\", "/") - .split("/") - .collect::>() - .last() - .unwrap_or(&"FAILED") - .to_string(), - struct_name - ); - } - } - } - } - - /// Generates, if not exists the `canyon_memory` table - async fn create_memory() { - Self::query( - "CREATE TABLE IF NOT EXISTS canyon_memory \ - ( id INTEGER PRIMARY KEY GENERATED ALWAYS AS IDENTITY, \ - filename VARCHAR NOT NULL, struct_name VARCHAR NOT NULL - )", - vec![], - "" - ).await - .ok() - .expect("Error creating the 'canyon_memory' table") - .wrapper; - } -} - - -/// Represents a single row from the `canyon_memory` table -#[derive(Debug)] -struct CanyonMemoryDatabaseRow { - id: i32, - filename: String, - struct_name: String -} \ No newline at end of file diff --git a/canyon_observer/src/migrations/handler.rs b/canyon_observer/src/migrations/handler.rs new file mode 100644 index 00000000..4922f7d9 --- /dev/null +++ b/canyon_observer/src/migrations/handler.rs @@ -0,0 +1,221 @@ +use canyon_connection::{datasources::Migrations as MigrationsStatus, DATASOURCES}; +use partialdebug::placeholder::PartialDebug; + +use crate::{ + canyon_crud::{ + bounds::{Column, Row, RowOperations}, + crud::Transaction, + result::DatabaseResult, + DatabaseType, + }, + constants, + migrations::{ + information_schema::{ColumnMetadata, ColumnMetadataTypeValue, TableMetadata}, + memory::CanyonMemory, + processor::MigrationsProcessor, + }, + CANYON_REGISTER_ENTITIES, +}; + +#[derive(PartialDebug)] +pub struct Migrations; +// Makes this structure able to make queries to the database +impl Transaction for Migrations {} + +impl Migrations { + /// Launches the mechanism to parse the Database schema, the Canyon register + /// and the database table with the memory of Canyon to perform the + /// migrations over the targeted database + pub async fn migrate() { + for datasource in 
DATASOURCES.iter() {
+            if datasource
+                .properties
+                .migrations
+                .filter(|status| !status.eq(&MigrationsStatus::Disabled))
+                .is_none()
+            {
+                println!(
+                    "Skipping datasource {:?}: migrations disabled (or not configured)",
+                    datasource.name
+                );
+                continue;
+            }
+            println!(
+                "Processing migrations for datasource: {:?}",
+                datasource.name
+            );
+
+            let mut migrations_processor = MigrationsProcessor::default();
+
+            let canyon_memory = CanyonMemory::remember(datasource).await;
+            let canyon_tables = CANYON_REGISTER_ENTITIES.lock().unwrap().to_vec();
+
+            // Tracked entities that must be migrated whenever Canyon starts
+            let schema_status =
+                Self::fetch_database(datasource.name, datasource.properties.db_type).await;
+            let database_tables_schema_info = Self::map_rows(schema_status);
+
+            // We filter out the tables from the schema that aren't Canyon entities
+            let mut user_database_tables = vec![];
+            for parsed_table in database_tables_schema_info.iter() {
+                if canyon_memory
+                    .memory
+                    .values()
+                    .any(|f| f.to_lowercase() == parsed_table.table_name)
+                    || canyon_memory
+                        .renamed_entities
+                        .values()
+                        .any(|f| *f == parsed_table.table_name.to_lowercase())
+                {
+                    user_database_tables.push(parsed_table);
+                }
+            }
+
+            migrations_processor
+                .process(
+                    canyon_memory,
+                    canyon_tables,
+                    user_database_tables,
+                    datasource,
+                )
+                .await;
+        }
+    }
+
+    /// Fetches the schema metadata of the database targeted by the datasource
+    /// chosen by its name property
+    async fn fetch_database(
+        datasource_name: &str,
+        db_type: DatabaseType,
+    ) -> DatabaseResult<Migrations> {
+        let query = match db_type {
+            DatabaseType::PostgreSql => constants::postgresql_queries::FETCH_PUBLIC_SCHEMA,
+            DatabaseType::SqlServer => constants::mssql_queries::FETCH_PUBLIC_SCHEMA,
+        };
+
+        Self::query(query, [], datasource_name)
+            .await
+            .unwrap_or_else(|_| {
+                panic!(
+                    "Error querying the schema information for the datasource: {datasource_name}"
+                )
+            })
+    }
+
+    /// Parses the result of querying the information of some database schema,
+    /// extracting the content of the returned rows into custom structures with
+    /// the data well organized for every entity present in that schema
+    fn map_rows(db_results: DatabaseResult<Migrations>) -> Vec<TableMetadata> {
+        let mut schema_info: Vec<TableMetadata> = Vec::new();
+
+        for res_row in db_results.as_canyon_rows().into_iter() {
+            let unique_table = schema_info
+                .iter_mut()
+                .find(|table| table.table_name == *res_row.get::<&str>("table_name").to_owned());
+            match unique_table {
+                Some(table) => {
+                    /* If a table entity is already present in the collection, we attach
+                    to it the columns found for that table */
+                    Self::get_columns_metadata(res_row, table);
+                }
+                None => {
+                    /* If there's no table for a given "table_name" property in the
+                    collection yet, we must create a new instance and attach to it
+                    the column data found in this iteration */
+                    let mut new_table = TableMetadata {
+                        table_name: res_row.get::<&str>("table_name").to_owned(),
+                        columns: Vec::new(),
+                    };
+                    Self::get_columns_metadata(res_row, &mut new_table);
+                    schema_info.push(new_table);
+                }
+            };
+        }
+
+        schema_info
+    }
+
+    /// Parses all the [`Row`]s returned by querying the targeted schema,
+    /// grouping them into [`TableMetadata`] structs by relating every [`Row`] that shares
+    /// the same "table_name" (asked with column.name()), which becomes one field of the new
+    /// [`TableMetadata`], and parsing the other columns that belong to that entity,
+    /// appending each one as a new [`ColumnMetadata`] element of the columns field.
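+    ///
+    /// A minimal sketch of that grouping, using simplified, hypothetical types:
+    /// ```
+    /// struct Table { name: String, columns: Vec<String> }
+    /// let rows = vec![("person", "id"), ("person", "name"), ("league", "id")];
+    /// let mut tables: Vec<Table> = Vec::new();
+    /// for (table_name, column) in rows {
+    ///     match tables.iter_mut().find(|t| t.name == table_name) {
+    ///         Some(t) => t.columns.push(column.to_string()),
+    ///         None => tables.push(Table {
+    ///             name: table_name.to_string(),
+    ///             columns: vec![column.to_string()],
+    ///         }),
+    ///     }
+    /// }
+    /// assert_eq!(tables.len(), 2);
+    /// assert_eq!(tables[0].columns, vec!["id", "name"]);
+    /// ```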
+ fn get_columns_metadata(res_row: &dyn Row, table: &mut TableMetadata) { + let mut entity_column = ColumnMetadata::default(); + for column in res_row.columns().iter() { + if column.name() != "table_name" { + Self::set_column_metadata(res_row, column, &mut entity_column); + } // Discards the column "table_name", 'cause is already a field of [`TableMetadata`] + } + table.columns.push(entity_column); + } + + /// Sets the concrete value for a field of a [`ColumnMetadata`], by reading the properties + /// of the source [`Column`], filtering the target value by the source property `column name` + fn set_column_metadata(row: &dyn Row, src: &Column, dest: &mut ColumnMetadata) { + let column_identifier = src.name(); + let column_value = ColumnMetadataTypeValue::get_value(row, src); + + if column_identifier == "column_name" { + if let ColumnMetadataTypeValue::StringValue(value) = &column_value { + dest.column_name = value + .to_owned() + .expect("[MIGRATIONS - set_column_metadata -> column_name]") + } + } else if column_identifier == "data_type" { + if let ColumnMetadataTypeValue::StringValue(value) = &column_value { + dest.datatype = value + .to_owned() + .expect("[MIGRATIONS - set_column_metadata -> data_type]") + } + } else if column_identifier == "character_maximum_length" { + if let ColumnMetadataTypeValue::IntValue(value) = &column_value { + dest.character_maximum_length = value.to_owned() + } + } else if column_identifier == "is_nullable" { + if let ColumnMetadataTypeValue::StringValue(value) = &column_value { + dest.is_nullable = matches!( + value + .as_ref() + .expect("[MIGRATIONS - set_column_metadata -> is_nullable]") + .as_str(), + "YES" + ) + } + } else if column_identifier == "column_default" { + if let ColumnMetadataTypeValue::StringValue(value) = &column_value { + dest.column_default = value.to_owned() + } + } else if column_identifier == "foreign_key_info" { + if let ColumnMetadataTypeValue::StringValue(value) = &column_value { + dest.foreign_key_info = value.to_owned() + } + } else if column_identifier == "foreign_key_name" { + if let ColumnMetadataTypeValue::StringValue(value) = &column_value { + dest.foreign_key_name = value.to_owned() + } + } else if column_identifier == "primary_key_info" { + if let ColumnMetadataTypeValue::StringValue(value) = &column_value { + dest.primary_key_info = value.to_owned() + } + } else if column_identifier == "primary_key_name" { + if let ColumnMetadataTypeValue::StringValue(value) = &column_value { + dest.primary_key_name = value.to_owned() + } + } else if column_identifier == "is_identity" { + if let ColumnMetadataTypeValue::StringValue(value) = &column_value { + dest.is_identity = matches!( + value + .as_ref() + .expect("[MIGRATIONS - set_column_metadata -> is_identity]") + .as_str(), + "YES" + ) + } + } else if column_identifier == "identity_generation" { + if let ColumnMetadataTypeValue::StringValue(value) = &column_value { + dest.identity_generation = value.to_owned() + } + }; + } +} diff --git a/canyon_observer/src/migrations/information_schema.rs b/canyon_observer/src/migrations/information_schema.rs new file mode 100644 index 00000000..bdf9f48e --- /dev/null +++ b/canyon_observer/src/migrations/information_schema.rs @@ -0,0 +1,63 @@ +use canyon_connection::{tiberius::ColumnType as TIB_TY, tokio_postgres::types::Type as TP_TYP}; +use canyon_crud::bounds::{Column, ColumnType, Row, RowOperations}; + +/// Model that represents the database entities that belongs to the current schema. 
+///
+/// Basically, it's an aggregation of the rows returned when Canyon queries the `information schema`
+/// table, grouped by table name (one [`TableMetadata`] holds the rows that contain the information
+/// of a single table)
+#[derive(Debug)]
+pub struct TableMetadata {
+    pub table_name: String,
+    pub columns: Vec<ColumnMetadata>,
+}
+
+/// Represents the *metadata* associated with a column that belongs to a `PostgreSQL` table.
+#[derive(Debug, Default)]
+pub struct ColumnMetadata {
+    pub column_name: String,
+    pub datatype: String,
+    pub character_maximum_length: Option<i32>,
+    pub is_nullable: bool, // Careful: the PostgreSQL type is varchar
+    pub column_default: Option<String>,
+    pub foreign_key_info: Option<String>,
+    pub foreign_key_name: Option<String>,
+    pub primary_key_info: Option<String>,
+    pub primary_key_name: Option<String>,
+    pub is_identity: bool, // Careful: the PostgreSQL type is varchar
+    pub identity_generation: Option<String>,
+}
+
+/// Represents the relation between a real value stored inside a [`ColumnMetadata`]
+/// and the datatype of that value
+#[derive(Debug)]
+pub enum ColumnMetadataTypeValue {
+    StringValue(Option<String>),
+    IntValue(Option<i32>),
+    NoneValue,
+}
+impl ColumnMetadataTypeValue {
+    /// Retrieves the value stored in a [`Column`] for the passed [`Row`]
+    pub fn get_value(row: &dyn Row, col: &Column) -> Self {
+        match col.column_type() {
+            ColumnType::Postgres(v) => {
+                match *v {
+                    TP_TYP::NAME | TP_TYP::VARCHAR | TP_TYP::TEXT => {
+                        Self::StringValue(row.get_opt::<&str>(col.name()).map(|opt| opt.to_owned()))
+                    }
+                    TP_TYP::INT4 => Self::IntValue(row.get_opt::<i32>(col.name())),
+                    _ => Self::NoneValue, // TODO watch out for this one
+                }
+            }
+            ColumnType::SqlServer(v) => match v {
+                TIB_TY::NChar | TIB_TY::NVarchar | TIB_TY::BigChar | TIB_TY::BigVarChar => {
+                    Self::StringValue(row.get_opt::<&str>(col.name()).map(|opt| opt.to_owned()))
+                }
+                TIB_TY::Int2 | TIB_TY::Int4 | TIB_TY::Int8 | TIB_TY::Intn => {
+                    Self::IntValue(row.get_opt::<i32>(col.name()))
+                }
+                _ => Self::NoneValue,
+            },
+        }
+    }
+}
diff --git a/canyon_observer/src/migrations/memory.rs b/canyon_observer/src/migrations/memory.rs
new file mode 100644
index 00000000..2c81a952
--- /dev/null
+++ b/canyon_observer/src/migrations/memory.rs
@@ -0,0 +1,285 @@
+use canyon_crud::{bounds::RowOperations, crud::Transaction, DatabaseType, DatasourceConfig};
+use std::collections::HashMap;
+use std::fs;
+use walkdir::WalkDir;
+
+use crate::{constants, QUERIES_TO_EXECUTE};
+
+/// Convenient struct that contains the necessary data and operations to implement
+/// the `Canyon Memory`.
+///
+/// Canyon Memory is just a convenient way of relating the data of a Rust source
+/// code file with its `CanyonEntity` (if any), helping Canyon to know which source
+/// file contains a `#[canyon_entity]` annotation and restricting it to just one
+/// annotated struct per file.
+///
+/// This limitation is imposed by design. Canyon, when managing all the entities in
+/// the user's source code, needs to know for future migrations the old data about a structure
+/// and the new, modified one.
+///
+/// For example, let's say that you have a:
+/// ```
+/// pub struct Person {
+///    /* some fields */
+/// }
+/// ```
+///
+/// and you decided to modify its Ident and change it to `Human`.
+///
+/// Canyon will take care of modifying the database with an `ALTER TABLE` to edit the actual data for you,
+/// but if it's not able to know that the old one is `Person` and the new one is `Human`,
+/// it will simply drop the table (losing all your data) and create a new table `Human`.
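+///
+/// As a hedged sketch, this is roughly what gets tracked for such a rename
+/// (hypothetical values; the real mapping is filled in by `remember` below):
+/// ```
+/// use std::collections::HashMap;
+///
+/// let mut renamed_entities: HashMap<String, String> = HashMap::new();
+/// // key: the new (current) name, value: the old one, both lowercased
+/// renamed_entities.insert("human".to_string(), "person".to_string());
+/// assert_eq!(renamed_entities.get("human").map(String::as_str), Some("person"));
+/// ```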
+///
+/// So, we decided to follow the next approach:
+/// Every entity annotated with a `#[canyon_entity]` annotation will be related to only one Rust source
+/// code file. If we find more, Canyon will raise an error saying that it does not allow having more than
+/// one managed entity per source file.
+///
+/// Then, we will store the entities' data in a special table reserved for Canyon, where we will create the relation
+/// between the source file, the entity, and its fields and data.
+///
+/// So, if the user wants or needs to modify the data of their entity, Canyon can make sure it performs the
+/// correct operations, because it can "remember" how that entity was and how it should be now, avoiding
+/// potentially dangerous operations caused by not knowing which entity the new data relates to.
+///
+/// The `memory` field HashMap uses the filepath as key and the struct's name as value
+#[derive(Debug)]
+pub struct CanyonMemory {
+    pub memory: HashMap<String, String>,
+    pub renamed_entities: HashMap<String, String>,
+}
+
+// Makes this structure able to make queries to the database
+impl Transaction for CanyonMemory {}
+
+impl CanyonMemory {
+    /// Queries the database to retrieve internal data about the structures
+    /// tracked by `CanyonSQL`
+    ///
+    /// TODO fetch schemas if structures don't have the default ones
+    #[allow(clippy::nonminimal_bool)]
+    pub async fn remember(datasource: &DatasourceConfig<'static>) -> Self {
+        // Creates the memory table if it doesn't exist
+        Self::create_memory(datasource.name, &datasource.properties.db_type).await;
+
+        // Retrieve the last status data from the `canyon_memory` table
+        // TODO still pending on the target schema; for now they are created on the default one
+        let res = Self::query("SELECT * FROM canyon_memory", [], datasource.name)
+            .await
+            .expect("Error querying Canyon Memory");
+        let mem_results = res.as_canyon_rows();
+
+        // Manually maps the results
+        let mut db_rows = Vec::new();
+        for row in mem_results.iter() {
+            let db_row = CanyonMemoryRow {
+                id: row.get::<i32>("id"),
+                filepath: row.get::<&str>("filepath"),
+                struct_name: row.get::<&str>("struct_name"),
+            };
+            db_rows.push(db_row);
+        }
+
+        // Parses the source code files looking for the #[canyon_entity] annotated structs
+        let mut mem = Self {
+            memory: HashMap::new(),
+            renamed_entities: HashMap::new(),
+        };
+        Self::find_canyon_entity_annotated_structs(&mut mem).await;
+
+        // Insert into the memory table the newly discovered entities.
+        // Careful: insert the new ones, delete the old ones.
+        // Also, update the registry when the fields change
+        let mut values_to_insert = String::new();
+        let mut updates = Vec::new();
+
+        for (filepath, struct_name) in &mem.memory {
+            // When the filepath and the struct haven't been modified and are already on the db
+            let already_in_db = db_rows.iter().any(|el| {
+                (el.filepath == *filepath && el.struct_name == *struct_name)
+                    || ((el.filepath != *filepath && el.struct_name == *struct_name)
+                        || (el.filepath == *filepath && el.struct_name != *struct_name))
+            });
+            if !already_in_db {
+                values_to_insert.push_str(format!("('{filepath}', '{struct_name}'),").as_str());
+            }
+            // When the struct or the filepath is already on the db but one of the two has been modified
+            let need_to_update = db_rows.iter().find(|el| {
+                (el.filepath == *filepath || el.struct_name == *struct_name)
+                    && !(el.filepath == *filepath && el.struct_name == *struct_name)
+            });
+
+            // `updated` means the old one:
The value to update + if let Some(old) = need_to_update { + updates.push(old.struct_name); + let stmt = format!( + "UPDATE canyon_memory SET filepath = '{}', struct_name = '{}' \ + WHERE id = {}", + filepath, struct_name, old.id + ); + + if QUERIES_TO_EXECUTE + .lock() + .unwrap() + .contains_key(datasource.name) + { + QUERIES_TO_EXECUTE + .lock() + .unwrap() + .get_mut(datasource.name) + .unwrap() + .push(stmt); + } else { + QUERIES_TO_EXECUTE + .lock() + .unwrap() + .insert(datasource.name, vec![stmt]); + } + + // if the updated element is the struct name, whe add it to the table_rename Hashmap + let rename_table = old.struct_name != struct_name; + + if rename_table { + mem.renamed_entities.insert( + struct_name.to_lowercase(), // The new one + old.struct_name.to_lowercase(), // The old one + ); + } + } + } + + if !values_to_insert.is_empty() { + values_to_insert.pop(); + values_to_insert.push(';'); + + let stmt = format!( + "INSERT INTO canyon_memory (filepath, struct_name) VALUES {values_to_insert}" + ); + + if QUERIES_TO_EXECUTE + .lock() + .unwrap() + .contains_key(datasource.name) + { + QUERIES_TO_EXECUTE + .lock() + .unwrap() + .get_mut(datasource.name) + .unwrap() + .push(stmt); + } else { + QUERIES_TO_EXECUTE + .lock() + .unwrap() + .insert(datasource.name, vec![stmt]); + } + } + + // Deletes the records when a table is dropped on the previous Canyon run + let in_memory = mem.memory.values().collect::>(); + db_rows.into_iter().for_each(|db_row| { + if !in_memory.contains(&&db_row.struct_name.to_string()) + && !updates.contains(&db_row.struct_name) + { + let stmt = format!( + "DELETE FROM canyon_memory WHERE struct_name = '{}'", + db_row.struct_name + ); + + if QUERIES_TO_EXECUTE + .lock() + .unwrap() + .contains_key(datasource.name) + { + QUERIES_TO_EXECUTE + .lock() + .unwrap() + .get_mut(datasource.name) + .unwrap() + .push(stmt); + } else { + QUERIES_TO_EXECUTE + .lock() + .unwrap() + .insert(datasource.name, vec![stmt]); + } + } + }); + + mem + } + + /// Parses the Rust source code files to find the one who contains Canyon entities + /// ie -> annotated with `#{canyon_entity}` + async fn find_canyon_entity_annotated_structs(&mut self) { + for file in WalkDir::new("./src") + .into_iter() + .filter_map(|file| file.ok()) + { + if file.metadata().unwrap().is_file() + && file.path().display().to_string().ends_with(".rs") + { + // Opening the source code file + let contents = + fs::read_to_string(file.path()).expect("Something went wrong reading the file"); + + let mut canyon_entity_macro_counter = 0; + let mut struct_name = String::new(); + for line in contents.split('\n') { + if !line.starts_with("//") && line.contains("struct") { + struct_name.push_str( + line.split_whitespace() + .collect::>() + .get(2) + .unwrap_or(&"FAILED"), + ) + } + if line.contains("#[") // separated checks for possible different paths + && line.contains("canyon_entity") + && !line.starts_with("//") + { + canyon_entity_macro_counter += 1; + } + } + + // This limitation will be removed in future versions, when the memory + // will be able to track every aspect of an entity + match canyon_entity_macro_counter { + 0 => (), + 1 => { + self.memory.insert( + file.path().display().to_string().replace('\\', "/"), + struct_name, + ); + } + _ => panic!( + "Canyon does not support having multiple structs annotated + with `#[canyon::entity]` on the same file when the `#[canyon]` + macro it's present on the program" + ), + } + } + } + } + + /// Generates, if not exists the `canyon_memory` table + async fn 
create_memory(datasource_name: &str, database_type: &DatabaseType) {
+        let query = if database_type == &DatabaseType::PostgreSql {
+            constants::postgresql_queries::CANYON_MEMORY_TABLE
+        } else {
+            constants::mssql_queries::CANYON_MEMORY_TABLE
+        };
+
+        Self::query(query, [], datasource_name)
+            .await
+            .expect("Error creating the 'canyon_memory' table");
+    }
+}
+
+/// Represents a single row from the `canyon_memory` table
+#[derive(Debug)]
+struct CanyonMemoryRow<'a> {
+    id: i32,
+    filepath: &'a str,
+    struct_name: &'a str,
+}
diff --git a/canyon_observer/src/migrations/mod.rs b/canyon_observer/src/migrations/mod.rs
new file mode 100644
index 00000000..525cbc10
--- /dev/null
+++ b/canyon_observer/src/migrations/mod.rs
@@ -0,0 +1,5 @@
+pub mod handler;
+pub mod information_schema;
+pub mod memory;
+pub mod processor;
+pub mod register_types;
diff --git a/canyon_observer/src/migrations/processor.rs b/canyon_observer/src/migrations/processor.rs
new file mode 100644
index 00000000..1301d6fe
--- /dev/null
+++ b/canyon_observer/src/migrations/processor.rs
@@ -0,0 +1,1026 @@
+//! File that contains all the datatypes and logic to perform the migrations
+//! over a target database
+use async_trait::async_trait;
+use canyon_crud::DatabaseType;
+use regex::Regex;
+use std::collections::HashMap;
+use std::fmt::Debug;
+use std::ops::Not;
+
+use crate::canyon_crud::{crud::Transaction, DatasourceConfig};
+use crate::constants::regex_patterns;
+use crate::QUERIES_TO_EXECUTE;
+
+use super::information_schema::{ColumnMetadata, TableMetadata};
+use super::memory::CanyonMemory;
+use super::register_types::{CanyonRegisterEntity, CanyonRegisterEntityField};
+
+/// Responsible for generating the queries that sync the database status with the
+/// Rust source code managed by Canyon, so the migrations can run successfully
+#[derive(Debug, Default)]
+pub struct MigrationsProcessor {
+    operations: Vec<Box<dyn DatabaseOperation>>,
+    set_primary_key_operations: Vec<Box<dyn DatabaseOperation>>,
+    drop_primary_key_operations: Vec<Box<dyn DatabaseOperation>>,
+    constraints_operations: Vec<Box<dyn DatabaseOperation>>,
+}
+impl Transaction for MigrationsProcessor {}
+
+impl MigrationsProcessor {
+    pub async fn process<'a>(
+        &'a mut self,
+        canyon_memory: CanyonMemory,
+        canyon_entities: Vec<CanyonRegisterEntity<'static>>,
+        database_tables: Vec<&'a TableMetadata>,
+        datasource: &'_ DatasourceConfig<'static>,
+    ) {
+        // The database type formally represented in Canyon
+        let db_type = datasource.properties.db_type;
+        // For each entity (table) on the register (Rust structs)
+        for canyon_register_entity in canyon_entities {
+            // TODO Check if it's disabled for the current datasource
+            let entity_name = canyon_register_entity.entity_name.to_lowercase();
+
+            // 1st operation ->
+            self.create_or_rename_tables(
+                &canyon_memory,
+                entity_name.as_str(),
+                canyon_register_entity.entity_fields.clone(),
+                &database_tables,
+            );
+
+            let current_table_metadata = MigrationsHelper::get_current_table_metadata(
+                &canyon_memory,
+                entity_name.as_str(),
+                &database_tables,
+            );
+
+            self.delete_fields(
+                entity_name.as_str(),
+                canyon_register_entity.entity_fields.clone(),
+                current_table_metadata,
+                db_type,
+            );
+
+            // For each field (column) of this canyon register entity
+            for canyon_register_field in canyon_register_entity.entity_fields {
+                let current_column_metadata = MigrationsHelper::get_current_column_metadata(
+                    canyon_register_field.field_name.clone(),
+                    current_table_metadata,
+                );
+
+                // We only create or modify (right now only datatype)
+                // the column when the database already contains the table;
+                // if not, the columns were already created in the previous
operation (create table) + if current_table_metadata.is_some() { + self.create_or_modify_field( + entity_name.as_str(), + db_type, + canyon_register_field.clone(), + current_column_metadata, + ) + } + + // Time to check annotations for the current column + // Case when we only need to add contrains + if (current_table_metadata.is_none() + && !canyon_register_field.annotations.is_empty()) + || (current_table_metadata.is_some() && current_column_metadata.is_none()) + { + self.add_constraints(entity_name.as_str(), canyon_register_field.clone()) + } + + // Case when we need to compare the entity with the database contain + if current_table_metadata.is_some() && current_column_metadata.is_some() { + self.add_modify_or_remove_constraints( + entity_name.as_str(), + canyon_register_field, + current_column_metadata.unwrap(), + ) + } + } + } + + for operation in &self.operations { + operation.generate_sql(datasource).await; // This should be moved again to runtime + } + for operation in &self.drop_primary_key_operations { + operation.generate_sql(datasource).await; // This should be moved again to runtime + } + for operation in &self.set_primary_key_operations { + operation.generate_sql(datasource).await; // This should be moved again to runtime + } + for operation in &self.constraints_operations { + operation.generate_sql(datasource).await; // This should be moved again to runtime + } + // TODO Still pending to decouple de executions of cargo check to skip the process if this + // code is not processed by cargo build or cargo run + // Self::from_query_register(datasource_name).await; + } + + /// The operation that checks if an entity must be update is name in the database + fn create_or_rename_tables<'a>( + &mut self, + canyon_memory: &'_ CanyonMemory, + entity_name: &'a str, + entity_fields: Vec, + database_tables: &'a [&'a TableMetadata], + ) { + // 1st operation -> Check if the current entity is already on the target database. + // If isn't present (this if case), we + if !MigrationsHelper::entity_already_on_database(entity_name, database_tables) { + // [`CanyonMemory`] holds a HashMap with the tables who changed their name in + // the Rust side. 
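+        // (Hypothetical example: if `renamed_entities` maps "human" -> "person", then
+        // processing the entity "human" emits a rename of table "person" instead of
+        // a CREATE TABLE for "human".)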
If this table name is present, we don't create a new table, + // just rename the already known one + if canyon_memory.renamed_entities.contains_key(entity_name) { + self.table_rename( + canyon_memory + .renamed_entities + .get(entity_name) // Get the old entity name (the value) + .unwrap() + .to_owned(), + entity_name.to_string(), // Set the new table name + ) + } else { + self.create_table(entity_name.to_string(), entity_fields) + } + } + } + + /// Generates a database agnostic query to change the name of a table + fn create_table(&mut self, table_name: String, entity_fields: Vec) { + self.operations.push(Box::new(TableOperation::CreateTable( + table_name, + entity_fields, + ))); + } + + /// Generates a database agnostic query to change the name of a table + fn table_rename(&mut self, old_table_name: String, new_table_name: String) { + self.operations + .push(Box::new(TableOperation::AlterTableName( + old_table_name, + new_table_name, + ))); + } + + // Creates or modify (currently only datatype) a column for a given canyon register entity field + fn delete_fields<'a>( + &mut self, + entity_name: &'a str, + entity_fields: Vec, + current_table_metadata: Option<&'a TableMetadata>, + db_type: DatabaseType, + ) { + if current_table_metadata.is_none() { + return; + } + let columns_name_to_delete: Vec<&ColumnMetadata> = current_table_metadata + .unwrap() + .columns + .iter() + .filter(|db_column| { + entity_fields + .iter() + .map(|canyon_field| canyon_field.field_name.to_string()) + .any(|canyon_field| canyon_field == db_column.column_name) + .not() + }) + .collect(); + + for column_metadata in columns_name_to_delete { + if db_type == DatabaseType::SqlServer && !column_metadata.is_nullable { + self.drop_column_not_null( + entity_name, + column_metadata.column_name.clone(), + MigrationsHelper::get_datatype_from_column_metadata(column_metadata), + ) + } + self.delete_column(entity_name, column_metadata.column_name.clone()); + } + } + + // Creates or modify (currently only datatype) a column for a given canyon register entity field + fn create_or_modify_field( + &mut self, + entity_name: &str, + db_type: DatabaseType, + canyon_register_entity_field: CanyonRegisterEntityField, + current_column_metadata: Option<&ColumnMetadata>, + ) { + // If we do not retrieve data for this database column, it does not exist yet + // and therefore it has to be created + if current_column_metadata.is_none() { + self.create_column(entity_name.to_string(), canyon_register_entity_field) + } else if !MigrationsHelper::is_same_datatype( + db_type, + &canyon_register_entity_field, + current_column_metadata.unwrap(), + ) { + self.change_column_datatype(entity_name.to_string(), canyon_register_entity_field) + } + } + + fn delete_column(&mut self, table_name: &str, column_name: String) { + self.operations.push(Box::new(ColumnOperation::DeleteColumn( + table_name.to_string(), + column_name, + ))); + } + + fn drop_column_not_null( + &mut self, + table_name: &str, + column_name: String, + column_datatype: String, + ) { + self.operations + .push(Box::new(ColumnOperation::DropNotNullBeforeDropColumn( + table_name.to_string(), + column_name, + column_datatype, + ))); + } + + fn create_column(&mut self, table_name: String, field: CanyonRegisterEntityField) { + self.operations + .push(Box::new(ColumnOperation::CreateColumn(table_name, field))); + } + + fn change_column_datatype(&mut self, table_name: String, field: CanyonRegisterEntityField) { + self.operations + .push(Box::new(ColumnOperation::AlterColumnType( + table_name, field, 
+ ))); + } + + fn add_constraints( + &mut self, + entity_name: &str, + canyon_register_entity_field: CanyonRegisterEntityField, + ) { + for attr in &canyon_register_entity_field.annotations { + if attr.starts_with("Annotation: ForeignKey") { + let annotation_data = MigrationsHelper::extract_foreign_key_annotation( + &canyon_register_entity_field.annotations, + ); + + let table_to_reference = annotation_data.0; + let column_to_reference = annotation_data.1; + + let foreign_key_name = format!( + "{entity_name}_{}_fkey", + &canyon_register_entity_field.field_name + ); + + Self::add_foreign_key( + self, + entity_name, + foreign_key_name, + table_to_reference, + column_to_reference, + &canyon_register_entity_field, + ); + } + if attr.starts_with("Annotation: PrimaryKey") { + Self::add_primary_key(self, entity_name, canyon_register_entity_field.clone()); + + if canyon_register_entity_field.is_autoincremental() { + Self::add_identity(self, entity_name, canyon_register_entity_field.clone()); + } + } + } + } + + fn add_foreign_key( + &mut self, + entity_name: &'_ str, + foreign_key_name: String, + table_to_reference: String, + column_to_reference: String, + canyon_register_entity_field: &CanyonRegisterEntityField, + ) { + self.constraints_operations + .push(Box::new(TableOperation::AddTableForeignKey( + entity_name.to_string(), + foreign_key_name, + canyon_register_entity_field.field_name.clone(), + table_to_reference, + column_to_reference, + ))); + } + + fn add_primary_key( + &mut self, + entity_name: &str, + canyon_register_entity_field: CanyonRegisterEntityField, + ) { + self.set_primary_key_operations + .push(Box::new(TableOperation::AddTablePrimaryKey( + entity_name.to_string(), + canyon_register_entity_field, + ))); + } + + fn add_identity(&mut self, entity_name: &str, field: CanyonRegisterEntityField) { + self.constraints_operations + .push(Box::new(ColumnOperation::AlterColumnAddIdentity( + entity_name.to_string(), + field.clone(), + ))); + + self.constraints_operations + .push(Box::new(SequenceOperation::ModifySequence( + entity_name.to_string(), + field, + ))); + } + + fn add_modify_or_remove_constraints( + &mut self, + entity_name: &str, + canyon_register_entity_field: CanyonRegisterEntityField, + current_column_metadata: &ColumnMetadata, + ) { + let field_is_primary_key = canyon_register_entity_field + .annotations + .iter() + .any(|anno| anno.starts_with("Annotation: PrimaryKey")); + + let field_is_foreign_key = canyon_register_entity_field + .annotations + .iter() + .any(|anno| anno.starts_with("Annotation: ForeignKey")); + + // ------------ PRIMARY KEY --------------- + // Case when field contains a primary key annotation, and it's not already on database, add it to constrains_operations + if field_is_primary_key && current_column_metadata.primary_key_info.is_none() { + Self::add_primary_key(self, entity_name, canyon_register_entity_field.clone()); + + if canyon_register_entity_field.is_autoincremental() { + Self::add_identity(self, entity_name, canyon_register_entity_field.clone()); + } + } + // Case when the field contains a primary key annotation, and it's already on the database + else if field_is_primary_key && current_column_metadata.primary_key_info.is_some() { + let is_autoincr_rust = canyon_register_entity_field.is_autoincremental(); + let is_autoincr_in_db = current_column_metadata.is_identity; + + if !is_autoincr_rust && is_autoincr_in_db { + Self::drop_identity(self, entity_name, canyon_register_entity_field.clone()) + } else if is_autoincr_rust && !is_autoincr_in_db { + 
+ fn add_modify_or_remove_constraints(
+ &mut self,
+ entity_name: &str,
+ canyon_register_entity_field: CanyonRegisterEntityField,
+ current_column_metadata: &ColumnMetadata,
+ ) {
+ let field_is_primary_key = canyon_register_entity_field
+ .annotations
+ .iter()
+ .any(|anno| anno.starts_with("Annotation: PrimaryKey"));
+
+ let field_is_foreign_key = canyon_register_entity_field
+ .annotations
+ .iter()
+ .any(|anno| anno.starts_with("Annotation: ForeignKey"));
+
+ // ------------ PRIMARY KEY ---------------
+ // Case when the field contains a primary key annotation and it's not already on the database: add it to constraints_operations
+ if field_is_primary_key && current_column_metadata.primary_key_info.is_none() {
+ Self::add_primary_key(self, entity_name, canyon_register_entity_field.clone());
+
+ if canyon_register_entity_field.is_autoincremental() {
+ Self::add_identity(self, entity_name, canyon_register_entity_field.clone());
+ }
+ }
+ // Case when the field contains a primary key annotation and it's already on the database
+ else if field_is_primary_key && current_column_metadata.primary_key_info.is_some() {
+ let is_autoincr_rust = canyon_register_entity_field.is_autoincremental();
+ let is_autoincr_in_db = current_column_metadata.is_identity;
+
+ if !is_autoincr_rust && is_autoincr_in_db {
+ Self::drop_identity(self, entity_name, canyon_register_entity_field.clone())
+ } else if is_autoincr_rust && !is_autoincr_in_db {
+ Self::add_identity(self, entity_name, canyon_register_entity_field.clone())
+ }
+ }
+ // Case when the field doesn't contain a primary key annotation, but there is one in the database column
+ else if !field_is_primary_key && current_column_metadata.primary_key_info.is_some() {
+ Self::drop_primary_key(
+ self,
+ entity_name,
+ current_column_metadata
+ .primary_key_name
+ .as_ref()
+ .expect("PrimaryKey constraint name not found")
+ .to_string(),
+ );
+
+ if current_column_metadata.is_identity {
+ Self::drop_identity(self, entity_name, canyon_register_entity_field.clone());
+ }
+ }
+
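The identity reconciliation above compares the Rust-side flag against the database-side one; written as a match for clarity (a sketch only, with `field` standing in for `canyon_register_entity_field` and method-call syntax instead of the `Self::...(self, ...)` form used in the file):

```rust
// (Rust wants identity?, DB column is identity?) -> queued action
match (is_autoincr_rust, is_autoincr_in_db) {
    (false, true) => self.drop_identity(entity_name, field.clone()), // DB has GENERATED, Rust does not
    (true, false) => self.add_identity(entity_name, field.clone()),  // Rust wants GENERATED, DB lacks it
    _ => (), // both sides already agree: nothing to queue
}
```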
+ // -------------------- FOREIGN KEY CASE ----------------------------
+ // Case when the field contains a foreign key annotation and it's not already on the database: add it to constraints_operations
+ if field_is_foreign_key && current_column_metadata.foreign_key_name.is_none() {
+ let annotation_data = MigrationsHelper::extract_foreign_key_annotation(
+ &canyon_register_entity_field.annotations,
+ );
+
+ let foreign_key_name = format!(
+ "{entity_name}_{}_fkey",
+ &canyon_register_entity_field.field_name
+ );
+
+ Self::add_foreign_key(
+ self,
+ entity_name,
+ foreign_key_name,
+ annotation_data.0,
+ annotation_data.1,
+ &canyon_register_entity_field,
+ );
+ }
+ // Case when the field contains a foreign key annotation, and there is already one in the database
+ else if field_is_foreign_key && current_column_metadata.foreign_key_name.is_some() {
+ // Will contain the table name (on index 0) and column name (on index 1) pointed to by the foreign key
+ let annotation_data = MigrationsHelper::extract_foreign_key_annotation(
+ &canyon_register_entity_field.annotations,
+ );
+
+ let foreign_key_name = format!(
+ "{entity_name}_{}_fkey",
+ &canyon_register_entity_field.field_name
+ );
+
+ // Example of information in foreign_key_info: FOREIGN KEY (league) REFERENCES leagues(id)
+ let references_regex = Regex::new(regex_patterns::EXTRACT_FOREIGN_KEY_INFO).unwrap();
+
+ let captures_references = references_regex
+ .captures(
+ current_column_metadata
+ .foreign_key_info
+ .as_ref()
+ .expect("Regex - foreign key info"),
+ )
+ .expect("Regex - foreign key info not found");
+
+ let current_column = captures_references
+ .name("current_column")
+ .expect("Regex - Current column not found")
+ .as_str()
+ .to_string();
+ let ref_table = captures_references
+ .name("ref_table")
+ .expect("Regex - Ref table not found")
+ .as_str()
+ .to_string();
+ let ref_column = captures_references
+ .name("ref_column")
+ .expect("Regex - Ref column not found")
+ .as_str()
+ .to_string();
+
+ // If the entity's foreign key doesn't match the one on the database, queue operations to delete it and add the new one.
+ if canyon_register_entity_field.field_name != current_column
+ || annotation_data.0 != ref_table
+ || annotation_data.1 != ref_column
+ {
+ Self::delete_foreign_key(
+ self,
+ entity_name,
+ current_column_metadata
+ .foreign_key_name
+ .as_ref()
+ .expect("Annotation foreign key constraint name not found")
+ .to_string(),
+ );
+
+ Self::add_foreign_key(
+ self,
+ entity_name,
+ foreign_key_name,
+ annotation_data.0,
+ annotation_data.1,
+ &canyon_register_entity_field,
+ )
+ }
+ } else if !field_is_foreign_key && current_column_metadata.foreign_key_name.is_some() {
+ // Case when the field doesn't contain a foreign key annotation, but there is already one in the database column
+ Self::delete_foreign_key(
+ self,
+ entity_name,
+ current_column_metadata
+ .foreign_key_name
+ .as_ref()
+ .expect("ForeignKey constraint name not found")
+ .to_string(),
+ );
+ }
+ }
+
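For reference, the named capture groups used above can be exercised against the example in the comment. The actual pattern lives in `constants::regex_patterns::EXTRACT_FOREIGN_KEY_INFO`; the one below is an assumption, taken from the previous PostgreSQL-only implementation that this changeset removes:

```rust
use regex::Regex;

// Assumed shape of EXTRACT_FOREIGN_KEY_INFO (from the removed PostgreSQL-only module):
let references_regex = Regex::new(
    r"\w+\s\w+\s\((?P<current_column>\w+)\)\s\w+\s(?P<ref_table>\w+)\((?P<ref_column>\w+)\)",
)
.unwrap();

let caps = references_regex
    .captures("FOREIGN KEY (league) REFERENCES leagues(id)")
    .unwrap();
assert_eq!(&caps["current_column"], "league");
assert_eq!(&caps["ref_table"], "leagues");
assert_eq!(&caps["ref_column"], "id");
```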
+ fn drop_primary_key(&mut self, entity_name: &str, primary_key_name: String) {
+ self.drop_primary_key_operations
+ .push(Box::new(TableOperation::DeleteTablePrimaryKey(
+ entity_name.to_string(),
+ primary_key_name,
+ )));
+ }
+
+ fn drop_identity(
+ &mut self,
+ entity_name: &str,
+ canyon_register_entity_field: CanyonRegisterEntityField,
+ ) {
+ self.constraints_operations
+ .push(Box::new(ColumnOperation::AlterColumnDropIdentity(
+ entity_name.to_string(),
+ canyon_register_entity_field,
+ )));
+ }
+
+ fn delete_foreign_key(&mut self, entity_name: &str, constraint_name: String) {
+ self.constraints_operations
+ .push(Box::new(TableOperation::DeleteTableForeignKey(
+ entity_name.to_string(),
+ constraint_name,
+ )));
+ }
+
+ /// Executes the detected migrations for the next Canyon-SQL run
+ #[allow(clippy::await_holding_lock)]
+ pub async fn from_query_register(queries_to_execute: &HashMap<&str, Vec<&str>>) {
+ for datasource in queries_to_execute.iter() {
+ for query_to_execute in datasource.1 {
+ let res = Self::query(query_to_execute, [], datasource.0).await;
+
+ match res {
+ Ok(_) => println!(
+ "\t[OK] - {:?} - Query: {:?}",
+ datasource.0, &query_to_execute
+ ),
+ Err(e) => println!(
+ "\t[ERR] - {:?} - Query: {:?}\nCause: {:?}",
+ datasource.0, &query_to_execute, e
+ ),
+ }
+ // TODO Ask for user input?
+ }
+ }
+ }
+}
+
+/// Contains helper methods to parse and process the external and internal input data
+/// for the migrations
+struct MigrationsHelper;
+impl MigrationsHelper {
+ /// Checks if a tracked Canyon entity is already present in the database
+ fn entity_already_on_database<'a>(
+ entity_name: &'a str,
+ database_tables: &'a [&'_ TableMetadata],
+ ) -> bool {
+ database_tables
+ .iter()
+ .any(|v| v.table_name.to_lowercase() == entity_name.to_lowercase())
+ }
+ // Get the table metadata for a given entity name, or for its old entity name if the table was renamed.
+ fn get_current_table_metadata<'a>(
+ canyon_memory: &'_ CanyonMemory,
+ entity_name: &'a str,
+ database_tables: &'a [&'_ TableMetadata],
+ ) -> Option<&'a TableMetadata> {
+ let correct_entity_name = canyon_memory
+ .renamed_entities
+ .get(&entity_name.to_lowercase())
+ .map(|e| e.to_owned())
+ .unwrap_or_else(|| entity_name.to_string());
+
+ database_tables
+ .iter()
+ .find(|table_metadata| {
+ table_metadata.table_name.to_lowercase() == *correct_entity_name.to_lowercase()
+ })
+ .map(|e| e.to_owned())
+ }
+
+ // Get the column metadata for a given column name
+ fn get_current_column_metadata(
+ column_name: String,
+ current_table_metadata: Option<&TableMetadata>,
+ ) -> Option<&ColumnMetadata> {
+ if let Some(metadata_table) = current_table_metadata {
+ metadata_table
+ .columns
+ .iter()
+ .find(|column| column.column_name == column_name)
+ } else {
+ None
+ }
+ }
+
+ fn get_datatype_from_column_metadata(current_column_metadata: &ColumnMetadata) -> String {
+ // TODO Add all SQL Server text datatypes
+ if vec!["nvarchar", "varchar"]
+ .contains(&current_column_metadata.datatype.to_lowercase().as_str())
+ {
+ let varchar_len = match &current_column_metadata.character_maximum_length {
+ Some(v) => v.to_string(),
+ None => "max".to_string(),
+ };
+
+ format!("{}({})", current_column_metadata.datatype, varchar_len)
+ } else {
+ current_column_metadata.datatype.to_string()
+ }
+ }
+
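The varchar branch above folds the column length into the type name. A stand-alone sketch of the same rule (the function name and the `Option<i32>` length type are hypothetical; the real code reads both values from `ColumnMetadata`):

```rust
// Hypothetical free-function version of the varchar-length folding rule.
fn fold_varchar_len(datatype: &str, character_maximum_length: Option<i32>) -> String {
    if matches!(datatype.to_lowercase().as_str(), "nvarchar" | "varchar") {
        let len = character_maximum_length
            .map(|v| v.to_string())
            .unwrap_or_else(|| "max".to_string());
        format!("{datatype}({len})")
    } else {
        datatype.to_string()
    }
}

assert_eq!(fold_varchar_len("nvarchar", Some(50)), "nvarchar(50)");
assert_eq!(fold_varchar_len("nvarchar", None), "nvarchar(max)");
assert_eq!(fold_varchar_len("integer", None), "integer");
```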
+ fn is_same_datatype(
+ db_type: DatabaseType,
+ canyon_register_entity_field: &CanyonRegisterEntityField,
+ current_column_metadata: &ColumnMetadata,
+ ) -> bool {
+ if db_type == DatabaseType::PostgreSql {
+ canyon_register_entity_field
+ .to_postgres_alter_syntax()
+ .to_lowercase()
+ == current_column_metadata.datatype
+ } else if db_type == DatabaseType::SqlServer {
+ // TODO Search for a better way to get the datatype without useless info (like "VARCHAR(MAX)")
+ canyon_register_entity_field
+ .to_sqlserver_alter_syntax()
+ .to_lowercase()
+ == current_column_metadata.datatype
+ } else {
+ todo!()
+ }
+ }
+
+ fn extract_foreign_key_annotation(field_annotations: &[String]) -> (String, String) {
+ let opt_fk_annotation = field_annotations
+ .iter()
+ .find(|anno| anno.starts_with("Annotation: ForeignKey"));
+ if let Some(fk_annotation) = opt_fk_annotation {
+ let annotation_data = fk_annotation
+ .split(',')
+ .filter(|x| !x.contains("Annotation: ForeignKey")) // After here, we only have the "table" and the "column" attribute values
+ .map(|x| {
+ x.split(':')
+ .collect::<Vec<&str>>()
+ .get(1)
+ .expect("Error. Unable to split annotations")
+ .trim()
+ .to_string()
+ })
+ .collect::<Vec<String>>();
+
+ let table_to_reference = annotation_data
+ .get(0)
+ .expect("Error extracting table ref from FK annotation")
+ .to_string();
+ let column_to_reference = annotation_data
+ .get(1)
+ .expect("Error extracting column ref from FK annotation")
+ .to_string();
+
+ (table_to_reference, column_to_reference)
+ } else {
+ panic!("Detected a Foreign Key attribute when it does not exist in the user's code");
+ }
+ }
+}
+
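To make the parsing concrete: assuming the macros side serializes a ForeignKey annotation in the comma/colon shape this helper expects (the exact string below is an assumption), the extraction yields the referenced table and column:

```rust
// Hypothetical annotation string, shaped the way extract_foreign_key_annotation expects:
let annotations = vec!["Annotation: ForeignKey, table: leagues, column: id".to_string()];

let (table, column) = MigrationsHelper::extract_foreign_key_annotation(&annotations);
assert_eq!(table, "leagues");
assert_eq!(column, "id");
```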
+#[cfg(test)]
+mod migrations_helper_tests {
+ use super::*;
+ use crate::constants;
+
+ const MOCKED_ENTITY_NAME: &str = "League";
+
+ #[test]
+ fn test_entity_already_on_database() {
+ let parse_result_empty_db_tables =
+ MigrationsHelper::entity_already_on_database(MOCKED_ENTITY_NAME, &[]);
+ // Should always be false
+ assert!(!parse_result_empty_db_tables);
+
+ // Rust has a `League` entity and the database has a `league` table. Casing should be
+ // normalized, and a match must be found
+ let mocked_league_entity_on_database = MigrationsHelper::entity_already_on_database(
+ MOCKED_ENTITY_NAME,
+ &[&constants::mocked_data::TABLE_METADATA_LEAGUE_EX],
+ );
+ assert!(mocked_league_entity_on_database);
+
+ let mocked_league_entity_on_database = MigrationsHelper::entity_already_on_database(
+ MOCKED_ENTITY_NAME,
+ &[&constants::mocked_data::NON_MATCHING_TABLE_METADATA],
+ );
+ assert!(!mocked_league_entity_on_database)
+ }
+}
+
+/// Trait that enables implementors to generate the migration queries
+#[async_trait]
+trait DatabaseOperation: Debug {
+ async fn generate_sql(&self, datasource: &DatasourceConfig<'static>);
+}
+
+/// Helper to relate the operations that Canyon should do when it's managing a schema
+#[derive(Debug)]
+#[allow(dead_code)]
+enum TableOperation {
+ CreateTable(String, Vec<CanyonRegisterEntityField>),
+ // old table_name, new table_name
+ AlterTableName(String, String),
+ // table_name, foreign_key_name, column_foreign_key, table_to_reference, column_to_reference
+ AddTableForeignKey(String, String, String, String, String),
+ // table_with_foreign_key, constraint_name
+ DeleteTableForeignKey(String, String),
+ // table_name, entity_field
+ AddTablePrimaryKey(String, CanyonRegisterEntityField),
+ // table_name, constraint_name
+ DeleteTablePrimaryKey(String, String),
+}
+
+impl Transaction for TableOperation {}
+
+#[async_trait]
+impl DatabaseOperation for TableOperation {
+ async fn generate_sql(&self, datasource: &DatasourceConfig<'static>) {
+ let db_type = datasource.properties.db_type;
+
+ let stmt = match self {
+ TableOperation::CreateTable(table_name, table_fields) => {
+ if db_type == DatabaseType::PostgreSql {
+ format!(
+ "CREATE TABLE {:?} ({:?});",
+ table_name,
+ table_fields
+ .iter()
+ .map(|entity_field| format!(
+ "{} {}",
+ entity_field.field_name,
+ entity_field.to_postgres_syntax()
+ ))
+ .collect::<Vec<String>>()
+ .join(", ")
+ )
+ .replace('"', "")
+ } else if db_type == DatabaseType::SqlServer {
+ format!(
+ "CREATE TABLE {:?} ({:?});",
+ table_name,
+ table_fields
+ .iter()
+ .map(|entity_field| format!(
+ "{} {}",
+ entity_field.field_name,
+ entity_field.to_sqlserver_syntax()
+ ))
+ .collect::<Vec<String>>()
+ .join(", ")
+ )
+ .replace('"', "")
+ } else {
+ todo!()
+ }
+ }
+
+ TableOperation::AlterTableName(old_table_name, new_table_name) => {
+ if db_type == DatabaseType::PostgreSql {
+ format!("ALTER TABLE {old_table_name} RENAME TO {new_table_name};")
+ } else if db_type == DatabaseType::SqlServer {
+ /*
+ Notes: Brackets around `old_table_name`, e.g.
+ exec sp_rename ['league'], 'leagues' // NOT VALID!
+ are only allowed for compound names split by a dot.
+ exec sp_rename ['random.league'], 'leagues' // OK
+
+ CARE! This doesn't mean that we are including the schema.
+ exec sp_rename ['dbo.random.league'], 'leagues' // OK
+ exec sp_rename 'dbo.league', 'leagues' // OK - Schema doesn't need brackets
+
+ Due to the automatic name mapping from Rust to the database and vice versa,
+ this behaviour won't be allowed for now, only through the table_name parameter
+ on the CanyonEntity annotation.
+ */
+ format!("exec sp_rename '{old_table_name}', '{new_table_name}';")
+ } else {
+ todo!()
+ }
+ }
+
+ TableOperation::AddTableForeignKey(
+ table_name,
+ foreign_key_name,
+ column_foreign_key,
+ table_to_reference,
+ column_to_reference,
+ ) => {
+ if db_type == DatabaseType::PostgreSql {
+ format!(
+ "ALTER TABLE {table_name} ADD CONSTRAINT {foreign_key_name} \
+ FOREIGN KEY ({column_foreign_key}) REFERENCES {table_to_reference} ({column_to_reference});"
+ )
+ } else if db_type == DatabaseType::SqlServer {
+ todo!("[MS-SQL -> Operation not yet supported by Canyon for SQL Server]")
+ } else {
+ todo!()
+ }
+ }
+
+ TableOperation::DeleteTableForeignKey(table_with_foreign_key, constraint_name) => {
+ if db_type == DatabaseType::PostgreSql {
+ format!(
+ "ALTER TABLE {table_with_foreign_key} DROP CONSTRAINT {constraint_name};",
+ )
+ } else if db_type == DatabaseType::SqlServer {
+ todo!("[MS-SQL -> Operation not yet supported by Canyon for SQL Server]")
+ } else {
+ todo!()
+ }
+ }
+
+ TableOperation::AddTablePrimaryKey(table_name, entity_field) => {
+ if db_type == DatabaseType::PostgreSql {
+ format!(
+ "ALTER TABLE {table_name} ADD PRIMARY KEY (\"{}\");",
+ entity_field.field_name
+ )
+ } else if db_type == DatabaseType::SqlServer {
+ todo!("[MS-SQL -> Operation not yet supported by Canyon for SQL Server]")
+ } else {
+ todo!()
+ }
+ }
+
+ TableOperation::DeleteTablePrimaryKey(table_name, primary_key_name) => {
+ if db_type == DatabaseType::PostgreSql || db_type == DatabaseType::SqlServer {
+ format!("ALTER TABLE {table_name} DROP CONSTRAINT {primary_key_name} CASCADE;")
+ } else {
+ todo!()
+ }
+ }
+ };
+
+ if QUERIES_TO_EXECUTE
+ .lock()
+ .unwrap()
+ .contains_key(datasource.name)
+ {
+ QUERIES_TO_EXECUTE
+ .lock()
+ .unwrap()
+ .get_mut(datasource.name)
+ .unwrap()
+ .push(stmt);
+ } else {
+ QUERIES_TO_EXECUTE
+ .lock()
+ .unwrap()
+ .insert(datasource.name, vec![stmt]);
+ }
+ }
+}
+
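As a concrete reference for the PostgreSQL arm of `AddTableForeignKey`, the `format!` above expands as follows (variable names invented; note the line-continuation backslash collapses the leading whitespace of the second line):

```rust
let (table, fk_name, fk_col, ref_table, ref_col) =
    ("league", "league_ext_id_fkey", "ext_id", "leagues", "id");
let stmt = format!(
    "ALTER TABLE {table} ADD CONSTRAINT {fk_name} \
     FOREIGN KEY ({fk_col}) REFERENCES {ref_table} ({ref_col});"
);
assert_eq!(
    stmt,
    "ALTER TABLE league ADD CONSTRAINT league_ext_id_fkey \
     FOREIGN KEY (ext_id) REFERENCES leagues (id);"
);
```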
+/// Helper to relate the operations that Canyon should do when a change on a field is detected
+#[derive(Debug)]
+#[allow(dead_code)]
+enum ColumnOperation {
+ CreateColumn(String, CanyonRegisterEntityField),
+ DeleteColumn(String, String),
+ // AlterColumnName,
+ AlterColumnType(String, CanyonRegisterEntityField),
+ AlterColumnDropNotNull(String, CanyonRegisterEntityField),
+ // SQL Server specific operation - SQL Server can't drop a NOT NULL column
+ DropNotNullBeforeDropColumn(String, String, String),
+ AlterColumnSetNotNull(String, CanyonRegisterEntityField),
+ // TODO if implemented through annotations, modify for both GENERATED {ALWAYS, BY DEFAULT}
+ AlterColumnAddIdentity(String, CanyonRegisterEntityField),
+ AlterColumnDropIdentity(String, CanyonRegisterEntityField),
+}
+
+impl Transaction for ColumnOperation {}
+
+#[async_trait]
+impl DatabaseOperation for ColumnOperation {
+ async fn generate_sql(&self, datasource: &DatasourceConfig<'static>) {
+ let db_type = datasource.properties.db_type;
+
+ let stmt = match self {
+ ColumnOperation::CreateColumn(table_name, entity_field) =>
+ if db_type == DatabaseType::PostgreSql {
+ format!(
+ "ALTER TABLE {} ADD COLUMN \"{}\" {};",
+ table_name,
+ entity_field.field_name,
+ entity_field.to_postgres_syntax())
+ } else if db_type == DatabaseType::SqlServer {
+ format!(
+ "ALTER TABLE {} ADD \"{}\" {};",
+ table_name,
+ entity_field.field_name,
+ entity_field.to_sqlserver_syntax()
+ )
+ } else {
+ todo!()
+ },
+ ColumnOperation::DeleteColumn(table_name, column_name) => {
+ // TODO Check if the operation for SQL Server is different
+ format!("ALTER TABLE {table_name} DROP COLUMN {column_name};")
+ },
+ ColumnOperation::AlterColumnType(table_name, entity_field) =>
+ if db_type == DatabaseType::PostgreSql {
+ format!(
+ "ALTER TABLE {table_name} ALTER COLUMN \"{}\" TYPE {};",
+ entity_field.field_name,
+ entity_field.to_postgres_alter_syntax())
+ } else if db_type == DatabaseType::SqlServer {
+ todo!("[MS-SQL -> Operation not yet supported by Canyon for SQL Server]")
+ } else {
+ todo!()
+ },
+ ColumnOperation::AlterColumnDropNotNull(table_name, entity_field) =>
+ if db_type == DatabaseType::PostgreSql {
+ format!(
+ "ALTER TABLE {} ALTER COLUMN \"{}\" DROP NOT NULL;",
+ table_name, entity_field.field_name
+ )
+ } else if db_type == DatabaseType::SqlServer {
+ format!(
+ "ALTER TABLE {} ALTER COLUMN {} {} NULL",
+ table_name, entity_field.field_name, entity_field.to_sqlserver_alter_syntax()
+ )
+ } else {
+ todo!()
+ },
+
+ ColumnOperation::DropNotNullBeforeDropColumn(table_name, column_name, column_datatype) =>
+ format!(
+ "ALTER TABLE {table_name} ALTER COLUMN {column_name} {column_datatype} NULL; DECLARE @tableName VARCHAR(MAX) = '{table_name}'
+ DECLARE @columnName VARCHAR(MAX) = '{column_name}'
+ DECLARE @ConstraintName nvarchar(200)
+ SELECT @ConstraintName = Name
+ FROM SYS.DEFAULT_CONSTRAINTS
+ WHERE PARENT_OBJECT_ID = OBJECT_ID(@tableName)
+ AND PARENT_COLUMN_ID = (
+ SELECT column_id FROM sys.columns
+ WHERE NAME = @columnName AND object_id = OBJECT_ID(@tableName))
+ IF @ConstraintName IS NOT NULL
+ EXEC('ALTER TABLE '+@tableName+' DROP CONSTRAINT ' + @ConstraintName);"
+ ),
+
+ ColumnOperation::AlterColumnSetNotNull(table_name, entity_field) => format!(
+ "ALTER TABLE {table_name} ALTER COLUMN \"{}\" SET NOT NULL;", entity_field.field_name
+ ),
+
+ ColumnOperation::AlterColumnAddIdentity(table_name, entity_field) => format!(
+ "ALTER TABLE {table_name} ALTER COLUMN \"{}\" ADD GENERATED ALWAYS AS IDENTITY;", entity_field.field_name
+ ),
+
+ ColumnOperation::AlterColumnDropIdentity(table_name, entity_field) => format!(
+ "ALTER TABLE {table_name} ALTER COLUMN \"{}\" DROP IDENTITY;", entity_field.field_name
+ ),
+ };
+
+ if QUERIES_TO_EXECUTE
+ .lock()
+ .unwrap()
+ .contains_key(datasource.name)
+ {
+ QUERIES_TO_EXECUTE
+ .lock()
+ .unwrap()
+ .get_mut(datasource.name)
+ .unwrap()
+ .push(stmt);
+ } else {
+ QUERIES_TO_EXECUTE
+ .lock()
+ .unwrap()
+ .insert(datasource.name, vec![stmt]);
+ }
+ }
+}
+
+/// Helper for operations involving sequences
+#[derive(Debug)]
+#[allow(dead_code)]
+enum SequenceOperation {
+ ModifySequence(String, CanyonRegisterEntityField),
+}
+
+impl Transaction for SequenceOperation {}
+
+#[async_trait]
+impl DatabaseOperation for SequenceOperation {
+ async fn generate_sql(&self, datasource: &DatasourceConfig<'static>) {
+ let db_type = datasource.properties.db_type;
+
+ let stmt = match self {
+ SequenceOperation::ModifySequence(table_name, entity_field) => {
+ if db_type == DatabaseType::PostgreSql {
+ format!(
+ "SELECT setval(pg_get_serial_sequence('{}', '{}'), max(\"{}\")) from {};",
+ table_name, entity_field.field_name, entity_field.field_name, table_name
+ )
+ } else if db_type == DatabaseType::SqlServer {
+ todo!("[MS-SQL -> Operation not yet supported by Canyon for SQL Server]")
+ } else {
+ todo!()
+ }
+ }
+ };
+
+ if QUERIES_TO_EXECUTE
+ .lock()
+ .unwrap()
+ .contains_key(datasource.name)
+ {
+ QUERIES_TO_EXECUTE
+ .lock()
+ .unwrap()
+ .get_mut(datasource.name)
+ .unwrap()
+ .push(stmt);
+ } else {
+ QUERIES_TO_EXECUTE
+ .lock()
+ .unwrap()
+ .insert(datasource.name, vec![stmt]);
+ }
+ }
+}
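The lock-check-then-insert dance on `QUERIES_TO_EXECUTE` is repeated verbatim in all three `generate_sql` impls above. A possible simplification, sketched here and not part of this changeset, would be the `HashMap` entry API:

```rust
// Equivalent to the contains_key / get_mut / insert sequence, in one chain:
QUERIES_TO_EXECUTE
    .lock()
    .unwrap()
    .entry(datasource.name)
    .or_insert_with(Vec::new)
    .push(stmt);
```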
diff --git a/canyon_observer/src/migrations/register_types.rs b/canyon_observer/src/migrations/register_types.rs
new file mode 100644
index 00000000..b101cb77
--- /dev/null
+++ b/canyon_observer/src/migrations/register_types.rs
@@ -0,0 +1,265 @@
+use regex::Regex;
+
+use crate::constants::{postgresql_type, regex_patterns, rust_type, sqlserver_type};
+
+/// This file contains `Rust` types that represent an entry on the `CanyonRegister`
+/// where `Canyon` tracks the user types that it has to manage
+
+/// Gets the necessary identifiers of a CanyonEntity to run the comparison
+/// against the database schemas
+#[derive(Debug, Clone, Default)]
+pub struct CanyonRegisterEntity<'a> {
+ pub entity_name: &'a str,
+ pub user_table_name: Option<&'a str>,
+ pub user_schema_name: Option<&'a str>,
+ pub entity_fields: Vec<CanyonRegisterEntityField>,
+}
+
+/// Complementary type for a field that represents a struct field that maps
+/// some real database column data
+#[derive(Debug, Clone, Default)]
+pub struct CanyonRegisterEntityField {
+ pub field_name: String,
+ pub field_type: String,
+ pub annotations: Vec<String>,
+}
+
+impl CanyonRegisterEntityField {
+ /// Return the postgres datatype and parameters to create a column for a given rust type
+ pub fn to_postgres_syntax(&self) -> String {
+ let rust_type_clean = self.field_type.replace(' ', "");
+
+ match rust_type_clean.as_str() {
+ rust_type::I8 | rust_type::U8 => {
+ String::from(&format!("{} NOT NULL", postgresql_type::INTEGER))
+ }
+ rust_type::OPT_I8 | rust_type::OPT_U8 => String::from(postgresql_type::INTEGER),
+
+ rust_type::I16 | rust_type::U16 => {
+ String::from(&format!("{} NOT NULL", postgresql_type::INTEGER))
+ }
+ rust_type::OPT_I16 | rust_type::OPT_U16 => String::from(postgresql_type::INTEGER),
+
+ rust_type::I32 | rust_type::U32 => {
+ String::from(&format!("{} NOT NULL", postgresql_type::INTEGER))
+ }
+ rust_type::OPT_I32 | rust_type::OPT_U32 => String::from(postgresql_type::INTEGER),
+
+ rust_type::I64 | rust_type::U64 => {
+ String::from(&format!("{} NOT NULL", postgresql_type::BIGINT))
+ }
+ rust_type::OPT_I64 | rust_type::OPT_U64 => String::from(postgresql_type::BIGINT),
+
+ rust_type::STRING => String::from(&format!("{} NOT NULL", postgresql_type::TEXT)),
+ rust_type::OPT_STRING => String::from(postgresql_type::TEXT),
+
+ rust_type::BOOL => String::from(&format!("{} NOT NULL", postgresql_type::BOOLEAN)),
+ rust_type::OPT_BOOL => String::from(postgresql_type::BOOLEAN),
+
+ rust_type::NAIVE_DATE => String::from(&format!("{} NOT NULL", postgresql_type::DATE)),
+ rust_type::OPT_NAIVE_DATE => String::from(postgresql_type::DATE),
+
+ rust_type::NAIVE_TIME => String::from(&format!("{} NOT NULL", postgresql_type::TIME)),
+ rust_type::OPT_NAIVE_TIME => String::from(postgresql_type::TIME),
+
+ rust_type::NAIVE_DATE_TIME => {
+ String::from(&format!("{} NOT NULL", postgresql_type::DATETIME))
+ }
+ rust_type::OPT_NAIVE_DATE_TIME => String::from(postgresql_type::DATETIME),
+ &_ => todo!("Not supported datatype for this migrations version"),
+ }
+ }
+
+ /// Return the datatype and parameters to create a column for a given rust type
+ /// for Microsoft SQL Server
+ pub fn to_sqlserver_syntax(&self) -> String {
+ let rust_type_clean = self.field_type.replace(' ', "");
+
+ match rust_type_clean.as_str() {
+ rust_type::I8 | rust_type::U8 => {
+ String::from(&format!("{} NOT NULL", sqlserver_type::INT))
+ }
+ rust_type::OPT_I8 | rust_type::OPT_U8 => String::from(sqlserver_type::INT),
+
+ rust_type::I16 | rust_type::U16 => {
+
String::from(&format!("{} NOT NULL", sqlserver_type::INT)) + } + rust_type::OPT_I16 | rust_type::OPT_U16 => String::from(sqlserver_type::INT), + + rust_type::I32 | rust_type::U32 => { + String::from(&format!("{} NOT NULL", sqlserver_type::INT)) + } + rust_type::OPT_I32 | rust_type::OPT_U32 => String::from(sqlserver_type::INT), + + rust_type::I64 | rust_type::U64 => { + String::from(&format!("{} NOT NULL", sqlserver_type::BIGINT)) + } + rust_type::OPT_I64 | rust_type::OPT_U64 => String::from(sqlserver_type::BIGINT), + + rust_type::STRING => { + String::from(&format!("{} NOT NULL DEFAULT ''", sqlserver_type::NVARCHAR)) + } + rust_type::OPT_STRING => String::from(sqlserver_type::NVARCHAR), + + rust_type::BOOL => String::from(&format!("{} NOT NULL", sqlserver_type::BIT)), + rust_type::OPT_BOOL => String::from(sqlserver_type::BIT), + + rust_type::NAIVE_DATE => String::from(&format!("{} NOT NULL", sqlserver_type::DATE)), + rust_type::OPT_NAIVE_DATE => String::from(sqlserver_type::DATE), + + rust_type::NAIVE_TIME => String::from(&format!("{} NOT NULL", sqlserver_type::TIME)), + rust_type::OPT_NAIVE_TIME => String::from(sqlserver_type::TIME), + + rust_type::NAIVE_DATE_TIME => { + String::from(&format!("{} NOT NULL", sqlserver_type::DATETIME)) + } + rust_type::OPT_NAIVE_DATE_TIME => String::from(sqlserver_type::DATETIME), + &_ => todo!("Not supported datatype for this migrations version"), + } + } + + pub fn to_postgres_alter_syntax(&self) -> String { + let mut rust_type_clean = self.field_type.replace(' ', ""); + let rs_type_is_optional = self.field_type.to_uppercase().starts_with("OPTION"); + + if rs_type_is_optional { + let type_regex = Regex::new(regex_patterns::EXTRACT_RUST_OPT_REGEX).unwrap(); + let capture_rust_type = type_regex.captures(rust_type_clean.as_str()).unwrap(); + rust_type_clean = capture_rust_type + .name("rust_type") + .unwrap() + .as_str() + .to_string(); + } + + match rust_type_clean.as_str() { + rust_type::I8 | rust_type::U8 | rust_type::OPT_I8 | rust_type::OPT_U8 => { + String::from(postgresql_type::INT_8) + } + rust_type::I16 | rust_type::U16 | rust_type::OPT_I16 | rust_type::OPT_U16 => { + String::from(postgresql_type::SMALL_INT) + } + rust_type::I32 | rust_type::U32 | rust_type::OPT_I32 | rust_type::OPT_U32 => { + String::from(postgresql_type::INTEGER) + } + rust_type::I64 | rust_type::U64 | rust_type::OPT_I64 | rust_type::OPT_U64 => { + String::from(postgresql_type::BIGINT) + } + rust_type::STRING | rust_type::OPT_STRING => String::from(postgresql_type::TEXT), + rust_type::BOOL | rust_type::OPT_BOOL => String::from(postgresql_type::BOOLEAN), + rust_type::NAIVE_DATE | rust_type::OPT_NAIVE_DATE => { + String::from(postgresql_type::DATE) + } + rust_type::NAIVE_TIME | rust_type::OPT_NAIVE_TIME => { + String::from(postgresql_type::TIME) + } + rust_type::NAIVE_DATE_TIME | rust_type::OPT_NAIVE_DATE_TIME => { + String::from(postgresql_type::DATETIME) + } + &_ => todo!("Not supported datatype for this migrations version"), + } + } + + pub fn to_sqlserver_alter_syntax(&self) -> String { + let mut rust_type_clean = self.field_type.replace(' ', ""); + let rs_type_is_optional = self.field_type.to_uppercase().starts_with("OPTION"); + + if rs_type_is_optional { + let type_regex = Regex::new(regex_patterns::EXTRACT_RUST_OPT_REGEX).unwrap(); + let capture_rust_type = type_regex.captures(rust_type_clean.as_str()).unwrap(); + rust_type_clean = capture_rust_type + .name("rust_type") + .unwrap() + .as_str() + .to_string(); + } + + match rust_type_clean.as_str() { + rust_type::I8 | 
rust_type::U8 | rust_type::OPT_I8 | rust_type::OPT_U8 => {
+ String::from(sqlserver_type::TINY_INT)
+ }
+ rust_type::I16 | rust_type::U16 | rust_type::OPT_I16 | rust_type::OPT_U16 => {
+ String::from(sqlserver_type::SMALL_INT)
+ }
+ rust_type::I32 | rust_type::U32 | rust_type::OPT_I32 | rust_type::OPT_U32 => {
+ String::from(sqlserver_type::INT)
+ }
+ rust_type::I64 | rust_type::U64 | rust_type::OPT_I64 | rust_type::OPT_U64 => {
+ String::from(sqlserver_type::BIGINT)
+ }
+ rust_type::STRING | rust_type::OPT_STRING => String::from(sqlserver_type::NVARCHAR),
+ rust_type::BOOL | rust_type::OPT_BOOL => String::from(sqlserver_type::BIT),
+ rust_type::NAIVE_DATE | rust_type::OPT_NAIVE_DATE => String::from(sqlserver_type::DATE),
+ rust_type::NAIVE_TIME | rust_type::OPT_NAIVE_TIME => String::from(sqlserver_type::TIME),
+ rust_type::NAIVE_DATE_TIME | rust_type::OPT_NAIVE_DATE_TIME => {
+ String::from(sqlserver_type::DATETIME)
+ }
+ &_ => todo!("Not supported datatype for this migrations version"),
+ }
+ }
+
+ /// Return the datatype and parameters to create an id column, given the corresponding "CanyonRegisterEntityField"
+ /// with the correct format for PostgreSQL
+ fn _to_postgres_id_syntax(&self) -> String {
+ let has_pk_annotation = self
+ .annotations
+ .iter()
+ .find(|a| a.starts_with("Annotation: PrimaryKey"));
+
+ let pk_is_autoincremental = match has_pk_annotation {
+ Some(annotation) => annotation.contains("true"),
+ None => false,
+ };
+
+ let numeric = vec!["i16", "i32", "i64"];
+
+ let postgres_datatype_syntax = Self::to_postgres_syntax(self);
+
+ if numeric.contains(&self.field_type.as_str()) && pk_is_autoincremental {
+ format!("{postgres_datatype_syntax} PRIMARY KEY GENERATED ALWAYS AS IDENTITY")
+ } else {
+ format!("{postgres_datatype_syntax} PRIMARY KEY")
+ }
+ }
+
+ /// Return the datatype and parameters to create an id column, given the corresponding "CanyonRegisterEntityField"
+ /// with the correct format for Microsoft SQL Server
+ fn _to_sqlserver_id_syntax(&self) -> String {
+ let has_pk_annotation = self
+ .annotations
+ .iter()
+ .find(|a| a.starts_with("Annotation: PrimaryKey"));
+
+ let pk_is_autoincremental = match has_pk_annotation {
+ Some(annotation) => annotation.contains("true"),
+ None => false,
+ };
+
+ let numeric = vec!["i16", "i32", "i64"];
+
+ let sqlserver_datatype_syntax = Self::to_sqlserver_syntax(self);
+
+ if numeric.contains(&self.field_type.as_str()) && pk_is_autoincremental {
+ format!("{sqlserver_datatype_syntax} IDENTITY PRIMARY KEY")
+ } else {
+ format!("{sqlserver_datatype_syntax} PRIMARY KEY")
+ }
+ }
+
+ /// Return if the field is autoincremental
+ pub fn is_autoincremental(&self) -> bool {
+ let has_pk_annotation = self
+ .annotations
+ .iter()
+ .find(|a| a.starts_with("Annotation: PrimaryKey"));
+
+ let pk_is_autoincremental = match has_pk_annotation {
+ Some(annotation) => annotation.contains("true"),
+ None => false,
+ };
+
+ let numeric = vec!["i16", "i32", "i64"];
+
+ numeric.contains(&self.field_type.as_str()) && pk_is_autoincremental
+ }
+}
diff --git a/canyon_observer/src/postgresql/information_schema.rs b/canyon_observer/src/postgresql/information_schema.rs
deleted file mode 100644
index 9a6f4054..00000000
--- a/canyon_observer/src/postgresql/information_schema.rs
+++ /dev/null
@@ -1,96 +0,0 @@
-/// `PostgreSQL` entities for map the multiple rows that are related to one table, and the multiple
-/// columns that are related to those table
-pub mod information_schema_row_mapper {
- /// The representation of a row of results when the 
`information schema` it's queried - /// - /// Too see an example, see the docs of [`CanyonHandler`] on fn@get_info_of_entities() - #[derive(Debug)] - pub struct RowTable { - pub table_name: String, - pub columns: Vec, - } - - /// A column retrives from the `information schema` query that belongs to a [`RowTable`] element, - /// representing one of the total columns of a table - #[derive(Debug)] - pub struct RelatedColumn { - pub column_identifier: String, - pub datatype: String, - pub value: ColumnTypeValue, - } - - /// Represents the relation between a real value stored inside a [`RelatedColumn`] - /// and the datatype of that value - #[derive(Debug)] - pub enum ColumnTypeValue { - StringValue(Option), - IntValue(Option), - NoneValue, - } -} - - -/// This mod contains the structs necessary to map the data retrieved when the -/// `information schema` PostgreSQL table it's queried and after being parsed that rows -/// into elements of type [`InformationSchemaRowMapper`], in order to fetch and model the -/// data about the tables (and it's columns) that it's handling -pub mod rows_to_table_mapper { - - /// Model that represents the database entities that belongs to the current schema. - /// - /// Basically, it's an agrupation of rows of results when Canyon queries the `information schema` - /// table, grouping by table name (one [`DatabaseTable`] is the rows that contains the information - /// of a table) - #[derive(Debug, Clone)] - pub struct DatabaseTable<'a> { - pub table_name: String, - pub columns: Vec>, - } - - /// Represents the *metadata* associated with a column that belongs to a `PostgreSQL` table. - #[derive(Debug, Clone)] - pub struct DatabaseTableColumn<'a> { - pub column_name: String, - pub postgres_datatype: String, - pub character_maximum_length: Option, - pub is_nullable: bool, - // Care, postgres type is varchar - pub column_default: Option, - pub numeric_precision: Option, - pub numeric_scale: Option, - pub numeric_precision_radix: Option, - pub datetime_precision: Option, - pub interval_type: Option, - pub foreign_key_info: Option, - pub foreign_key_name: Option, - pub primary_key_info: Option, - pub primary_key_name: Option, - pub is_identity: bool, - pub identity_generation: Option, - pub phantom: &'a str, // TODO - } - - impl<'a> DatabaseTableColumn<'a> { - pub fn new() -> DatabaseTableColumn<'a> { - Self { - column_name: String::new(), - postgres_datatype: String::new(), - character_maximum_length: None, - is_nullable: true, - column_default: None, - numeric_precision: None, - numeric_scale: None, - numeric_precision_radix: None, - datetime_precision: None, - interval_type: None, - foreign_key_info: None, - foreign_key_name: None, - primary_key_info: None, - primary_key_name: None, - is_identity: false, - identity_generation: None, - phantom: "", - } - } - } -} \ No newline at end of file diff --git a/canyon_observer/src/postgresql/migrations.rs b/canyon_observer/src/postgresql/migrations.rs deleted file mode 100644 index 74b9fe29..00000000 --- a/canyon_observer/src/postgresql/migrations.rs +++ /dev/null @@ -1,794 +0,0 @@ -/// File that contains all the datatypes and logic to perform the migrations -/// over a `PostgreSQL` database - -use std::{ops::Not, sync::MutexGuard}; -use std::fmt::{Debug, Display}; -use async_trait::async_trait; - -use canyon_crud::crud::Transaction; -use regex::Regex; -use crate::memory::CanyonMemory; -use crate::QUERIES_TO_EXECUTE; -use crate::postgresql::information_schema::rows_to_table_mapper::DatabaseTable; - -use 
super::register_types::{CanyonRegisterEntityField, CanyonRegisterEntity}; - - -/// Responsible of generating the queries to sync the database status with the -/// Rust source code managed by Canyon, for succesfully make the migrations -#[derive(Debug)] -pub struct DatabaseSyncOperations { - operations: Vec>, - drop_primary_key_operations: Vec>, - set_primary_key_operations: Vec>, - constrains_operations: Vec> -} - -impl Transaction for DatabaseSyncOperations {} - -impl DatabaseSyncOperations { - pub fn new() -> Self { - Self { - operations: Vec::new(), - drop_primary_key_operations: Vec::new(), - set_primary_key_operations: Vec::new(), - constrains_operations: Vec::new() - } - } - - pub async fn fill_operations<'a>( - &mut self, - canyon_memory: CanyonMemory, - canyon_tables: Vec>, - database_tables: Vec> - ) { - // For each entity (table) on the register - for canyon_register_entity in canyon_tables { - - let table_name = canyon_register_entity.entity_name; - - // true if this table on the register is already on the database - let table_on_database = Self::check_table_on_database(&table_name, &database_tables); - - // If the table isn't on the database we push a new operation to the collection, - // either to create a new table or to rename an existing one. - if !table_on_database { - let table_renamed = canyon_memory.table_rename.contains_key(&*table_name); - - // canyon_memory holds a hashmap of the tables who must changed their name. - // If this table name is present, we dont create a new one, just rename - if table_renamed { - // let old_table_name = data.canyon_memory.table_rename.to_owned().get(&table_name.to_owned()); - let otn = canyon_memory.table_rename.get(table_name).unwrap().to_owned().clone(); - - Self::push_table_rename::(self, otn,&table_name); - - // TODO Change foreign_key constrain name on database - continue; - } - // If not, we push an operation to create a new one - else { - Self::add_new_table::<&str>(self, table_name, canyon_register_entity.entity_fields.clone()); - } - - let cloned_fields = canyon_register_entity.entity_fields.clone(); - // We iterate over the fields/columns seeking for constrains to add - for field in cloned_fields - .iter() - .filter( - |column| column.annotations.len() > 0 - ) { - field.annotations.iter() - .for_each( |attr| - { - if attr.starts_with("Annotation: ForeignKey") { - Self::add_foreign_key_with_annotation::<&str, &String>( - self, &field.annotations, table_name, &field.field_name, - ); - } - if attr.starts_with("Annotation: PrimaryKey") { - Self::add_primary_key::<&str>( - self, table_name, field.clone() - ); - - Self::add_identity::<&str>( - self, table_name, field.clone() - ); - } - } - ); - } - } else { - // We check if each of the columns in this table of the register is in the database table. 
- // We get the names and add them to a vector of strings - let columns_in_table = Self::columns_in_table( - canyon_register_entity.entity_fields.clone(), - &database_tables, - &table_name, - ); - - // For each field (name, type) in this table of the register - for field in canyon_register_entity.entity_fields.clone() { - // Case when the column doesn't exist on the database - // We push a new column operation to the collection for each one - if !columns_in_table.contains(&field.field_name) { - Self::add_column_to_table::<&str>(self, &table_name, field.clone()); - - // We added the founded constraints on the field attributes - for attr in &field.annotations { - if attr.starts_with("Annotation: ForeignKey") { - Self::add_foreign_key_with_annotation::<&str, &String>( - self, &field.annotations, table_name, &field.field_name, - ); - } - if attr.starts_with("Annotation: PrimaryKey") { - - Self::add_primary_key::<&str>( - self, table_name, field.clone(), - ); - - Self::add_identity::<&str>( - self, table_name, field.clone(), - ); - } - } - - - } - // Case when the column exist on the database - else { - - let d = database_tables.clone(); - let database_table = d - .into_iter() - .find(|x| x.table_name == *table_name) - .unwrap(); - - let database_field = database_table.columns - .iter().find(|x| x.column_name == field.field_name) - .expect("Field annt exists"); - - let mut database_field_postgres_type: String = String::new(); - match database_field.postgres_datatype.as_str() { - "integer" => { - database_field_postgres_type.push_str("i32"); - } - "bigint" => { - database_field_postgres_type.push_str("i64"); - } - "text" | "character varying" => { - database_field_postgres_type.push_str("String"); - } - "date" => { - database_field_postgres_type.push_str("NaiveDate"); - } - _ => {} - } - - if database_field.is_nullable { - database_field_postgres_type = format!("Option<{}>", database_field_postgres_type); - } - - if field.field_type != database_field_postgres_type { - if field.field_type.starts_with("Option") { - self.constrains_operations.push( - Box::new( - ColumnOperation::AlterColumnDropNotNull(table_name, field.clone()) - ) - ); - } else{ - self.constrains_operations.push( - Box::new( - ColumnOperation::AlterColumnSetNotNull(table_name, field.clone()) - ) - ); - } - Self::change_column_type(self, table_name, field.clone()); - } - - - let field_is_primary_key = field.annotations.iter() - .any(|anno| anno.starts_with("Annotation: PrimaryKey")); - - let field_is_foreign_key = field.annotations.iter() - .any(|anno| anno.starts_with("Annotation: ForeignKey")); - // TODO Checking Foreign Key attrs. 
Refactor to a database rust attributes matcher - // TODO Evaluate changing the name of the primary key if it already exists in the database - - // -------- PRIMARY KEY CASE ---------------------------- - - // Case when field contains a primary key annotation, and it's not already on database, add it to constrains_operations - if field_is_primary_key && database_field.primary_key_info.is_none() { - Self::add_primary_key::<&str>( - self, table_name, field.clone(), - ); - Self::add_identity::<&str>( - self, table_name, field.clone(), - ); - } - - // Case when field don't contains a primary key annotation, but there is already one in the database column - else if !field_is_primary_key && database_field.primary_key_info.is_some() { - Self::drop_primary_key::( - self, - table_name.to_string(), - database_field.primary_key_name - .as_ref() - .expect("PrimaryKey constrain name not found") - .to_string(), - ); - - if database_field.is_identity { - Self::drop_identity::<&str>( - self, table_name, field.clone() - ); - } - - } - - // -------- FOREIGN KEY CASE ---------------------------- - - // Case when field contains a foreign key annotation, and it's not already on database, add it to constrains_operations - if field_is_foreign_key && database_field.foreign_key_name.is_none() { - if database_field.foreign_key_name.is_none() { - Self::add_foreign_key_with_annotation::<&str, &String>( - self, &field.annotations, table_name, &field.field_name, - ) - } - } - // Case when field contains a foreign key annotation, and there is already one in the database - else if field_is_foreign_key && database_field.foreign_key_name.is_some() { - // Will contain the table name (on index 0) and column name (on index 1) pointed to by the foreign key - let annotation_data = Self::extract_foreign_key_annotation(&field.annotations); - - // Example of information in foreign_key_info: FOREIGN KEY (league) REFERENCES leagues(id) - let references_regex = Regex::new(r"\w+\s\w+\s\((?P\w+)\)\s\w+\s(?P\w+)\((?P\w+)\)").unwrap(); - - let captures_references = references_regex.captures(database_field.foreign_key_info.as_ref().expect("Regex - foreign key info")).expect("Regex - foreign key info not found"); - - let current_column = captures_references.name("current_column").expect("Regex - Current column not found").as_str().to_string(); - let ref_table = captures_references.name("ref_table").expect("Regex - Ref tablenot found").as_str().to_string(); - let ref_column = captures_references.name("ref_column").expect("Regex - Ref column not found").as_str().to_string(); - - // If entity foreign key is not equal to the one on database, a constrains_operations is added to delete it and add a new one. 
- if field.field_name != current_column || annotation_data.0 != ref_table || annotation_data.1 != ref_column { - Self::delete_foreign_key_with_references::( - self, - table_name.to_string(), - database_field.foreign_key_name - .as_ref() - .expect("Annotation foreign key constrain name not found") - .to_string() - ); - - Self::add_foreign_key_with_references( - self, - annotation_data.0, - annotation_data.1, - table_name, - field.field_name.clone(), - ) - } - } - // Case when field don't contains a foreign key annotation, but there is already one in the database column - else if !field_is_foreign_key && database_field.foreign_key_name.is_some() { - Self::delete_foreign_key_with_references::( - self, - table_name.to_string(), - database_field.foreign_key_name - .as_ref() - .expect("ForeignKey constrain name not found") - .to_string() - ); - } - } - } - - // Filter the list of columns in the corresponding table of the database for the current table of the register, - // and look for columns that don't exist in the table of the register - let columns_to_remove: Vec = Self::columns_to_remove( - &database_tables, - canyon_register_entity.entity_fields.clone(), - &table_name, - ); - - // If we have columns to remove, we push a new operation to the vector for each one - if columns_to_remove.is_empty().not() { - for column in &columns_to_remove { - Self::delete_column_from_table(self, table_name, column.to_owned()) - } - } - } - } - - - for operation in &self.operations { - operation.execute().await - } - for drop_primary_key_operation in &self.drop_primary_key_operations { - drop_primary_key_operation.execute().await - } - for set_primary_key_operation in &self.set_primary_key_operations { - set_primary_key_operation.execute().await - } - for constrain_operation in &self.constrains_operations { - constrain_operation.execute().await - } - } - - /// Make the detected migrations for the next Canyon-SQL run - pub async fn from_query_register() { - let queries: &MutexGuard> = &QUERIES_TO_EXECUTE.lock().unwrap(); - - for i in 0..queries.len() - 1 { - let query_to_execute = queries - .get(i) - .expect(format!("Failed to retrieve query from the register at index: {}", i).as_str()); - - Self::query( - query_to_execute, - vec![], - "" - ).await - .ok() - .expect(format!("Failed the migration query: {:?}", queries.get(i).unwrap()).as_str()); - // TODO Represent failable operation by logging (if configured by the user) to a text file the Result variant - // TODO Ask for user input? 
- } - } - - fn check_table_on_database<'a>( - table_name: &'a str, database_tables: &Vec> - ) -> bool { - database_tables - .iter() - .any(|v| &v.table_name == table_name) - } - - fn columns_in_table( - canyon_columns: Vec, - database_tables: &[DatabaseTable<'_>], - table_name: &str, - ) -> Vec { - canyon_columns.iter() - .filter(|a| database_tables.iter() - .find( |x| x.table_name == table_name).expect("Error collecting database tables") - .columns - .iter() - .map(|x| x.column_name.to_string()) - .any(|x| x == a.field_name)) - .map(|a| a.field_name.to_string()).collect() - } - - fn columns_to_remove( - database_tables: &[DatabaseTable<'_>], - canyon_columns: Vec, - table_name: &str, - ) -> Vec { - database_tables.iter() - .find(|x| x.table_name == table_name).expect("Error parsing the columns to remove") - .columns - .iter() - .filter(|a| canyon_columns.iter() - .map(|x| x.field_name.to_string()) - .any(|x| x == a.column_name).not()) - .map(|a| a.column_name.to_string()).collect() - } - - - fn push_table_rename(&mut self, old_table_name: T, new_table_name: U) - where - T: Into + Debug + Display + Sync + 'static, - U: Into + Debug + Display + Sync + 'static - { - self.operations.push( - Box::new( - TableOperation::AlterTableName::<_, _, &str, &str, &str>( - old_table_name, - new_table_name, - ) - ) - ); - } - - fn add_new_table(&mut self, table_name: T, columns: Vec) - where T: Into + Debug + Display + Sync + 'static - { - self.operations.push( - Box::new( - TableOperation::CreateTable::<_, &str, &str, &str, &str>( - table_name, - columns, - ) - ) - ); - } - - fn extract_foreign_key_annotation(field_annotations: &Vec) -> (String, String) - { - let opt_fk_annotation = field_annotations.iter(). - find(|anno| anno.starts_with("Annotation: ForeignKey")); - if let Some(fk_annotation) = opt_fk_annotation { - let annotation_data = fk_annotation - .split(',') - .filter(|x| !x.contains("Annotation: ForeignKey")) // After here, we only have the "table" and the "column" attribute values - .map(|x| - x.split(':').collect::>() - .get(1) - .expect("Error. 
Unable to split annotations") - .trim() - .to_string() - ).collect::>(); - - let table_to_reference = annotation_data - .get(0) - .expect("Error extracting table ref from FK annotation") - .to_string(); - let column_to_reference = annotation_data - .get(1) - .expect("Error extracting column ref from FK annotation") - .to_string(); - - (table_to_reference, column_to_reference) - } else { - panic!("Detected a Foreign Key attribute when does not exists on the user's code"); - } - - } - - fn add_foreign_key_with_annotation<'a, U, V>( - &mut self, - field_annotations: &'a Vec, - table_name: U, - column_foreign_key: V, - ) where - U: Into + Debug + Display + Sync, - V: Into + Debug + Display + Sync - { - - let annotation_data = Self::extract_foreign_key_annotation(field_annotations); - - let table_to_reference = annotation_data.0; - let column_to_reference = annotation_data.1; - - let foreign_key_name = format!("{}_{}_fkey", table_name, &column_foreign_key); - - self.constrains_operations.push( - Box::new( - TableOperation::AddTableForeignKey::( - table_name.to_string(), foreign_key_name, column_foreign_key.to_string(), table_to_reference, column_to_reference, - ) - ) - ); - } - - fn add_foreign_key_with_references( - &mut self, - table_to_reference: T, - column_to_reference: U, - table_name: V, - column_foreign_key: W, - ) where - T: Into + Debug + Display + Sync + 'static, - U: Into + Debug + Display + Sync + 'static, - V: Into + Debug + Display + Sync + 'static, - W: Into + Debug + Display + Sync + 'static - { - let foreign_key_name = format!("{}_{}_fkey", &table_name, &column_foreign_key); - - - self.constrains_operations.push( - Box::new( - TableOperation::AddTableForeignKey( - table_name, foreign_key_name, column_foreign_key, table_to_reference, column_to_reference, - ) - ) - ); - } - - fn delete_foreign_key_with_references( - &mut self, - table_with_foreign_key: T, - constrain_name: T, - ) where - T: Into + Debug + Display + Sync + 'static - { - self.constrains_operations.push( - Box::new( - TableOperation::DeleteTableForeignKey::( - // table_with_foreign_key,constrain_name - table_with_foreign_key, constrain_name, - ) - ) - ); - } - - - fn add_primary_key(&mut self, table_name: T, field: CanyonRegisterEntityField) - where T: Into + Debug + Display + Sync + 'static - { - self.set_primary_key_operations.push( - Box::new( - TableOperation::AddTablePrimaryKey::( - table_name, field - ) - ) - ); - } - - fn drop_primary_key(&mut self, table_name: T, primary_key_name: T) - where T: Into + Debug + Display + Sync + 'static - { - self.drop_primary_key_operations.push( - Box::new( - TableOperation::DeleteTablePrimaryKey::( - table_name, primary_key_name - ) - ) - ); - } - - fn add_identity(&mut self, table_name: T, field: CanyonRegisterEntityField) - where T: Into + Debug + Display + Sync + 'static - { - - - self.constrains_operations.push( - Box::new( - ColumnOperation::AlterColumnAddIdentity( - table_name.to_string(), field.clone(), - ) - ) - ); - - self.constrains_operations.push( - Box::new( - SequenceOperation::ModifySequence( - table_name, field, - ) - ) - ); - } - - fn drop_identity(&mut self, table_name: T, field: CanyonRegisterEntityField) - where T: Into + Debug + Display + Sync + 'static - { - self.constrains_operations.push( - Box::new( - ColumnOperation::AlterColumnDropIdentity( - table_name, field, - ) - ) - ); - } - - - - fn add_column_to_table(&mut self, table_name: T, field: CanyonRegisterEntityField) - where T: Into + Debug + Display + Sync + 'static - { - self.operations.push( - 
Box::new( - ColumnOperation::CreateColumn( - table_name, field - ) - ) - ); - } - - fn change_column_type(&mut self, table_name: T, field: CanyonRegisterEntityField) - where T: Into + Debug + Display + Sync + 'static - { - self.operations.push( - Box::new( - ColumnOperation::AlterColumnType( - table_name, field, - ) - ) - ); - } - - fn delete_column_from_table(&mut self, table_name: T, column: String) - where T: Into + Debug + Display + Sync + 'static - { - self.operations.push( - Box::new( - ColumnOperation::DeleteColumn(table_name, column) - ) - ); - } -} - -/// Trait that enables implementors to execute migration queries -#[async_trait] -trait DatabaseOperation: Debug { - async fn execute(&self); -} - -/// Helper to relate the operations that Canyon should do when it's managing a schema -#[derive(Debug)] -enum TableOperation { - CreateTable(T, Vec), - // old table_name, new table_name - AlterTableName(T, U), - // table_name, foreign_key_name, column_foreign_key, table_to_reference, column_to_reference - AddTableForeignKey(T, U, V, W, X), - // table_with_foreign_key, constrain_name - DeleteTableForeignKey(T, T), - // table_name, entity_field, column_name - AddTablePrimaryKey(T, CanyonRegisterEntityField), - // table_name, constrain_name - DeleteTablePrimaryKey(T, T) - -} - - -impl Transaction for TableOperation - where - T: Into + Debug + Display + Sync, - U: Into + Debug + Display + Sync, - V: Into + Debug + Display + Sync, - W: Into + Debug + Display + Sync, - X: Into + Debug + Display + Sync - {} - -#[async_trait] -impl DatabaseOperation for TableOperation - where - T: Into + Debug + Display + Sync, - U: Into + Debug + Display + Sync, - V: Into + Debug + Display + Sync, - W: Into + Debug + Display + Sync, - X: Into + Debug + Display + Sync -{ - async fn execute(&self) { - let stmt = match &*self { - TableOperation::CreateTable(table_name, table_fields) => - format!( - "CREATE TABLE {table_name} ({:?});", - table_fields.iter().map(|entity_field| - format!("{} {}", entity_field.field_name, entity_field.field_type_to_postgres()) - ).collect::>().join(", ") - ).replace('"', ""), - - TableOperation::AlterTableName(old_table_name, new_table_name) => - format!("ALTER TABLE {old_table_name} RENAME TO {new_table_name};"), - - TableOperation::AddTableForeignKey( - table_name, - foreign_key_name, - column_foreign_key, - table_to_reference, - column_to_reference - ) => format!( - "ALTER TABLE {table_name} ADD CONSTRAINT {foreign_key_name} \ - FOREIGN KEY ({column_foreign_key}) REFERENCES {table_to_reference} ({column_to_reference});" - ), - - TableOperation::DeleteTableForeignKey(table_with_foreign_key, constrain_name) => - format!("ALTER TABLE {table_with_foreign_key} DROP CONSTRAINT {constrain_name};"), - - TableOperation::AddTablePrimaryKey( - table_name, - entity_field - ) => format!( - "ALTER TABLE {} ADD PRIMARY KEY (\"{}\");", - table_name, - entity_field.field_name - ), - - TableOperation::DeleteTablePrimaryKey( - table_name, - primary_key_name - ) => format!( - "ALTER TABLE {table_name} DROP CONSTRAINT {primary_key_name} CASCADE;" - ), - - }; - - QUERIES_TO_EXECUTE.lock().unwrap().push(stmt) - } -} - -/// Helper to relate the operations that Canyon should do when a change on a field should -#[derive(Debug)] -enum ColumnOperation + std::fmt::Debug + Display + Sync> { - CreateColumn(T, CanyonRegisterEntityField), - DeleteColumn(T, String), - // AlterColumnName, - AlterColumnType(T, CanyonRegisterEntityField), - AlterColumnDropNotNull(T, CanyonRegisterEntityField), - 
AlterColumnSetNotNull(T, CanyonRegisterEntityField), - // TODO if implement throught annotations, modify for both GENERATED {ALWAYS,BY DEFAULT} - AlterColumnAddIdentity(T, CanyonRegisterEntityField), - AlterColumnDropIdentity(T, CanyonRegisterEntityField) - -} - -impl Transaction for ColumnOperation - where T: Into + std::fmt::Debug + Display + Sync -{} - -#[async_trait] -impl DatabaseOperation for ColumnOperation - where T: Into + std::fmt::Debug + Display + Sync -{ - async fn execute(&self) { - let stmt = match &*self { - ColumnOperation::CreateColumn(table_name, entity_field) => - format!( - "ALTER TABLE {table_name} ADD COLUMN \"{}\" {};", - entity_field.field_name, - entity_field.field_type_to_postgres() - ), - ColumnOperation::DeleteColumn(table_name, column_name) => - format!("ALTER TABLE {table_name} DROP COLUMN \"{column_name}\";"), - ColumnOperation::AlterColumnType(table_name, entity_field) => - format!( - "ALTER TABLE {} ALTER COLUMN \"{}\" TYPE {};", - table_name, - entity_field.field_name, - entity_field.to_postgres_alter_syntax() - ), - ColumnOperation::AlterColumnDropNotNull(table_name, entity_field) => - format!( - "ALTER TABLE {} ALTER COLUMN \"{}\" DROP NOT NULL;", - table_name, - entity_field.field_name - ), - ColumnOperation::AlterColumnSetNotNull(table_name, entity_field) => - format!( - "ALTER TABLE {} ALTER COLUMN \"{}\" SET NOT NULL;", - table_name, - entity_field.field_name - ), - - ColumnOperation::AlterColumnAddIdentity(table_name, entity_field) => - format!( - "ALTER TABLE {} ALTER COLUMN \"{}\" ADD GENERATED ALWAYS AS IDENTITY;", - table_name, - entity_field.field_name - ), - - ColumnOperation::AlterColumnDropIdentity(table_name, entity_field) => - format!( - "ALTER TABLE {} ALTER COLUMN \"{}\" DROP IDENTITY;", - table_name, - entity_field.field_name - ), - - }; - - QUERIES_TO_EXECUTE.lock().unwrap().push(stmt) - } -} - - -/// Helper for operations involving sequences -#[derive(Debug)] -enum SequenceOperation + std::fmt::Debug + Display + Sync> { - ModifySequence(T, CanyonRegisterEntityField), -} - -impl Transaction for SequenceOperation - where T: Into + std::fmt::Debug + Display + Sync -{} - -#[async_trait] -impl DatabaseOperation for SequenceOperation - where T: Into + std::fmt::Debug + Display + Sync -{ - async fn execute(&self) { - let stmt = match &*self { - SequenceOperation::ModifySequence(table_name, entity_field) => - format!( - "SELECT setval(pg_get_serial_sequence('{}', '{}'), max(\"{}\")) from {};", - table_name, - entity_field.field_name, - entity_field.field_name, - table_name - ) - }; - QUERIES_TO_EXECUTE.lock().unwrap().push(stmt) - } -} \ No newline at end of file diff --git a/canyon_observer/src/postgresql/mod.rs b/canyon_observer/src/postgresql/mod.rs deleted file mode 100644 index 173a3b35..00000000 --- a/canyon_observer/src/postgresql/mod.rs +++ /dev/null @@ -1,3 +0,0 @@ -pub mod information_schema; -pub mod migrations; -pub mod register_types; \ No newline at end of file diff --git a/canyon_observer/src/postgresql/register_types.rs b/canyon_observer/src/postgresql/register_types.rs deleted file mode 100644 index cbb31011..00000000 --- a/canyon_observer/src/postgresql/register_types.rs +++ /dev/null @@ -1,174 +0,0 @@ -use regex::Regex; - -use crate::constants::{ - rust_type, - // postgresql_type -}; - -/// This file contains `Rust` types that represents an entry of the [`CanyonRegister`] -/// where `Canyon` tracks the user types that has to manage for him - -/// Gets the necessary identifiers of a CanyonEntity to make it the comparative 
-/// against the database schemas -#[derive(Debug, Clone)] -pub struct CanyonRegisterEntity<'a> { - pub entity_name: &'a str, - pub user_table_name: Option<&'a str>, - pub user_schema_name: Option<&'a str>, - pub entity_fields: Vec, -} - -impl<'a> CanyonRegisterEntity<'a> { - pub fn new() -> Self { - Self { - entity_name: "", - user_table_name: None, - user_schema_name: None, - entity_fields: Vec::new(), - } - } - - /// Returns the String representation for the current "CanyonRegisterEntity" instance. - /// Being "CanyonRegisterEntity" the representation of a table, the String will be formed by each of its "CanyonRegisterEntityField", - /// formatting each as "name of the column" "postgres representation of the type" "parameters for the column" - pub fn entity_fields_as_string(&self) -> String { - - let mut fields_strings:Vec = Vec::new(); - - for field in &self.entity_fields { - - let column_postgres_syntax = field.field_type_to_postgres(); - let field_as_string = format!("{} {}", field.field_name, column_postgres_syntax); - fields_strings.push(field_as_string); - } - - fields_strings.join(" ") - } - } - -/// Complementary type for a field that represents a struct field that maps -/// some real database column data -#[derive(Debug, Clone)] -pub struct CanyonRegisterEntityField { - pub field_name: String, - pub field_type: String, - pub annotations: Vec -} - -impl CanyonRegisterEntityField { - pub fn new() -> CanyonRegisterEntityField { - Self { - field_name: String::new(), - field_type: String::new(), - annotations: Vec::new() - } - } - - /// Return the postgres datatype and parameters to create a column for a given rust type - fn to_postgres_syntax(&self) -> String { - let mut rust_type_clean = self.field_type.replace(' ',""); - let rs_type_is_optional = self.field_type.to_uppercase().starts_with("OPTION"); - - if rs_type_is_optional { - let type_regex = Regex::new(r"[Oo][Pp][Tt][Ii][Oo][Nn]<(?P[\w<>]+)>").unwrap(); - let capture_rust_type = type_regex.captures(rust_type_clean.as_str()).unwrap(); - rust_type_clean = capture_rust_type.name("rust_type").unwrap().as_str().to_string(); - } - - let mut postgres_type = String::new(); - - match rust_type_clean.as_str() { - rust_type::I32 => postgres_type.push_str("INTEGER NOT NULL"), - rust_type::OPT_I32 => postgres_type.push_str("INTEGER"), - rust_type::I64 => postgres_type.push_str("BIGINT NOT NULL"), - rust_type::OPT_I64 => postgres_type.push_str("BIGINT"), - rust_type::STRING => postgres_type.push_str("TEXT NOT NULL"), - rust_type::OPT_STRING => postgres_type.push_str("TEXT"), - rust_type::BOOL => postgres_type.push_str("BOOLEAN NOT NULL"), - rust_type::OPT_BOOL => postgres_type.push_str("BOOLEAN"), - rust_type::NAIVE_DATE => postgres_type.push_str("DATE NOT NULL"), - rust_type::OPT_NAIVE_DATE => postgres_type.push_str("DATE"), - &_ => postgres_type.push_str("DATE") - } - - postgres_type - } - - pub fn to_postgres_alter_syntax(&self) -> String { - let mut rust_type_clean = self.field_type.replace(' ',""); - let rs_type_is_optional = self.field_type.to_uppercase().starts_with("OPTION"); - - if rs_type_is_optional { - let type_regex = Regex::new(r"[Oo][Pp][Tt][Ii][Oo][Nn]<(?P[\w<>]+)>").unwrap(); - let capture_rust_type = type_regex.captures(rust_type_clean.as_str()).unwrap(); - rust_type_clean = capture_rust_type.name("rust_type").unwrap().as_str().to_string(); - } - - let mut postgres_type = String::new(); - - match rust_type_clean.as_str() { - rust_type::I32 => postgres_type.push_str("INTEGER"), - rust_type::OPT_I32 => 
-            rust_type::OPT_I32 => postgres_type.push_str("INTEGER"),
-            rust_type::I64 => postgres_type.push_str("BIGINT"),
-            rust_type::OPT_I64 => postgres_type.push_str("BIGINT"),
-            rust_type::STRING => postgres_type.push_str("TEXT"),
-            rust_type::OPT_STRING => postgres_type.push_str("TEXT"),
-            rust_type::BOOL => postgres_type.push_str("BOOLEAN"),
-            rust_type::OPT_BOOL => postgres_type.push_str("BOOLEAN"),
-            rust_type::NAIVE_DATE => postgres_type.push_str("DATE"),
-            rust_type::OPT_NAIVE_DATE => postgres_type.push_str("DATE"),
-            &_ => postgres_type.push_str("DATE")
-        }
-
-        postgres_type
-    }
-
-    /// Return the datatype and parameters to create an id column, given the corresponding "CanyonRegisterEntityField"
-    fn to_postgres_id_syntax(&self) -> String {
-        let has_pk_annotation = self.annotations.iter().find(
-            |a| a.starts_with("Annotation: PrimaryKey")
-        );
-
-        let pk_is_autoincremental = match has_pk_annotation {
-            Some(annotation) => annotation.contains("true"),
-            None => false
-        };
-
-        let numeric = vec!["i16", "i32", "i64"];
-
-        let postgres_datatype_syntax = Self::to_postgres_syntax(self);
-
-        if numeric.contains(&self.field_type.as_str()) && pk_is_autoincremental {
-            format!("{} PRIMARY KEY GENERATED ALWAYS AS IDENTITY", postgres_datatype_syntax)
-        } else {
-            format!("{} PRIMARY KEY", postgres_datatype_syntax)
-        }
-    }
-
-    /// Return if the field is autoincremental
-    pub fn is_autoincremental(&self) -> bool {
-        let has_pk_annotation = self.annotations.iter().find(
-            |a| a.starts_with("Annotation: PrimaryKey")
-        );
-
-        let pk_is_autoincremental = match has_pk_annotation {
-            Some(annotation) => annotation.contains("true"),
-            None => false
-        };
-
-        let numeric = vec!["i16", "i32", "i64"];
-
-        numeric.contains(&self.field_type.as_str()) && pk_is_autoincremental
-    }
-
-    pub fn field_type_to_postgres(&self) -> String {
-        let is_pk = self.annotations.iter().find(
-            |a| a.starts_with("Annotation: PrimaryKey")
-        );
-
-        match is_pk {
-            Some(_) => Self::to_postgres_id_syntax(&self),
-            None => Self::to_postgres_syntax(&self)
-        }
-    }
-}
\ No newline at end of file
diff --git a/canyon_sql/Cargo.toml b/canyon_sql/Cargo.toml
index c3bb7e75..029b3355 100755
--- a/canyon_sql/Cargo.toml
+++ b/canyon_sql/Cargo.toml
@@ -1,28 +1,14 @@
 [package]
 name = "canyon_sql"
-version = "1.0.0"
+version = "0.0.1"
 edition = "2021"
 authors = ["Alex Vergara, Gonzalo Busto"]
 
 [dependencies]
-tokio-postgres = { version = "0.7.2", features=["with-chrono-0_4"] }
-toml = { version = "0.5.8" }
-regex = "1.5"
-
-# Forbidden Rust dark arts
-tokio = { version = "1.9.0", features = ["full"] }
 async-trait = { version = "0.1.50" }
-futures = "0.3.21"
-
-# Serialization/Deserialization
-serde_json = "1.0.59"
-serde = "1.0.90"
-
-# Debug
-partialdebug = "0.2.0"
 
 # Project crates
-canyon_macros = { path = "../canyon_macros" }
-canyon_observer = { path = "../canyon_observer" }
-canyon_crud = { path = "../canyon_crud" }
-canyon_connection = { path = "../canyon_connection" }
+canyon_macros = { version = "0.0.1", path = "../canyon_macros" }
+canyon_observer = { version = "0.0.1", path = "../canyon_observer" }
+canyon_crud = { version = "0.0.1", path = "../canyon_crud" }
+canyon_connection = { version = "0.0.1", path = "../canyon_connection" }
diff --git a/canyon_sql/src/lib.rs b/canyon_sql/src/lib.rs
index 72c962f9..330b8ed4 100755
--- a/canyon_sql/src/lib.rs
+++ b/canyon_sql/src/lib.rs
@@ -1,38 +1,58 @@
-// Common reexports (dependencies)
-pub use tokio;
-pub use async_trait;
-pub use tokio_postgres;
-
-// Macros crate
-pub extern crate canyon_macros;
-pub extern crate canyon_crud;
-pub extern crate canyon_observer;
-pub extern crate canyon_connection;
-
-/// This reexports allows the users to import all the available
-/// `Canyon-SQL` features in a single statement like:
-///
-/// `use canyon_sql::*`
-///
-/// and avoids polluting the macros with imports.
-///
-/// The decision of reexports all this crates was made because the macros
-/// was importing this ones already, but if two structures was defined on the
-/// same file, the imported names into it collinding, avoiding let the user
-/// to have multiple structs in only one file.
-///
-/// This particular feature (or decision) will be opened for revision
-/// 'cause it's not definitive to let this forever
-pub use canyon_macros::*;
-pub use canyon_observer::*;
-pub use canyon_crud::*;
-pub use canyon_connection::*;
-pub use async_trait::*;
-pub use tokio_postgres::Row;
+//! The root crate of the `Canyon-SQL` project.
+//!
+//! This is where all the available functionality and features reach the
+//! topmost level: they are grouped and made visible through this crate,
+//! forming the *public API* of the library.
+
+/// Reexported elements at the root of the public API
+pub mod migrations {
+    pub use canyon_observer::migrations::{handler, processor};
+}
+
+/// The top-level reexports. Here we define the paths to some essential
+/// items in `Canyon-SQL`, like the `main` macro and the IT macro.
+pub use canyon_macros::main;
+
+/// Public API for the `Canyon-SQL` proc-macros, and for the external ones
+pub mod macros {
+    pub use async_trait::*;
+    pub use canyon_macros::*;
+}
+
+/// The `crud` module reexports the public elements of the `canyon_crud`
+/// crate, exposing them through the public API
+pub mod crud {
+    pub use canyon_crud::bounds;
+    pub use canyon_crud::crud::*;
+    pub use canyon_crud::mapper::*;
+    pub use canyon_crud::result::*;
+    pub use canyon_crud::DatabaseType;
+}
+
+/// Reexports the query elements from the `crud` crate
+pub mod query {
+    pub use canyon_crud::query_elements::operators;
+    pub use canyon_crud::query_elements::{query::*, query_builder::*};
+}
+
+/// Reexports the database clients available within Canyon
+pub mod db_clients {
+    pub use canyon_connection::tiberius;
+    pub use canyon_connection::tokio_postgres;
+}
+
+/// Reexports the needed runtime dependencies
+pub mod runtime {
+    pub use canyon_connection::futures;
+    pub use canyon_connection::init_connections_cache;
+    pub use canyon_connection::tokio;
+    pub use canyon_connection::tokio_util;
+    pub use canyon_connection::CANYON_TOKIO_RUNTIME;
+}
 
 /// Module for reexport the `chrono` crate with the allowed public and available types in Canyon
 pub mod date_time {
     pub use canyon_crud::chrono::{
-        DateTime, NaiveDate, NaiveTime, NaiveDateTime, Utc, FixedOffset
+        DateTime, FixedOffset, NaiveDate, NaiveDateTime, NaiveTime, Utc,
    };
-}
\ No newline at end of file
+}
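With the glob reexports gone, downstream code imports through the new facade modules instead of `use canyon_sql::*`. A minimal sketch of what consumer imports might look like under this layout (the concrete items behind the `crud` and `query` globs are whatever `canyon_crud` exposes; only the `Row` path is grounded directly in the `db_clients` reexport above):

// Hypothetical consumer imports against the reorganized facade:
use canyon_sql::crud::*;                          // CRUD traits, mapper, result types
use canyon_sql::query::*;                         // query and query-builder elements
use canyon_sql::db_clients::tokio_postgres::Row;  // replaces the old root-level `pub use tokio_postgres::Row`
use canyon_sql::date_time::{NaiveDate, Utc};      // chrono types allowed by Canyon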
diff --git a/docker/docker-compose.yml b/docker/docker-compose.yml
new file mode 100644
index 00000000..04c21b89
--- /dev/null
+++ b/docker/docker-compose.yml
@@ -0,0 +1,29 @@
+services:
+  postgres:
+    image: postgres:14
+    restart: always
+    hostname: postgres
+    environment:
+      - POSTGRES_USER=postgres
+      - POSTGRES_PASSWORD=postgres
+    logging:
+      options:
+        max-size: 10m
+        max-file: "3"
+    ports:
+      - '5438:5432'
+    volumes:
+      - ./postgres-data:/var/lib/postgresql/data
+      # copy the SQL script that creates the tables
+      - ./sql/10-create_tables.sql:/docker-entrypoint-initdb.d/create_tables.sql
+      # copy the SQL script that fills the tables
+      - ./sql/20-fill_tables.sql:/docker-entrypoint-initdb.d/fill_tables.sql
+  sql-server:
+    container_name: sql-server
+    image: mcr.microsoft.com/mssql/server:2022-latest
+    restart: always
+    ports:
+      - "1434:1433"
+    environment:
+      MSSQL_SA_PASSWORD: "SqlServer-10"
+      ACCEPT_EULA: "Y"
diff --git a/docker/sql/10-create_tables.sql b/docker/sql/10-create_tables.sql
new file mode 100644
index 00000000..fdb87b54
--- /dev/null
+++ b/docker/sql/10-create_tables.sql
@@ -0,0 +1,47 @@
+CREATE TABLE public.league (
+    id INTEGER PRIMARY KEY GENERATED ALWAYS AS IDENTITY,
+    ext_id BIGINT NOT NULL,
+    slug TEXT NOT NULL,
+    name TEXT NOT NULL,
+    region TEXT NOT NULL,
+    image_url TEXT NOT NULL
+);
+
+CREATE TABLE public.tournament (
+    id INTEGER PRIMARY KEY GENERATED ALWAYS AS IDENTITY,
+    ext_id BIGINT NOT NULL,
+    slug TEXT NOT NULL,
+    start_date DATE NOT NULL,
+    end_date DATE NOT NULL,
+    league INTEGER REFERENCES league(id)
+);
+
+CREATE TABLE public.player (
+    id INTEGER PRIMARY KEY GENERATED ALWAYS AS IDENTITY,
+    ext_id BIGINT NOT NULL,
+    first_name TEXT NOT NULL,
+    last_name TEXT NOT NULL,
+    summoner_name TEXT NOT NULL,
+    image_url TEXT,
+    role TEXT NOT NULL
+);
+
+CREATE TABLE public.team (
+    id INTEGER PRIMARY KEY GENERATED ALWAYS AS IDENTITY,
+    ext_id BIGINT NOT NULL,
+    slug TEXT NOT NULL,
+    name TEXT NOT NULL,
+    code TEXT NOT NULL,
+    image_url TEXT NOT NULL,
+    alt_image_url TEXT,
+    bg_image_url TEXT,
+    home_league INTEGER REFERENCES league(id)
+);
+
+-- For now, our CI process uses the default data of the postgres instances
+
+-- ALTER TABLE public.league OWNER TO triforce;
+-- ALTER TABLE public.tournament OWNER TO triforce;
+-- ALTER TABLE public.player OWNER TO triforce;
+-- ALTER TABLE public.team OWNER TO triforce;
+-- ALTER TABLE public.team_player OWNER TO triforce;
\ No newline at end of file
diff --git a/docker/sql/20-fill_tables.sql b/docker/sql/20-fill_tables.sql
new file mode 100644
index 00000000..e0452f70
--- /dev/null
+++ b/docker/sql/20-fill_tables.sql
@@ -0,0 +1,289 @@
+-- Values for league table
+INSERT INTO public.league OVERRIDING SYSTEM VALUE VALUES (1, 100695891328981122, 'european-masters', 'European Masters', 'EUROPE', 'http://static.lolesports.com/leagues/EM_Bug_Outline1.png');
+INSERT INTO public.league OVERRIDING SYSTEM VALUE VALUES (2, 101097443346691685, 'turkey-academy-league', 'TAL', 'TURKEY', 'http://static.lolesports.com/leagues/1592516072459_TAL-01-FullonDark.png');
+INSERT INTO public.league OVERRIDING SYSTEM VALUE VALUES (3, 101382741235120470, 'lla', 'LLA', 'LATIN AMERICA', 'http://static.lolesports.com/leagues/1592516315279_LLA-01-FullonDark.png');
+INSERT INTO public.league OVERRIDING SYSTEM VALUE VALUES (4, 104366947889790212, 'pcs', 'PCS', 'HONG KONG, MACAU, TAIWAN', 'http://static.lolesports.com/leagues/1592515942679_PCS-01-FullonDark.png');
+INSERT INTO public.league OVERRIDING SYSTEM VALUE VALUES (5, 105266074488398661, 'superliga', 'SuperLiga', 'EUROPE', 'http://static.lolesports.com/leagues/SL21-V-white.png');
+INSERT INTO public.league OVERRIDING SYSTEM VALUE VALUES (6, 105266088231437431, 'ultraliga', 'Ultraliga', 'EUROPE', 'http://static.lolesports.com/leagues/1639390623717_ULTRALIGA_logo_sq_cyan.png');
+INSERT INTO public.league OVERRIDING SYSTEM VALUE VALUES (7, 105266091639104326, 'primeleague', 'Prime League', 'EUROPE', 'http://static.lolesports.com/leagues/PrimeLeagueResized.png');
+INSERT INTO public.league OVERRIDING SYSTEM VALUE VALUES (8, 105266094998946936, 'pg_nationals', 'PG Nationals', 'EUROPE',
'http://static.lolesports.com/leagues/PG_Nationals_Logo_White.png'); +INSERT INTO public.league OVERRIDING SYSTEM VALUE VALUES (9, 105266098308571975, 'nlc', 'NLC', 'EUROPE', 'http://static.lolesports.com/leagues/1641490922073_nlc_logo.png'); +INSERT INTO public.league OVERRIDING SYSTEM VALUE VALUES (10, 105266101075764040, 'liga_portuguesa', 'Liga Portuguesa', 'EUROPE', 'http://static.lolesports.com/leagues/1649884876085_LPLOL_2021_ISO_G-c389e9ae85c243e4f76a8028bbd9ca1609c2d12bc47c3709a9250d1b3ca43f58.png'); +INSERT INTO public.league OVERRIDING SYSTEM VALUE VALUES (11, 105266103462388553, 'lfl', 'La Ligue Française', 'EUROPE', 'http://static.lolesports.com/leagues/LFL_Logo_2020_black1.png'); +INSERT INTO public.league OVERRIDING SYSTEM VALUE VALUES (12, 105266106309666619, 'hitpoint_masters', 'Hitpoint Masters', 'EUROPE', 'http://static.lolesports.com/leagues/1641465237186_HM_white.png'); +INSERT INTO public.league OVERRIDING SYSTEM VALUE VALUES (13, 105266108767593290, 'greek_legends', 'Greek Legends League', 'EUROPE', 'http://static.lolesports.com/leagues/GLL_LOGO_WHITE.png'); +INSERT INTO public.league OVERRIDING SYSTEM VALUE VALUES (14, 105266111679554379, 'esports_balkan_league', 'Esports Balkan League', 'EUROPE', 'http://static.lolesports.com/leagues/1625735031226_ebl_crest-whitePNG.png'); +INSERT INTO public.league OVERRIDING SYSTEM VALUE VALUES (15, 105549980953490846, 'cblol_academy', 'CBLOL Academy', 'BRAZIL', 'http://static.lolesports.com/leagues/cblol-acad-white.png'); +INSERT INTO public.league OVERRIDING SYSTEM VALUE VALUES (16, 105709090213554609, 'lco', 'LCO', 'OCEANIA', 'http://static.lolesports.com/leagues/lco-color-white.png'); +INSERT INTO public.league OVERRIDING SYSTEM VALUE VALUES (17, 106827757669296909, 'ljl_academy', 'LJL Academy', 'JAPAN', 'http://static.lolesports.com/leagues/1630062215891_ljl-al_logo_gradient.png'); +INSERT INTO public.league OVERRIDING SYSTEM VALUE VALUES (18, 107213827295848783, 'vcs', 'VCS', 'VIETNAM', 'http://static.lolesports.com/leagues/1635953171501_LOL_VCS_Full_White.png'); +INSERT INTO public.league OVERRIDING SYSTEM VALUE VALUES (19, 107407335299756365, 'elite_series', 'Elite Series', 'EUROPE', 'http://static.lolesports.com/leagues/1641287979138_EliteSeriesMarkWhite.png'); +INSERT INTO public.league OVERRIDING SYSTEM VALUE VALUES (20, 107581050201097472, 'honor_division', 'Honor Division', 'LATIN AMERICA', 'http://static.lolesports.com/leagues/1641750781829_divhonormxwhite.png'); +INSERT INTO public.league OVERRIDING SYSTEM VALUE VALUES (21, 107581669166925444, 'elements_league', 'Elements League', 'LATIN AMERICA', 'http://static.lolesports.com/leagues/1642593573670_LOGO_ELEMENTS_White.png'); +INSERT INTO public.league OVERRIDING SYSTEM VALUE VALUES (22, 107582133359724496, 'volcano_discover_league', 'Volcano League', 'LATIN AMERICA', 'http://static.lolesports.com/leagues/1643106609661_VOLCANO-VERTICAL-ColorLight.png'); +INSERT INTO public.league OVERRIDING SYSTEM VALUE VALUES (23, 107582580502415838, 'claro_gaming_stars_league', 'Stars League', 'LATIN AMERICA', 'http://static.lolesports.com/leagues/1642595169468_CLARO-GAMING-STARS-LEAGUE-B.png'); +INSERT INTO public.league OVERRIDING SYSTEM VALUE VALUES (24, 107598636564896416, 'master_flow_league', 'Master Flow League', 'LATIN AMERICA', 'http://static.lolesports.com/leagues/1643794656405_LMF-White.png'); +INSERT INTO public.league OVERRIDING SYSTEM VALUE VALUES (25, 107598951349015984, 'honor_league', 'Honor League', 'LATIN AMERICA', 
'http://static.lolesports.com/leagues/1643036660690_lhe-ColorLight.png'); +INSERT INTO public.league OVERRIDING SYSTEM VALUE VALUES (26, 107603541524308819, 'movistar_fiber_golden_league', 'Golden League', 'LATIN AMERICA', 'http://static.lolesports.com/leagues/1642445572375_MovistarLeague.png'); +INSERT INTO public.league OVERRIDING SYSTEM VALUE VALUES (27, 107898214974993351, 'college_championship', 'College Championship', 'NORTH AMERICA', 'http://static.lolesports.com/leagues/1646396098648_CollegeChampionshiplogo.png'); +INSERT INTO public.league OVERRIDING SYSTEM VALUE VALUES (28, 107921249454961575, 'proving_grounds', 'Proving Grounds', 'NORTH AMERICA', 'http://static.lolesports.com/leagues/1646747578708_download8.png'); +INSERT INTO public.league OVERRIDING SYSTEM VALUE VALUES (29, 108001239847565215, 'tft_esports', 'TFT Last Chance Qualifier', 'INTERNATIONAL', 'http://static.lolesports.com/leagues/1649439858579_tftesport.png'); +INSERT INTO public.league OVERRIDING SYSTEM VALUE VALUES (30, 98767975604431411, 'worlds', 'Worlds', 'INTERNATIONAL', 'http://static.lolesports.com/leagues/1592594612171_WorldsDarkBG.png'); +INSERT INTO public.league OVERRIDING SYSTEM VALUE VALUES (31, 98767991295297326, 'all-star', 'All-Star Event', 'INTERNATIONAL', 'http://static.lolesports.com/leagues/1592594737227_ASEDarkBG.png'); +INSERT INTO public.league OVERRIDING SYSTEM VALUE VALUES (32, 98767991299243165, 'lcs', 'LCS', 'NORTH AMERICA', 'http://static.lolesports.com/leagues/LCSNew-01-FullonDark.png'); +INSERT INTO public.league OVERRIDING SYSTEM VALUE VALUES (33, 98767991302996019, 'lec', 'LEC', 'EUROPE', 'http://static.lolesports.com/leagues/1592516184297_LEC-01-FullonDark.png'); +INSERT INTO public.league OVERRIDING SYSTEM VALUE VALUES (34, 98767991310872058, 'lck', 'LCK', 'KOREA', 'http://static.lolesports.com/leagues/lck-color-on-black.png'); +INSERT INTO public.league OVERRIDING SYSTEM VALUE VALUES (35, 98767991314006698, 'lpl', 'LPL', 'CHINA', 'http://static.lolesports.com/leagues/1592516115322_LPL-01-FullonDark.png'); +INSERT INTO public.league OVERRIDING SYSTEM VALUE VALUES (36, 98767991325878492, 'msi', 'MSI', 'INTERNATIONAL', 'http://static.lolesports.com/leagues/1592594634248_MSIDarkBG.png'); +INSERT INTO public.league OVERRIDING SYSTEM VALUE VALUES (37, 98767991332355509, 'cblol-brazil', 'CBLOL', 'BRAZIL', 'http://static.lolesports.com/leagues/cblol-logo-symbol-offwhite.png'); +INSERT INTO public.league OVERRIDING SYSTEM VALUE VALUES (38, 98767991335774713, 'lck_challengers_league', 'LCK Challengers', 'KOREA', 'http://static.lolesports.com/leagues/lck-cl-white.png'); +INSERT INTO public.league OVERRIDING SYSTEM VALUE VALUES (39, 98767991343597634, 'turkiye-sampiyonluk-ligi', 'TCL', 'TURKEY', 'https://lolstatic-a.akamaihd.net/esports-assets/production/league/turkiye-sampiyonluk-ligi-8r9ofb9.png'); +INSERT INTO public.league OVERRIDING SYSTEM VALUE VALUES (40, 98767991349978712, 'ljl-japan', 'LJL', 'JAPAN', 'http://static.lolesports.com/leagues/1592516354053_LJL-01-FullonDark.png'); +INSERT INTO public.league OVERRIDING SYSTEM VALUE VALUES (41, 98767991355908944, 'lcl', 'LCL', 'COMMONWEALTH OF INDEPENDENT STATES', 'http://static.lolesports.com/leagues/1593016885758_LCL-01-FullonDark.png'); +INSERT INTO public.league OVERRIDING SYSTEM VALUE VALUES (42, 99332500638116286, 'lcs-academy', 'LCS Academy', 'NORTH AMERICA', 'http://static.lolesports.com/leagues/lcs-academy-purple.png'); + + +-- Values for player table +INSERT INTO public.player OVERRIDING SYSTEM VALUE VALUES (1, 98767975906852059, 
'Jaehyeok', 'Park', 'Ruler', 'http://static.lolesports.com/players/1642153903692_GEN_Ruler_F.png', 'bottom'); +INSERT INTO public.player OVERRIDING SYSTEM VALUE VALUES (2, 102186485482484390, 'Hyeonjun', 'Choi', 'Doran', 'http://static.lolesports.com/players/1642153880932_GEN_Doran_F.png', 'top'); +INSERT INTO public.player OVERRIDING SYSTEM VALUE VALUES (3, 98767975916458257, 'Wangho ', 'Han', 'Peanut', 'http://static.lolesports.com/players/1642153896918_GEN_peanut_A.png', 'jungle'); +INSERT INTO public.player OVERRIDING SYSTEM VALUE VALUES (4, 99871276342168416, 'Jihun', 'Jung', 'Chovy', 'http://static.lolesports.com/players/1642153873969_GEN_Chovy_F.png', 'mid'); +INSERT INTO public.player OVERRIDING SYSTEM VALUE VALUES (5, 99871276332909841, 'Siu', 'Son', 'Lehends', 'http://static.lolesports.com/players/1642153887731_GEN_Lehends_F.png', 'support'); +INSERT INTO public.player OVERRIDING SYSTEM VALUE VALUES (6, 104266797862156067, 'Youngjae', 'Ko', 'YoungJae', 'http://static.lolesports.com/players/1642153913037_GEN_YoungJae_F.png', 'jungle'); +INSERT INTO public.player OVERRIDING SYSTEM VALUE VALUES (7, 103495716560217968, 'Hyoseong', 'Oh', 'Vsta', 'http://static.lolesports.com/players/1642154102606_HLE_Vsta_F.png', 'support'); +INSERT INTO public.player OVERRIDING SYSTEM VALUE VALUES (8, 104266795407626462, 'Dongju', 'Lee', 'DuDu', 'http://static.lolesports.com/players/1642154060441_HLE_DuDu_F.png', 'top'); +INSERT INTO public.player OVERRIDING SYSTEM VALUE VALUES (9, 106267386230851795, 'Junghyeun', 'Kim', 'Willer', 'http://static.lolesports.com/players/1642154110676_HLE_Willer_F.png', 'jungle'); +INSERT INTO public.player OVERRIDING SYSTEM VALUE VALUES (10, 100725844995692264, 'Janggyeom', 'Kim', 'OnFleek', 'http://static.lolesports.com/players/1642154084709_HLE_Onfleek_F.png', 'jungle'); +INSERT INTO public.player OVERRIDING SYSTEM VALUE VALUES (11, 105320683858945274, 'Hongjo', 'Kim', 'Karis', 'http://static.lolesports.com/players/1642154066010_HLE_Karis_F.png', 'mid'); +INSERT INTO public.player OVERRIDING SYSTEM VALUE VALUES (12, 104287359934240404, 'Jaehoon', 'Lee', 'SamD', 'http://static.lolesports.com/players/1642154094651_HLE_SamD_F.png', 'bottom'); +INSERT INTO public.player OVERRIDING SYSTEM VALUE VALUES (13, 103461966870841210, 'Wyllian', 'Adriano', 'asta', 'http://static.lolesports.com/players/1643226025146_Astacopy.png', 'jungle'); +INSERT INTO public.player OVERRIDING SYSTEM VALUE VALUES (14, 107559111166843860, 'Felipe', 'Boal', 'Boal', 'http://static.lolesports.com/players/1644095483228_BOALcopiar.png', 'top'); +INSERT INTO public.player OVERRIDING SYSTEM VALUE VALUES (15, 107559255871511679, 'Giovani', 'Baldan', 'Mito', 'http://static.lolesports.com/players/1643226193262_Mitocopy.png', 'top'); +INSERT INTO public.player OVERRIDING SYSTEM VALUE VALUES (16, 103478281329357326, 'Arthur', 'Machado', 'Tutsz', 'http://static.lolesports.com/players/1643226293749_Tutszcopy.png', 'mid'); +INSERT INTO public.player OVERRIDING SYSTEM VALUE VALUES (17, 103743599797538329, 'Luiz Felipe', 'Lobo', 'Flare', 'http://static.lolesports.com/players/1643226082718_Flarecopy.png', 'bottom'); +INSERT INTO public.player OVERRIDING SYSTEM VALUE VALUES (18, 99566408210057665, 'Natan', 'Braz', 'fNb', 'http://static.lolesports.com/players/1643226467130_Fnbcopiar.png', 'top'); +INSERT INTO public.player OVERRIDING SYSTEM VALUE VALUES (19, 99566407771166805, 'Filipe', 'Brombilla', 'Ranger', 'http://static.lolesports.com/players/1643226495379_Rangercopiar.png', 'jungle'); +INSERT INTO public.player 
OVERRIDING SYSTEM VALUE VALUES (20, 107559327426244686, 'Vinícius', 'Corrêa', 'StineR', 'http://static.lolesports.com/players/1643226666563_Silhueta.png', 'jungle'); +INSERT INTO public.player OVERRIDING SYSTEM VALUE VALUES (21, 99566407784212776, 'Bruno', 'Farias', 'Envy', 'http://static.lolesports.com/players/1643226430923_Envycopiar.png', 'mid'); +INSERT INTO public.player OVERRIDING SYSTEM VALUE VALUES (22, 107559338252333149, 'Gabriel', 'Furuuti', 'Fuuu', 'http://static.lolesports.com/players/1643226717192_Silhueta.png', 'mid'); +INSERT INTO public.player OVERRIDING SYSTEM VALUE VALUES (23, 105397181199735591, 'Lucas', 'Fensterseifer', 'Netuno', 'http://static.lolesports.com/players/1644095521735_Netunocopiar.png', 'bottom'); +INSERT INTO public.player OVERRIDING SYSTEM VALUE VALUES (24, 98767975947296513, 'Ygor', 'Freitas', 'RedBert', 'http://static.lolesports.com/players/1643226527904_Redbertcopiar.png', 'support'); +INSERT INTO public.player OVERRIDING SYSTEM VALUE VALUES (25, 100754278890207800, 'Geonyeong', 'Mun', 'Steal', 'http://static.lolesports.com/players/1644905307225_dfm_steal.png', 'jungle'); +INSERT INTO public.player OVERRIDING SYSTEM VALUE VALUES (26, 99566404536983507, 'Chanju', 'Lee', 'Yaharong', 'http://static.lolesports.com/players/1644905328869_dfm_yaharong.png', 'mid'); +INSERT INTO public.player OVERRIDING SYSTEM VALUE VALUES (27, 104016425624023728, 'Jiyoong', 'Lee', 'Harp', 'http://static.lolesports.com/players/1644905257358_dfm_harp.png', 'support'); +INSERT INTO public.player OVERRIDING SYSTEM VALUE VALUES (28, 98767991750309549, 'Danil', 'Reshetnikov', 'Diamondprox', 'http://static.lolesports.com/players/Diamondproxcopy.png', 'jungle'); +INSERT INTO public.player OVERRIDING SYSTEM VALUE VALUES (29, 105700748891875072, 'Nikita ', 'Gudkov', 'Griffon ', 'http://static.lolesports.com/players/1642071116433_placeholder.png', 'mid'); +INSERT INTO public.player OVERRIDING SYSTEM VALUE VALUES (30, 105700946934214905, 'YEVHEN', 'ZAVALNYI', 'Mytant', 'http://static.lolesports.com/players/1642071138150_placeholder.png', 'bottom'); +INSERT INTO public.player OVERRIDING SYSTEM VALUE VALUES (31, 98767991755955790, 'Eduard', 'Abgaryan', 'Edward', 'https://lolstatic-a.akamaihd.net/esports-assets/production/player/gosu-pepper-88anxcql.png', 'support'); +INSERT INTO public.player OVERRIDING SYSTEM VALUE VALUES (32, 106301600611225723, 'Mark', 'Leksin', 'Dreampull', 'http://static.lolesports.com/players/placeholder.jpg', 'top'); +INSERT INTO public.player OVERRIDING SYSTEM VALUE VALUES (33, 107721938219680332, 'Azamat', 'Atkanov', 'TESLA', 'http://static.lolesports.com/players/1643706327509_placeholder.png', 'support'); +INSERT INTO public.player OVERRIDING SYSTEM VALUE VALUES (34, 100725844988653773, 'Su', 'Heo', 'ShowMaker', 'http://static.lolesports.com/players/1642153659258_DK_ShowMaker_F.png', 'mid'); +INSERT INTO public.player OVERRIDING SYSTEM VALUE VALUES (35, 102483272156027229, 'Daegil', 'Seo', 'deokdam', 'http://static.lolesports.com/players/1642153629340_DK_deokdam_F.png', 'bottom'); +INSERT INTO public.player OVERRIDING SYSTEM VALUE VALUES (36, 101388913291808185, 'Hyeonggyu', 'Kim', 'Kellin', 'http://static.lolesports.com/players/1642153649009_DK_Kellin_F.png', 'support'); +INSERT INTO public.player OVERRIDING SYSTEM VALUE VALUES (37, 105705431649727017, 'Taeyoon', 'Noh', 'Burdol', 'http://static.lolesports.com/players/1642153598672_DK_Burdol_F.png', 'top'); +INSERT INTO public.player OVERRIDING SYSTEM VALUE VALUES (38, 103729432252832975, 'Yongho', 'Yoon', 
'Hoya', 'http://static.lolesports.com/players/1642153639500_DK_Hoya_F.png', 'top'); +INSERT INTO public.player OVERRIDING SYSTEM VALUE VALUES (39, 105320703008048707, 'Dongbum', 'Kim', 'Croco', 'http://static.lolesports.com/players/1642154712531_LSB_Croco_R.png', 'jungle'); +INSERT INTO public.player OVERRIDING SYSTEM VALUE VALUES (40, 105501829364113001, 'Hobin', 'Jeon', 'Howling', 'http://static.lolesports.com/players/1642154731703_LSB_Howling_F.png', 'top'); +INSERT INTO public.player OVERRIDING SYSTEM VALUE VALUES (41, 104284310661848687, 'Juhyeon', 'Lee', 'Clozer', 'http://static.lolesports.com/players/1642154706000_LSB_Clozer_R.png', 'mid'); +INSERT INTO public.player OVERRIDING SYSTEM VALUE VALUES (42, 100725844996918206, 'Jaeyeon', 'Kim', 'Dove', 'http://static.lolesports.com/players/1642154719503_LSB_Dove_R.png', 'top'); +INSERT INTO public.player OVERRIDING SYSTEM VALUE VALUES (43, 105530583598805234, 'Myeongjun', 'Lee', 'Envyy', 'http://static.lolesports.com/players/1642154726047_LSB_Envyy_F.png', 'bottom'); +INSERT INTO public.player OVERRIDING SYSTEM VALUE VALUES (44, 105530584812980593, 'Jinhong', 'Kim', 'Kael', 'http://static.lolesports.com/players/1642154745002_LSB_Kael_F.png', 'support'); +INSERT INTO public.player OVERRIDING SYSTEM VALUE VALUES (45, 105501834624360050, 'Sanghoon', 'Yoon', 'Ice', 'http://static.lolesports.com/players/1642154738262_LSB_Ice_F.png', 'bottom'); +INSERT INTO public.player OVERRIDING SYSTEM VALUE VALUES (46, 99322214647978964, 'Daniele', 'di Mauro', 'Jiizuke', 'http://static.lolesports.com/players/eg-jiizuke-2021.png', 'mid'); +INSERT INTO public.player OVERRIDING SYSTEM VALUE VALUES (47, 100787602257283436, 'Minh Loc', 'Pham', 'Zeros', 'https://lolstatic-a.akamaihd.net/esports-assets/production/player/zeros-4keddu17.png', 'top'); +INSERT INTO public.player OVERRIDING SYSTEM VALUE VALUES (48, 104327502738107767, 'Nicolás', 'Rivero', 'Kiefer', 'http://static.lolesports.com/players/1643047365591_Kiefer-2.png', 'mid'); +INSERT INTO public.player OVERRIDING SYSTEM VALUE VALUES (49, 102179902322952953, 'Manuel', 'Scala', 'Pancake', 'http://static.lolesports.com/players/1643047550782_Pancake-5.png', 'bottom'); +INSERT INTO public.player OVERRIDING SYSTEM VALUE VALUES (50, 105516185566739968, 'Cristóbal', 'Arróspide', 'Zothve', 'http://static.lolesports.com/players/1643047287141_Zothve-9.png', 'top'); +INSERT INTO public.player OVERRIDING SYSTEM VALUE VALUES (51, 99871352196477603, 'Gwanghyeop', 'Kim', 'Hoglet', 'http://static.lolesports.com/players/1643047312405_Hoglet-8.png', 'jungle'); +INSERT INTO public.player OVERRIDING SYSTEM VALUE VALUES (52, 99871352193690418, 'Changhun', 'Han', 'Luci', 'http://static.lolesports.com/players/1643047438703_Luci-5.png', 'support'); +INSERT INTO public.player OVERRIDING SYSTEM VALUE VALUES (53, 107635899693202699, 'Thomas', 'Garnsworthy', 'Tronthepom', 'https://static.lolesports.com/players/download.png', 'top'); +INSERT INTO public.player OVERRIDING SYSTEM VALUE VALUES (54, 107635905118503535, 'James', 'Craig', 'Voice', 'https://static.lolesports.com/players/download.png', 'bottom'); +INSERT INTO public.player OVERRIDING SYSTEM VALUE VALUES (55, 107635907168238086, 'Rocco', 'Potter', 'rocco521', 'https://static.lolesports.com/players/download.png', 'support'); +INSERT INTO public.player OVERRIDING SYSTEM VALUE VALUES (56, 107635918452357647, 'Reuben', 'Best', 'Reufury', 'https://static.lolesports.com/players/download.png', 'mid'); +INSERT INTO public.player OVERRIDING SYSTEM VALUE VALUES (57, 107647480732814180, 
'Bryce', 'Zhou', 'Meifan', 'https://static.lolesports.com/players/download.png', 'jungle'); +INSERT INTO public.player OVERRIDING SYSTEM VALUE VALUES (58, 107657801460158111, 'Benny', 'Nguyen', 'District 1', 'https://static.lolesports.com/players/download.png', 'jungle'); +INSERT INTO public.player OVERRIDING SYSTEM VALUE VALUES (59, 105709372540742118, 'Blake', 'Schlage', 'Azus', 'http://static.lolesports.com/players/silhouette.png', 'top'); +INSERT INTO public.player OVERRIDING SYSTEM VALUE VALUES (60, 106350759376304634, 'Shao', 'Zhong', 'Akano', 'https://static.lolesports.com/players/download.png', 'jungle'); +INSERT INTO public.player OVERRIDING SYSTEM VALUE VALUES (61, 107634941727734818, 'Jeremy', 'Lim', 'foreigner', 'https://static.lolesports.com/players/download.png', 'jungle'); +INSERT INTO public.player OVERRIDING SYSTEM VALUE VALUES (62, 105709381466108761, 'Reuben', 'Salb', 'Piglet', 'http://static.lolesports.com/players/silhouette.png', 'bottom'); +INSERT INTO public.player OVERRIDING SYSTEM VALUE VALUES (63, 105747861836427633, 'Yi', 'Chen', 'Thomas Shen', 'https://static.lolesports.com/players/download.png', 'bottom'); +INSERT INTO public.player OVERRIDING SYSTEM VALUE VALUES (64, 107657786356796634, 'Robert', 'Wells', 'Tyran', 'https://static.lolesports.com/players/download.png', 'top'); +INSERT INTO public.player OVERRIDING SYSTEM VALUE VALUES (65, 107657790493529410, 'Da Woon', 'Jeung', 'DaJeung', 'https://static.lolesports.com/players/download.png', 'mid'); +INSERT INTO public.player OVERRIDING SYSTEM VALUE VALUES (66, 107657793079479518, 'Rhett', 'Wiggins', 'Vxpir', 'https://static.lolesports.com/players/download.png', 'support'); +INSERT INTO public.player OVERRIDING SYSTEM VALUE VALUES (67, 107698225510856278, 'Benson', 'Tsai', 'Entrust', 'https://static.lolesports.com/players/download.png', 'support'); +INSERT INTO public.player OVERRIDING SYSTEM VALUE VALUES (68, 103525219435043049, 'Lachlan', 'Keene-O''Keefe', 'N0body', 'https://lolstatic-a.akamaihd.net/esports-assets/production/player/n0body-einjqvyk.png', 'top'); +INSERT INTO public.player OVERRIDING SYSTEM VALUE VALUES (69, 101389749294612370, 'Janik', 'Bartels', 'Jenax', 'http://static.lolesports.com/players/1642003381408_jenax.png', 'top'); +INSERT INTO public.player OVERRIDING SYSTEM VALUE VALUES (70, 101383793865143549, 'Erik', 'Wessén', 'Treatz', 'http://static.lolesports.com/players/1642003495533_treatz.png', 'support'); +INSERT INTO public.player OVERRIDING SYSTEM VALUE VALUES (71, 101389737455173027, 'Daniyal ', 'Gamani', 'Sertuss', 'http://static.lolesports.com/players/1642003453914_sertuss.png', 'mid'); +INSERT INTO public.player OVERRIDING SYSTEM VALUE VALUES (72, 99322214588927915, 'Erberk ', 'Demir', 'Gilius', 'http://static.lolesports.com/players/1642003341615_gilius.png', 'jungle'); +INSERT INTO public.player OVERRIDING SYSTEM VALUE VALUES (73, 99322214668103078, 'Matti', 'Sormunen', 'WhiteKnight', 'http://static.lolesports.com/players/1642003243059_white-knight.png', 'top'); +INSERT INTO public.player OVERRIDING SYSTEM VALUE VALUES (74, 100312190807221865, 'Nikolay ', 'Akatov', 'Zanzarah', 'http://static.lolesports.com/players/1642003282324_zanzarah.png', 'jungle'); +INSERT INTO public.player OVERRIDING SYSTEM VALUE VALUES (75, 99322214243134013, 'Hampus ', 'Abrahamsson', 'promisq', 'http://static.lolesports.com/players/1642003205916_promisq.png', 'support'); +INSERT INTO public.player OVERRIDING SYSTEM VALUE VALUES (76, 99322214620375780, 'Kasper', 'Kobberup', 'Kobbe', 
'http://static.lolesports.com/players/1642003168563_kobbe.png', 'bottom'); +INSERT INTO public.player OVERRIDING SYSTEM VALUE VALUES (77, 99322214238585389, 'Patrik', 'Jiru', 'Patrik', 'http://static.lolesports.com/players/1642004060212_patrik.png', 'bottom'); +INSERT INTO public.player OVERRIDING SYSTEM VALUE VALUES (78, 105519722481834694, 'Mark', 'van Woensel', 'Markoon', 'http://static.lolesports.com/players/1642003998089_markoon.png', 'jungle'); +INSERT INTO public.player OVERRIDING SYSTEM VALUE VALUES (79, 105519724699493915, 'Hendrik', 'Reijenga', 'Advienne', 'http://static.lolesports.com/players/1642003935782_advienne.png', 'support'); +INSERT INTO public.player OVERRIDING SYSTEM VALUE VALUES (80, 99322214616775017, 'Erlend', 'Holm', 'Nukeduck', 'http://static.lolesports.com/players/1642004031937_nukeduck.png', 'mid'); +INSERT INTO public.player OVERRIDING SYSTEM VALUE VALUES (81, 101389713973624205, 'Finn', 'Wiestål', 'Finn', 'http://static.lolesports.com/players/1642003970167_finn.png', 'top'); +INSERT INTO public.player OVERRIDING SYSTEM VALUE VALUES (82, 99322214629661297, 'Mihael', 'Mehle', 'Mikyx', 'http://static.lolesports.com/players/G2_MIKYX2021_summer.png', 'support'); +INSERT INTO public.player OVERRIDING SYSTEM VALUE VALUES (83, 100482247959137902, 'Emil', 'Larsson', 'Larssen', 'http://static.lolesports.com/players/1642003206398_larssen.png', 'mid'); +INSERT INTO public.player OVERRIDING SYSTEM VALUE VALUES (84, 99322214598412197, 'Andrei', 'Pascu', 'Odoamne', 'http://static.lolesports.com/players/1642003264169_odoamne.png', 'top'); +INSERT INTO public.player OVERRIDING SYSTEM VALUE VALUES (85, 102181528883745160, 'Adrian', 'Trybus', 'Trymbi', 'http://static.lolesports.com/players/1642003301461_trymbi.png', 'support'); +INSERT INTO public.player OVERRIDING SYSTEM VALUE VALUES (86, 99566406053904433, 'Geun-seong', 'Kim', 'Malrang', 'http://static.lolesports.com/players/1642003233110_malrang.png', 'jungle'); +INSERT INTO public.player OVERRIDING SYSTEM VALUE VALUES (87, 103536921420956640, 'Markos', 'Stamkopoulos', 'Comp', 'http://static.lolesports.com/players/1642003175488_comp.png', 'bottom'); +INSERT INTO public.player OVERRIDING SYSTEM VALUE VALUES (88, 101388912808637770, 'Hanxi', 'Xia', 'Chelizi', 'http://static.lolesports.com/players/1593128001829_silhouette.png', 'top'); +INSERT INTO public.player OVERRIDING SYSTEM VALUE VALUES (89, 105516474039500339, 'Fei-Yang', 'Luo', 'Captain', 'http://static.lolesports.com/players/silhouette.png', 'mid'); +INSERT INTO public.player OVERRIDING SYSTEM VALUE VALUES (90, 106368709696011395, 'Seung Min', 'Han', 'Patch', 'http://static.lolesports.com/players/silhouette.png', 'support'); +INSERT INTO public.player OVERRIDING SYSTEM VALUE VALUES (91, 107597376599119596, 'HAOTIAN', 'BI', 'yaoyao', 'http://static.lolesports.com/players/1641805668544_placeholder.png', 'support'); +INSERT INTO public.player OVERRIDING SYSTEM VALUE VALUES (92, 101388912811586896, 'Zhilin', 'Su', 'Southwind', 'http://static.lolesports.com/players/1593129903866_ig-southwind-web.png', 'support'); +INSERT INTO public.player OVERRIDING SYSTEM VALUE VALUES (93, 101388912810603854, 'Wang', 'Ding', 'Puff', 'http://static.lolesports.com/players/1593129891452_ig-puff-web.png', 'bottom'); +INSERT INTO public.player OVERRIDING SYSTEM VALUE VALUES (94, 104287371427354335, 'Zhi-Peng', 'Tian', 'New', 'http://static.lolesports.com/players/1593132511529_rng-new-web.png', 'top'); +INSERT INTO public.player OVERRIDING SYSTEM VALUE VALUES (95, 107597380474228562, 'WANG', 
'XIN', 'frigid', 'http://static.lolesports.com/players/1641805726386_placeholder.png', 'jungle'); +INSERT INTO public.player OVERRIDING SYSTEM VALUE VALUES (96, 104287365097341858, 'Peng', 'Guo', 'ppgod', 'http://static.lolesports.com/players/1593135580022_v5-ppgod-web.png', 'support'); +INSERT INTO public.player OVERRIDING SYSTEM VALUE VALUES (97, 103478281359738222, 'Qi-Shen ', 'Ying', 'Photic', 'https://lolstatic-a.akamaihd.net/esports-assets/production/player/photic-k1ttlyxh.png', 'bottom'); +INSERT INTO public.player OVERRIDING SYSTEM VALUE VALUES (98, 103478281402167891, 'Xiao-Long ', 'Li', 'XLB', 'http://static.lolesports.com/players/1593132528126_rng-xlb-web.png', 'jungle'); +INSERT INTO public.player OVERRIDING SYSTEM VALUE VALUES (99, 102186438403674539, 'Jaewon', 'Lee', 'Rich', 'http://static.lolesports.com/players/ns-rich.png', 'top'); +INSERT INTO public.player OVERRIDING SYSTEM VALUE VALUES (100, 99124844346233375, 'Onur', 'Ünalan', 'Zergsting', 'http://static.lolesports.com/players/1633542837856_gs-zergsting-w21.png', 'support'); + + +-- Values for team table +INSERT INTO public.team OVERRIDING SYSTEM VALUE VALUES (1, 100205573495116443, 'geng', 'Gen.G', 'GEN', 'http://static.lolesports.com/teams/1631819490111_geng-2021-worlds.png', 'http://static.lolesports.com/teams/1592589327624_Gen.GGEN-03-FullonLight.png', 'https://lolstatic-a.akamaihd.net/esports-assets/production/team/geng-bnm75bf5.png', 34); +INSERT INTO public.team OVERRIDING SYSTEM VALUE VALUES (2, 100205573496804586, 'hanwha-life-esports', 'Hanwha Life Esports', 'HLE', 'http://static.lolesports.com/teams/1631819564399_hle-2021-worlds.png', 'http://static.lolesports.com/teams/hle-2021-color-on-light2.png', 'https://lolstatic-a.akamaihd.net/esports-assets/production/team/hanwha-life-esports-7kh5kjdc.png', 34); +INSERT INTO public.team OVERRIDING SYSTEM VALUE VALUES (3, 100205576307813373, 'flamengo-esports', 'Flamengo Esports', 'FLA', 'http://static.lolesports.com/teams/1642953977323_Monograma_Branco-Vermelho.png', 'http://static.lolesports.com/teams/1642953977326_Monograma_Branco-Vermelho.png', NULL, 37); +INSERT INTO public.team OVERRIDING SYSTEM VALUE VALUES (4, 100205576309502431, 'furia', 'FURIA', 'FUR', 'http://static.lolesports.com/teams/FURIA---black.png', 'http://static.lolesports.com/teams/FURIA---black.png', 'http://static.lolesports.com/teams/FuriaUppercutFUR.png', 37); +INSERT INTO public.team OVERRIDING SYSTEM VALUE VALUES (5, 100285330168091787, 'detonation-focusme', 'DetonatioN FocusMe', 'DFM', 'http://static.lolesports.com/teams/1631820630246_dfm-2021-worlds.png', 'https://lolstatic-a.akamaihd.net/esports-assets/production/team/detonation-focusme-ajvyc8cy.png', 'https://lolstatic-a.akamaihd.net/esports-assets/production/team/detonation-focusme-4pgp383l.png', 40); +INSERT INTO public.team OVERRIDING SYSTEM VALUE VALUES (6, 100289931264192378, 'team-spirit', 'Team Spirit', 'TSPT', 'http://static.lolesports.com/teams/1643720491696_Whitelogo.png', 'http://static.lolesports.com/teams/1643720491697_Blacklogo.png', NULL, 41); +INSERT INTO public.team OVERRIDING SYSTEM VALUE VALUES (7, 100725845018863243, 'dwg-kia', 'DWG KIA', 'DK', 'http://static.lolesports.com/teams/1631819456274_dwg-kia-2021-worlds.png', 'http://static.lolesports.com/teams/DK-FullonLight.png', 'http://static.lolesports.com/teams/DamwonGamingDWG.png', 34); +INSERT INTO public.team OVERRIDING SYSTEM VALUE VALUES (8, 100725845022060229, 'liiv-sandbox', 'Liiv SANDBOX', 'LSB', 'http://static.lolesports.com/teams/liiv-sandbox-new.png', 
'http://static.lolesports.com/teams/liiv-sandbox-new.png', NULL, 34); +INSERT INTO public.team OVERRIDING SYSTEM VALUE VALUES (9, 101157821444002947, 'nexus-blitz-pro-a', 'Nexus Blitz Blue', 'NXB', 'https://lolstatic-a.akamaihd.net/esports-assets/production/team/nexus-blitz-pro-a-esrcx58b.png', 'https://lolstatic-a.akamaihd.net/esports-assets/production/team/nexus-blitz-pro-a-3w3j1cwx.png', NULL, 31); +INSERT INTO public.team OVERRIDING SYSTEM VALUE VALUES (10, 101157821447017610, 'nexus-blitz-pro-b', 'Nexus Blitz Red', 'NXR', 'https://lolstatic-a.akamaihd.net/esports-assets/production/team/nexus-blitz-pro-b-j6s80wmi.png', 'https://lolstatic-a.akamaihd.net/esports-assets/production/team/nexus-blitz-pro-b-kjtp467.png', NULL, 31); +INSERT INTO public.team OVERRIDING SYSTEM VALUE VALUES (11, 101383792559569368, 'all-knights', 'All Knights', 'AK', 'http://static.lolesports.com/teams/AK-Black-BG.png', 'http://static.lolesports.com/teams/AK-White-BG.png', NULL, 3); +INSERT INTO public.team OVERRIDING SYSTEM VALUE VALUES (12, 101383792887446028, 'mammoth', 'MAMMOTH', 'MEC', 'http://static.lolesports.com/teams/1643079304055_RedMammothIcon.png', 'http://static.lolesports.com/teams/1643079304062_RedMammothIcon.png', NULL, 16); +INSERT INTO public.team OVERRIDING SYSTEM VALUE VALUES (13, 101383792891050518, 'gravitas', 'Gravitas', 'GRV', 'http://static.lolesports.com/teams/gravitas-logo.png', 'http://static.lolesports.com/teams/gravitas-logo.png', NULL, 16); +INSERT INTO public.team OVERRIDING SYSTEM VALUE VALUES (14, 101383793567806688, 'sk-gaming', 'SK Gaming', 'SK', 'http://static.lolesports.com/teams/1643979272144_SK_Monochrome.png', 'http://static.lolesports.com/teams/1643979272151_SK_Monochrome.png', 'https://lolstatic-a.akamaihd.net/esports-assets/production/team/sk-gaming-2cd63tzz.png', 33); +INSERT INTO public.team OVERRIDING SYSTEM VALUE VALUES (15, 101383793569248484, 'astralis', 'Astralis', 'AST', 'http://static.lolesports.com/teams/AST-FullonDark.png', 'http://static.lolesports.com/teams/AST-FullonLight.png', 'http://static.lolesports.com/teams/AstralisAST.png', 33); +INSERT INTO public.team OVERRIDING SYSTEM VALUE VALUES (16, 101383793572656373, 'excel', 'EXCEL', 'XL', 'http://static.lolesports.com/teams/Excel_FullColor2.png', 'http://static.lolesports.com/teams/Excel_FullColor1.png', 'http://static.lolesports.com/teams/ExcelXL.png', 33); +INSERT INTO public.team OVERRIDING SYSTEM VALUE VALUES (17, 101383793574360315, 'rogue', 'Rogue', 'RGE', 'http://static.lolesports.com/teams/1631819715136_rge-2021-worlds.png', NULL, 'http://static.lolesports.com/teams/1632941190948_RGE.png', 33); +INSERT INTO public.team OVERRIDING SYSTEM VALUE VALUES (18, 101388912911039804, 'thunder-talk-gaming', 'Thunder Talk Gaming', 'TT', 'http://static.lolesports.com/teams/TT-FullonDark.png', 'http://static.lolesports.com/teams/TT-FullonLight.png', 'http://static.lolesports.com/teams/TTTT.png', 35); +INSERT INTO public.team OVERRIDING SYSTEM VALUE VALUES (19, 101388912914513220, 'victory-five', 'Victory Five', 'V5', 'http://static.lolesports.com/teams/1592592149333_VictoryFiveV5-01-FullonDark.png', 'http://static.lolesports.com/teams/1592592149336_VictoryFiveV5-03-FullonLight.png', 'https://lolstatic-a.akamaihd.net/esports-assets/production/team/victory-five-ha9mq1rv.png', 35); +INSERT INTO public.team OVERRIDING SYSTEM VALUE VALUES (20, 101422616509070746, 'galatasaray-espor', 'Galatasaray Espor', 'GS', 'http://static.lolesports.com/teams/1631820533570_galatasaray-2021-worlds.png', 
'http://static.lolesports.com/teams/1631820533572_galatasaray-2021-worlds.png', 'http://static.lolesports.com/teams/1632941006301_GalatasarayGS.png', 39); +INSERT INTO public.team OVERRIDING SYSTEM VALUE VALUES (21, 101428372598668846, 'burning-core', 'Burning Core', 'BC', 'https://lolstatic-a.akamaihd.net/esports-assets/production/team/burning-core-7q0431w1.png', 'https://lolstatic-a.akamaihd.net/esports-assets/production/team/burning-core-8a63k0iu.png', 'https://lolstatic-a.akamaihd.net/esports-assets/production/team/burning-core-fnmfa2td.png', 40); +INSERT INTO public.team OVERRIDING SYSTEM VALUE VALUES (22, 101428372600307248, 'rascal-jester', 'Rascal Jester', 'RJ', 'https://lolstatic-a.akamaihd.net/esports-assets/production/team/rascal-jester-e0g6cud0.png', 'https://lolstatic-a.akamaihd.net/esports-assets/production/team/rascal-jester-g32ay08v.png', 'https://lolstatic-a.akamaihd.net/esports-assets/production/team/rascal-jester-guqjh8kb.png', 40); +INSERT INTO public.team OVERRIDING SYSTEM VALUE VALUES (23, 101428372602011186, 'v3-esports', 'V3 Esports', 'V3', 'http://static.lolesports.com/teams/v3_500x500.png', 'http://static.lolesports.com/teams/v3_500x500.png', NULL, 40); +INSERT INTO public.team OVERRIDING SYSTEM VALUE VALUES (24, 101428372603715124, 'crest-gaming-act', 'Crest Gaming Act', 'CGA', 'http://static.lolesports.com/teams/1630058341510_cga_512px.png', 'http://static.lolesports.com/teams/1630058341513_cga_512px.png', 'https://lolstatic-a.akamaihd.net/esports-assets/production/team/crest-gaming-act-7pkgpqa.png', 40); +INSERT INTO public.team OVERRIDING SYSTEM VALUE VALUES (25, 101428372605353526, 'sengoku-gaming', 'Sengoku Gaming', 'SG', 'https://lolstatic-a.akamaihd.net/esports-assets/production/team/sengoku-gaming-ikyxjlfn.png', 'https://lolstatic-a.akamaihd.net/esports-assets/production/team/sengoku-gaming-gnat0l9c.png', 'https://lolstatic-a.akamaihd.net/esports-assets/production/team/sengoku-gaming-3rd8ifie.png', 40); +INSERT INTO public.team OVERRIDING SYSTEM VALUE VALUES (26, 101428372607057464, 'axiz', 'AXIZ', 'AXZ', 'https://lolstatic-a.akamaihd.net/esports-assets/production/team/axiz-frilmkic.png', 'https://lolstatic-a.akamaihd.net/esports-assets/production/team/axiz-fpemv4d2.png', 'https://lolstatic-a.akamaihd.net/esports-assets/production/team/axiz-9hiwgh3l.png', 40); +INSERT INTO public.team OVERRIDING SYSTEM VALUE VALUES (27, 101428372830010965, 'alpha-esports', 'Alpha Esports', 'ALF', 'http://static.lolesports.com/teams/1592588479686_AlphaEsportsALF-01-FullonDark.png', 'http://static.lolesports.com/teams/1592588479688_AlphaEsportsALF-03-FullonLight.png', NULL, 4); +INSERT INTO public.team OVERRIDING SYSTEM VALUE VALUES (28, 101978171843206569, 'vega-squadron', 'Vega Squadron', 'VEG', 'http://static.lolesports.com/teams/vega.png', 'http://static.lolesports.com/teams/vega.png', NULL, 41); +INSERT INTO public.team OVERRIDING SYSTEM VALUE VALUES (29, 102141671181705193, 'michigan-state-university', 'Michigan State University', 'MSU', 'https://lolstatic-a.akamaihd.net/esports-assets/production/team/michigan-state-university-au4vndaf.png', 'https://lolstatic-a.akamaihd.net/esports-assets/production/team/michigan-state-university-c5mv9du0.png', NULL, NULL); +INSERT INTO public.team OVERRIDING SYSTEM VALUE VALUES (30, 102141671182557163, 'university-of-illinois', 'University of Illinois', 'UI', 'https://lolstatic-a.akamaihd.net/esports-assets/production/team/university-of-illinois-bwvscsri.png', 
'https://lolstatic-a.akamaihd.net/esports-assets/production/team/university-of-illinois-b3jros5r.png', NULL, NULL); +INSERT INTO public.team OVERRIDING SYSTEM VALUE VALUES (31, 102141671183409133, 'maryville-university', 'Maryville University', 'MU', 'http://static.lolesports.com/teams/1647541915472_200x200_MU_Logo.png', 'http://static.lolesports.com/teams/1647541915475_200x200_MU_Logo.png', NULL, 28); +INSERT INTO public.team OVERRIDING SYSTEM VALUE VALUES (32, 102141671185047537, 'uci-esports', 'UCI Esports', 'UCI', 'http://static.lolesports.com/teams/1641604280633_UCI.png', 'http://static.lolesports.com/teams/1641548061305_LOLESPORTSICON.png', NULL, NULL); +INSERT INTO public.team OVERRIDING SYSTEM VALUE VALUES (33, 102141671185899507, 'university-of-western-ontario', 'University of Western Ontario', 'UWO', 'https://lolstatic-a.akamaihd.net/esports-assets/production/team/university-of-western-ontario-9q0nn3lw.png', 'https://lolstatic-a.akamaihd.net/esports-assets/production/team/university-of-western-ontario-6csb5dft.png', NULL, NULL); +INSERT INTO public.team OVERRIDING SYSTEM VALUE VALUES (34, 102141671186685941, 'university-of-waterloo', 'University of Waterloo', 'UW', 'https://lolstatic-a.akamaihd.net/esports-assets/production/team/university-of-waterloo-2wuni11l.png', 'https://lolstatic-a.akamaihd.net/esports-assets/production/team/university-of-waterloo-aghmypqf.png', NULL, NULL); +INSERT INTO public.team OVERRIDING SYSTEM VALUE VALUES (35, 102141671187668983, 'nc-state-university', 'NC State University', 'NCSU', 'https://lolstatic-a.akamaihd.net/esports-assets/production/team/nc-state-university-it42b898.png', 'https://lolstatic-a.akamaihd.net/esports-assets/production/team/nc-state-university-6ey19n1w.png', NULL, NULL); +INSERT INTO public.team OVERRIDING SYSTEM VALUE VALUES (36, 102235771678061291, 'fastpay-wildcats', 'fastPay Wildcats', 'IW', 'http://static.lolesports.com/teams/fastpay-wildcats.png', 'http://static.lolesports.com/teams/fastpay-wildcats.png', NULL, 39); +INSERT INTO public.team OVERRIDING SYSTEM VALUE VALUES (37, 102747101565183056, 'nongshim-redforce', 'NongShim REDFORCE', 'NS', 'http://static.lolesports.com/teams/NSFullonDark.png', 'http://static.lolesports.com/teams/NSFullonLight.png', 'http://static.lolesports.com/teams/NongshimRedForceNS.png', 34); +INSERT INTO public.team OVERRIDING SYSTEM VALUE VALUES (38, 102787200120306562, 'mousesports', 'Mousesports', 'MOUZ', 'http://static.lolesports.com/teams/1639486346996_PRM_MOUZ-FullColorDarkBG.png', 'http://static.lolesports.com/teams/1639486346999_PRM_MOUZ-FullColorDarkBG.png', NULL, NULL); +INSERT INTO public.team OVERRIDING SYSTEM VALUE VALUES (39, 102787200124959636, 'crvena-zvezda-esports', 'Crvena Zvezda Esports', 'CZV', 'https://lolstatic-a.akamaihd.net/esports-assets/production/team/crvena-zvezda-esports-ddtlzzhd.png', 'https://lolstatic-a.akamaihd.net/esports-assets/production/team/crvena-zvezda-esports-ddtlzzhd.png', NULL, 1); +INSERT INTO public.team OVERRIDING SYSTEM VALUE VALUES (40, 102787200126663579, 'giants', 'Giants', 'GIA', 'http://static.lolesports.com/teams/1641412992057_escudowhite.png', 'http://static.lolesports.com/teams/1641412992058_escudo_black.png', NULL, NULL); +INSERT INTO public.team OVERRIDING SYSTEM VALUE VALUES (41, 102787200129022886, 'esuba', 'eSuba', 'ESB', 'http://static.lolesports.com/teams/1629209489523_esuba_full_pos.png', 'http://static.lolesports.com/teams/1629209489525_esuba_full_pos.png', NULL, NULL); +INSERT INTO public.team OVERRIDING SYSTEM VALUE VALUES (42, 
102787200130988976, 'asus-rog-elite', 'ASUS ROG Elite', 'ASUS', 'https://lolstatic-a.akamaihd.net/esports-assets/production/team/asus-rog-elite-iouou6l.png', 'https://lolstatic-a.akamaihd.net/esports-assets/production/team/asus-rog-elite-cz4z103n.png', NULL, NULL); +INSERT INTO public.team OVERRIDING SYSTEM VALUE VALUES (43, 102787200132955066, 'for-the-win-esports', 'For The Win Esports', 'FTW', 'http://static.lolesports.com/teams/LPLOL_FTW-Logo1.png', 'http://static.lolesports.com/teams/LPLOL_FTW-Logo1.png', NULL, NULL); +INSERT INTO public.team OVERRIDING SYSTEM VALUE VALUES (44, 102787200134790084, 'hma-fnatic-rising', 'HMA Fnatic Rising', 'FNCR', 'http://static.lolesports.com/teams/NLC_FNCR-logo.png', 'http://static.lolesports.com/teams/NLC_FNCR-logo.png', NULL, NULL); +INSERT INTO public.team OVERRIDING SYSTEM VALUE VALUES (45, 102787200136756173, 'berlin-international-gaming', 'Berlin International Gaming', 'BIG', 'http://static.lolesports.com/teams/BIG-Logo-2020-White1.png', 'http://static.lolesports.com/teams/BIG-Logo-2020-White1.png', NULL, 7); +INSERT INTO public.team OVERRIDING SYSTEM VALUE VALUES (46, 102787200138722262, 'devilsone', 'Devils.One', 'DV1', 'https://lolstatic-a.akamaihd.net/esports-assets/production/team/devilsone-bfe3xkh.png', 'https://lolstatic-a.akamaihd.net/esports-assets/production/team/devilsone-dmj5ivct.png', NULL, 6); +INSERT INTO public.team OVERRIDING SYSTEM VALUE VALUES (47, 102787200143309800, 'ensure', 'eNsure', 'EN', 'https://lolstatic-a.akamaihd.net/esports-assets/production/team/ensure-5hi6e2cg.png', 'https://lolstatic-a.akamaihd.net/esports-assets/production/team/ensure-fehdkert.png', NULL, 1); +INSERT INTO public.team OVERRIDING SYSTEM VALUE VALUES (48, 102787200145472495, 'defusekids', 'Defusekids', 'DKI', 'https://lolstatic-a.akamaihd.net/esports-assets/production/team/defusekids-finmimok.png', 'https://lolstatic-a.akamaihd.net/esports-assets/production/team/defusekids-wu2z0pj.png', NULL, NULL); +INSERT INTO public.team OVERRIDING SYSTEM VALUE VALUES (49, 102787200147504121, 'campus-party-sparks', 'Campus Party Sparks', 'SPK', 'https://lolstatic-a.akamaihd.net/esports-assets/production/team/campus-party-sparks-5h2d1rjh.png', 'https://lolstatic-a.akamaihd.net/esports-assets/production/team/campus-party-sparks-72ccff49.png', NULL, NULL); +INSERT INTO public.team OVERRIDING SYSTEM VALUE VALUES (50, 102787200149928963, 'we-love-gaming', 'We Love Gaming', 'WLG', 'http://static.lolesports.com/teams/WLGlogo.png', 'http://static.lolesports.com/teams/WLGlogo.png', NULL, NULL); +INSERT INTO public.team OVERRIDING SYSTEM VALUE VALUES (51, 102787200151698443, 'vitalitybee', 'Vitality.Bee', 'VITB', 'http://static.lolesports.com/teams/Vitality-logo-color-outline-rgb.png', 'http://static.lolesports.com/teams/Vitality-logo-color-outline-rgb.png', NULL, 1); +INSERT INTO public.team OVERRIDING SYSTEM VALUE VALUES (52, 102787200153467923, 'bcn-squad', 'BCN Squad', 'BCN', 'http://static.lolesports.com/teams/SL_BCN-Logo_White.png', 'http://static.lolesports.com/teams/SL_BCN-Logo_Dark.png', NULL, NULL); +INSERT INTO public.team OVERRIDING SYSTEM VALUE VALUES (53, 102787200155434012, 'jdxl', 'JD|XL', 'JDXL', 'http://static.lolesports.com/teams/1641489535868_jdxl.png', NULL, NULL, 9); +INSERT INTO public.team OVERRIDING SYSTEM VALUE VALUES (54, 102787200157400101, 'falkn', 'FALKN', 'FKN', 'https://lolstatic-a.akamaihd.net/esports-assets/production/team/falkn-j72aqsqk.png', 'https://lolstatic-a.akamaihd.net/esports-assets/production/team/falkn-dhvtpixb.png', NULL, 1); 
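+-- Note: the seed rows in this file write explicit ids into columns declared
+-- `GENERATED ALWAYS AS IDENTITY`, which Postgres rejects unless the INSERT
+-- opts out with `OVERRIDING SYSTEM VALUE`. A minimal sketch of the pattern
+-- (row values hypothetical), including the `setval` call that keeps the
+-- identity sequence from handing out already-used ids afterwards:
+--
+--   INSERT INTO public.league OVERRIDING SYSTEM VALUE
+--   VALUES (9999, 0, 'demo', 'Demo League', 'EUROPE', 'http://example.invalid/demo.png');
+--   SELECT setval(pg_get_serial_sequence('public.league', 'id'),
+--                 (SELECT max(id) FROM public.league));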
+INSERT INTO public.team OVERRIDING SYSTEM VALUE VALUES (55, 102787200159169580, 'godsent', 'Godsent', 'GOD', 'http://static.lolesports.com/teams/NLC_GOD-light.png', 'http://static.lolesports.com/teams/NLC_GOD-dark.png', NULL, NULL); +INSERT INTO public.team OVERRIDING SYSTEM VALUE VALUES (56, 102825747701670848, 'azules-esports', 'Azules Esports', 'UCH', 'https://lolstatic-a.akamaihd.net/esports-assets/production/team/azules-esports-ak2khbqa.png', NULL, 'https://lolstatic-a.akamaihd.net/esports-assets/production/team/azules-esports-e8yjxxki.png', NULL); +INSERT INTO public.team OVERRIDING SYSTEM VALUE VALUES (57, 103461966951059521, 'evil-geniuses', 'Evil Geniuses', 'EG', 'http://static.lolesports.com/teams/1592590374862_EvilGeniusesEG-01-FullonDark.png', 'http://static.lolesports.com/teams/1592590374875_EvilGeniusesEG-03-FullonLight.png', 'http://static.lolesports.com/teams/1590003096057_EvilGeniusesEG.png', 32); +INSERT INTO public.team OVERRIDING SYSTEM VALUE VALUES (58, 103461966965149786, 'mad-lions', 'MAD Lions', 'MAD', 'http://static.lolesports.com/teams/1631819614211_mad-2021-worlds.png', 'http://static.lolesports.com/teams/1592591395341_MadLionsMAD-03-FullonLight.png', 'http://static.lolesports.com/teams/MAD.png', 33); +INSERT INTO public.team OVERRIDING SYSTEM VALUE VALUES (59, 103461966971048042, 'eg-academy', 'EG Academy', 'EG', 'http://static.lolesports.com/teams/1592590391188_EvilGeniusesEG-01-FullonDark.png', 'http://static.lolesports.com/teams/1592590391200_EvilGeniusesEG-03-FullonLight.png', 'http://static.lolesports.com/teams/1590003135776_EvilGeniusesEG.png', 28); +INSERT INTO public.team OVERRIDING SYSTEM VALUE VALUES (60, 103461966975897718, 'imt-academy', 'IMT Academy', 'IMT', 'http://static.lolesports.com/teams/imt-new-color.png', 'http://static.lolesports.com/teams/imt-new-color.png', 'https://lolstatic-a.akamaihd.net/esports-assets/production/team/immortals-academy-hmxmnvhe.png', 28); +INSERT INTO public.team OVERRIDING SYSTEM VALUE VALUES (61, 103461966981927044, 'dig-academy', 'DIG Academy', 'DIG', 'http://static.lolesports.com/teams/DIG-FullonDark.png', 'http://static.lolesports.com/teams/DIG-FullonLight.png', 'http://static.lolesports.com/teams/DignitasDIG.png', 28); +INSERT INTO public.team OVERRIDING SYSTEM VALUE VALUES (62, 103461966986776720, 'ultra-prime', 'Ultra Prime', 'UP', 'http://static.lolesports.com/teams/ultraprime.png', 'http://static.lolesports.com/teams/ultraprime.png', NULL, 35); +INSERT INTO public.team OVERRIDING SYSTEM VALUE VALUES (63, 103495716836203404, '5-ronin', '5 Ronin', '5R', 'http://static.lolesports.com/teams/5R_LOGO.png', 'http://static.lolesports.com/teams/5R_LOGO.png', NULL, 39); +INSERT INTO public.team OVERRIDING SYSTEM VALUE VALUES (100, 104211666442891296, 'ogaming', 'O''Gaming', 'OGA', 'http://static.lolesports.com/teams/1590143833802_Ays7Gjmu_400x400.jpg', NULL, NULL, NULL); +INSERT INTO public.team OVERRIDING SYSTEM VALUE VALUES (64, 103495716886587312, 'besiktas', 'Beşiktaş', 'BJK', 'https://lolstatic-a.akamaihd.net/esports-assets/production/team/besiktas-e-sports-club-dlw48ntu.png', 'https://lolstatic-a.akamaihd.net/esports-assets/production/team/besiktas-e-sports-club-6ttscu28.png', NULL, 39); +INSERT INTO public.team OVERRIDING SYSTEM VALUE VALUES (65, 103535282113853330, '5-ronin-akademi', '5 Ronin Akademi', '5R', 'http://static.lolesports.com/teams/5R_LOGO.png', 'http://static.lolesports.com/teams/5R_LOGO.png', NULL, 2); +INSERT INTO public.team OVERRIDING SYSTEM VALUE VALUES (66, 103535282119620510, 
'fukuoka-softbank-hawks-gaming', 'Fukuoka SoftBank HAWKS gaming', 'SHG', 'https://lolstatic-a.akamaihd.net/esports-assets/production/team/fukuoka-softbank-hawks-gaming-b99n2uq2.png', 'https://lolstatic-a.akamaihd.net/esports-assets/production/team/fukuoka-softbank-hawks-gaming-4i3ympnq.png', 'https://lolstatic-a.akamaihd.net/esports-assets/production/team/fukuoka-softbank-hawks-gaming-4fl2jmuh.png', 40); +INSERT INTO public.team OVERRIDING SYSTEM VALUE VALUES (67, 103535282124208038, 'pentanetgg', 'Pentanet.GG', 'PGG', 'https://lolstatic-a.akamaihd.net/esports-assets/production/team/pentanetgg-3vnqnv03.png', 'https://lolstatic-a.akamaihd.net/esports-assets/production/team/pentanetgg-3d4g4sbh.png', NULL, 16); +INSERT INTO public.team OVERRIDING SYSTEM VALUE VALUES (68, 103535282135552642, 'papara-supermassive-blaze-akademi', 'Papara SuperMassive Blaze Akademi', 'SMB', 'http://static.lolesports.com/teams/1628521896643_SMBA_WHITE.png', 'http://static.lolesports.com/teams/1628521896646_SMBA_BLACK.png', NULL, 2); +INSERT INTO public.team OVERRIDING SYSTEM VALUE VALUES (69, 103535282138043022, 'fenerbahce-espor-akademi', 'Fenerbahçe Espor Akademi', 'FB', 'http://static.lolesports.com/teams/1642680283028_BANPICK_FB.png', 'http://static.lolesports.com/teams/1642680283035_BANPICK_FB.png', NULL, 2); +INSERT INTO public.team OVERRIDING SYSTEM VALUE VALUES (70, 103535282140533402, 'besiktas-akademi', 'Beşiktaş Akademi', 'BJK', 'https://lolstatic-a.akamaihd.net/esports-assets/production/team/besiktas-akademi-6dlbk21d.png', 'https://lolstatic-a.akamaihd.net/esports-assets/production/team/besiktas-akademi-fobrhai9.png', NULL, 2); +INSERT INTO public.team OVERRIDING SYSTEM VALUE VALUES (71, 103535282143744679, 'dark-passage-akademi', 'Dark Passage Akademi', 'DP', 'https://lolstatic-a.akamaihd.net/esports-assets/production/team/dark-passage-akademi-9ehs6q0l.png', 'https://lolstatic-a.akamaihd.net/esports-assets/production/team/dark-passage-akademi-h4x5hq6.png', NULL, 2); +INSERT INTO public.team OVERRIDING SYSTEM VALUE VALUES (72, 103535282146169523, 'info-yatrm-aurora-akademi', 'Info Yatırım Aurora Akademi', 'AUR', 'http://static.lolesports.com/teams/1642680351930_BANPICK_AUR.png', 'http://static.lolesports.com/teams/1642680351936_BANPICK_AUR.png', NULL, 2); +INSERT INTO public.team OVERRIDING SYSTEM VALUE VALUES (73, 103535282148790975, 'galakticos-akademi', 'GALAKTICOS Akademi', 'GAL', 'https://lolstatic-a.akamaihd.net/esports-assets/production/team/galakticos-akademi-4x1ww2pc.png', 'https://lolstatic-a.akamaihd.net/esports-assets/production/team/galakticos-akademi-dv3kn0pg.png', NULL, 2); +INSERT INTO public.team OVERRIDING SYSTEM VALUE VALUES (74, 103535282158162659, 'fastpay-wildcats-akademi', 'fastPay Wildcats Akademi', 'IW', 'http://static.lolesports.com/teams/1582880891336_IW.png', 'http://static.lolesports.com/teams/1582880891351_IW.png', NULL, 2); +INSERT INTO public.team OVERRIDING SYSTEM VALUE VALUES (75, 103877554248683116, 'schalke-04-evolution', 'Schalke 04 Evolution', 'S04E', 'http://static.lolesports.com/teams/S04_Standard_Logo1.png', 'http://static.lolesports.com/teams/S04_Standard_Logo1.png', NULL, NULL); +INSERT INTO public.team OVERRIDING SYSTEM VALUE VALUES (76, 103877589042434434, 'gamerlegion', 'GamerLegion', 'GL', 'http://static.lolesports.com/teams/1585046217463_220px-Team_GamerLegionlogo_square.png', NULL, NULL, 1); +INSERT INTO public.team OVERRIDING SYSTEM VALUE VALUES (77, 103877625775457850, 'movistar-riders', 'Movistar Riders', 'MRS', 
'http://static.lolesports.com/teams/1585046777741_220px-Movistar_Riderslogo_square.png', NULL, NULL, NULL); +INSERT INTO public.team OVERRIDING SYSTEM VALUE VALUES (78, 103877675241047720, 'ldlc-ol', 'LDLC OL', 'LDLC', 'http://static.lolesports.com/teams/LFL-LDLC-logo.png', 'http://static.lolesports.com/teams/LFL-LDLC-logo.png', NULL, 1); +INSERT INTO public.team OVERRIDING SYSTEM VALUE VALUES (79, 103877737868887783, 'saim-se', 'SAIM SE', 'SSB', 'http://static.lolesports.com/teams/1585048488568_220px-SAIM_SElogo_square.png', 'http://static.lolesports.com/teams/1585048488582_220px-SAIM_SElogo_square.png', NULL, NULL); +INSERT INTO public.team OVERRIDING SYSTEM VALUE VALUES (80, 103877756742242918, 'racoon', 'Racoon', 'RCN', 'http://static.lolesports.com/teams/1585048776551_220px-Racoon_(Italian_Team)logo_square.png', 'http://static.lolesports.com/teams/1585048776564_220px-Racoon_(Italian_Team)logo_square.png', NULL, NULL); +INSERT INTO public.team OVERRIDING SYSTEM VALUE VALUES (81, 103877774634323825, 'ydn-gamers', 'YDN Gamers', 'YDN', 'http://static.lolesports.com/teams/1587638409857_LOGO_YDN_-trasp.png', 'http://static.lolesports.com/teams/1587638409876_LOGO_YDN_-trasp.png', NULL, NULL); +INSERT INTO public.team OVERRIDING SYSTEM VALUE VALUES (82, 103877879209300619, 'vipers-inc', 'Vipers Inc', 'VIP', 'http://static.lolesports.com/teams/1585050644953_220px-Vipers_Inclogo_square.png', 'http://static.lolesports.com/teams/1585050644968_220px-Vipers_Inclogo_square.png', NULL, NULL); +INSERT INTO public.team OVERRIDING SYSTEM VALUE VALUES (83, 103877891572305836, 'team-singularity', 'Team Singularity', 'SNG', 'http://static.lolesports.com/teams/NLC_SNG-light.png', 'http://static.lolesports.com/teams/NLC_SNG-logo.png', NULL, 9); +INSERT INTO public.team OVERRIDING SYSTEM VALUE VALUES (84, 103877908090914662, 'kenty', 'Kenty', 'KEN', 'http://static.lolesports.com/teams/1585051086000_220px-Kentylogo_square.png', 'http://static.lolesports.com/teams/1585051086014_220px-Kentylogo_square.png', NULL, NULL); +INSERT INTO public.team OVERRIDING SYSTEM VALUE VALUES (85, 103877925817094140, 'pigsports', 'PIGSPORTS', 'PIG', 'http://static.lolesports.com/teams/PIGSPORTS_PIG-Logo1.png', 'http://static.lolesports.com/teams/PIGSPORTS_PIG-Logo1.png', NULL, NULL); +INSERT INTO public.team OVERRIDING SYSTEM VALUE VALUES (86, 103877951616192529, 'cyber-gaming', 'Cyber Gaming', 'CG', 'http://static.lolesports.com/teams/1585051749524_220px-Cyber_Gaminglogo_square.png', 'http://static.lolesports.com/teams/1585051749529_220px-Cyber_Gaminglogo_square.png', NULL, NULL); +INSERT INTO public.team OVERRIDING SYSTEM VALUE VALUES (87, 103877976717529187, 'intrepid-fox-gaming', 'Intrepid Fox Gaming', 'IF', 'http://static.lolesports.com/teams/1585052132267_220px-Intrepid_Fox_Gaminglogo_square.png', 'http://static.lolesports.com/teams/1585052132281_220px-Intrepid_Fox_Gaminglogo_square.png', NULL, NULL); +INSERT INTO public.team OVERRIDING SYSTEM VALUE VALUES (88, 103878020539746273, 'egn-esports', 'EGN Esports', 'EGN', 'http://static.lolesports.com/teams/LPLOL_EGN-Logo1.png', 'http://static.lolesports.com/teams/LPLOL_EGN-Logo1.png', NULL, NULL); +INSERT INTO public.team OVERRIDING SYSTEM VALUE VALUES (89, 103935421249833954, 'mad-lions-madrid', 'MAD Lions Madrid', 'MADM', 'http://static.lolesports.com/teams/SL_MADM-Logo_white.png', 'http://static.lolesports.com/teams/SL_MADM-Logo_dark.png', NULL, 5); +INSERT INTO public.team OVERRIDING SYSTEM VALUE VALUES (90, 103935446548920777, 'misfits-premier', 'Misfits Premier', 'MSFP', 
'http://static.lolesports.com/teams/LFL-MSFP-logo.png', 'http://static.lolesports.com/teams/LFL-MSFP-logo.png', NULL, NULL); +INSERT INTO public.team OVERRIDING SYSTEM VALUE VALUES (91, 103935468920814040, 'gamersorigin', 'GamersOrigin', 'GO', 'http://static.lolesports.com/teams/1588178480033_logoGO_2020_G_Blanc.png', 'http://static.lolesports.com/teams/1588178480035_logoGO_2020_G_Noir.png', NULL, 11); +INSERT INTO public.team OVERRIDING SYSTEM VALUE VALUES (92, 103935523328473675, 'k1ck-neosurf', 'K1CK Neosurf', 'K1', 'http://static.lolesports.com/teams/1585930223604_K1ck_Neosurflogo_square.png', NULL, NULL, NULL); +INSERT INTO public.team OVERRIDING SYSTEM VALUE VALUES (93, 103935530333072898, 'ago-rogue', 'AGO Rogue', 'RGO', 'http://static.lolesports.com/teams/1585930330127_AGO_ROGUElogo_square.png', NULL, NULL, 1); +INSERT INTO public.team OVERRIDING SYSTEM VALUE VALUES (94, 103935567188806885, 'energypot-wizards', 'Energypot Wizards', 'EWIZ', 'http://static.lolesports.com/teams/1585930892362_Energypot_Wizardslogo_square.png', NULL, NULL, NULL); +INSERT INTO public.team OVERRIDING SYSTEM VALUE VALUES (95, 103935642731826448, 'sector-one', 'Sector One', 'S1', 'http://static.lolesports.com/teams/1641288621852_1024x1024_sector_one_nameless_white.png', 'http://static.lolesports.com/teams/1641288621854_1024x1024_sector_one_nameless_black.png', NULL, 19); +INSERT INTO public.team OVERRIDING SYSTEM VALUE VALUES (96, 103963647433204351, 'm19', 'M19', 'M19', 'http://static.lolesports.com/teams/1586359360406_M19logo_square.png', NULL, NULL, NULL); +INSERT INTO public.team OVERRIDING SYSTEM VALUE VALUES (97, 103963715924353674, 'dragon-army', 'Dragon Army', 'DA', 'http://static.lolesports.com/teams/1586360405423_440px-Dragon_Armylogo_square.png', NULL, NULL, 41); +INSERT INTO public.team OVERRIDING SYSTEM VALUE VALUES (98, 103963753080578719, 'crowcrowd-moscow', 'CrowCrowd Moscow', 'CC', 'http://static.lolesports.com/teams/Logo_CC.png', NULL, NULL, 41); +INSERT INTO public.team OVERRIDING SYSTEM VALUE VALUES (99, 104202382255290736, 'rensga', 'RENSGA', 'RNS', 'http://static.lolesports.com/teams/LogoRensgaEsports.png', 'http://static.lolesports.com/teams/LogoRensgaEsports.png', 'http://static.lolesports.com/teams/RensgaRNS.png', 37); + + +-- Values for tournament table +INSERT INTO public.tournament OVERRIDING SYSTEM VALUE VALUES (1, 107893386210553711, 'european_masters_spring_2022_main_event', '2022-04-13', '2022-05-08', 1); +INSERT INTO public.tournament OVERRIDING SYSTEM VALUE VALUES (2, 107530554766055254, 'lla_opening_2022', '2022-01-28', '2022-04-17', 3); +INSERT INTO public.tournament OVERRIDING SYSTEM VALUE VALUES (3, 107693721179065689, 'pcs_2022_spring', '2022-02-11', '2022-04-18', 4); +INSERT INTO public.tournament OVERRIDING SYSTEM VALUE VALUES (4, 107468241207873310, 'superliga_2022_spring', '2022-01-09', '2022-05-01', 5); +INSERT INTO public.tournament OVERRIDING SYSTEM VALUE VALUES (5, 107416436272657995, 'ultraliga_2022_spring', '2022-01-01', '2022-05-01', 6); +INSERT INTO public.tournament OVERRIDING SYSTEM VALUE VALUES (6, 107417741193036913, 'prime_2022_spring', '2022-01-01', '2022-05-01', 7); +INSERT INTO public.tournament OVERRIDING SYSTEM VALUE VALUES (7, 107457033672415830, 'pg_spring', '2022-01-17', '2022-05-01', 8); +INSERT INTO public.tournament OVERRIDING SYSTEM VALUE VALUES (8, 107417432877679361, 'nlc_2022_spring', '2022-01-01', '2022-05-15', 9); +INSERT INTO public.tournament OVERRIDING SYSTEM VALUE VALUES (9, 107468370558963709, 'lfl_2022_spring', '2022-01-09', 
'2022-05-01', 11); +INSERT INTO public.tournament OVERRIDING SYSTEM VALUE VALUES (10, 107565607659994755, 'cblol_academy_2022', '2022-01-24', '2022-04-18', 15); +INSERT INTO public.tournament OVERRIDING SYSTEM VALUE VALUES (11, 107439320897210747, 'lco_spring_2022', '2022-01-23', '2022-04-29', 16); +INSERT INTO public.tournament OVERRIDING SYSTEM VALUE VALUES (12, 107563481236862420, 'eslol_spring', '2022-01-16', '2022-05-01', 19); +INSERT INTO public.tournament OVERRIDING SYSTEM VALUE VALUES (13, 107682708465517027, 'discover_volcano_league_opening_2022', '2022-01-25', '2022-04-16', 22); +INSERT INTO public.tournament OVERRIDING SYSTEM VALUE VALUES (14, 107728324355999617, 'master_flow_league_opening_2022', '2022-01-26', '2022-04-24', 24); +INSERT INTO public.tournament OVERRIDING SYSTEM VALUE VALUES (15, 107677841285321565, 'honor_league_opening_2022', '2022-01-24', '2022-04-16', 25); +INSERT INTO public.tournament OVERRIDING SYSTEM VALUE VALUES (16, 107921288851375933, 'proving_grounds_spring_2022', '2022-03-16', '2022-04-16', 28); +INSERT INTO public.tournament OVERRIDING SYSTEM VALUE VALUES (17, 108097587668586485, 'tft_emea_lcq_2022', '2022-04-16', '2022-04-16', 29); +INSERT INTO public.tournament OVERRIDING SYSTEM VALUE VALUES (18, 107458367237283414, 'lcs_spring_2022', '2022-02-04', '2022-04-25', 32); +INSERT INTO public.tournament OVERRIDING SYSTEM VALUE VALUES (19, 107417059262120466, 'lec_2022_spring', '2022-01-01', '2022-05-15', 33); +INSERT INTO public.tournament OVERRIDING SYSTEM VALUE VALUES (20, 107417779630700437, 'lpl_spring_2022', '2022-01-10', '2022-05-01', 35); +INSERT INTO public.tournament OVERRIDING SYSTEM VALUE VALUES (21, 107405837336179496, 'cblol_2022_split1', '2022-01-22', '2022-04-23', 37); +INSERT INTO public.tournament OVERRIDING SYSTEM VALUE VALUES (22, 107417471555810057, 'lcl_spring_2022', '2022-02-11', '2022-04-16', 41); +INSERT INTO public.tournament OVERRIDING SYSTEM VALUE VALUES (23, 107418086627198298, 'lcs_academy_2022_spring', '2022-01-19', '2022-05-31', 42); + +/* We force values on a GENERATED ALWAYS AS IDENTITY column, +so we need to set the current value of each backing sequence via pg_catalog +*/ + +SELECT pg_catalog.setval('public.league_id_seq', 42, true); + +SELECT pg_catalog.setval('public.player_id_seq', 3112, true); + +SELECT pg_catalog.setval('public.team_id_seq', 642, true); + +SELECT pg_catalog.setval('public.team_player_id_seq', 3719, true); + +SELECT pg_catalog.setval('public.tournament_id_seq', 23, true); \ No newline at end of file diff --git a/tests/Cargo.toml b/tests/Cargo.toml new file mode 100644 index 00000000..a6aacb83 --- /dev/null +++ b/tests/Cargo.toml @@ -0,0 +1,12 @@ +[package] +name = "tests" +version = "0.1.0" +edition = "2021" +publish = false + +[dev-dependencies] +canyon_sql = { path = "../canyon_sql" } + +[[test]] +name = "canyon_integration_tests" +path = "canyon_integration_tests.rs" \ No newline at end of file diff --git a/tests/canyon.toml b/tests/canyon.toml new file mode 100644 index 00000000..7bb56442 --- /dev/null +++ b/tests/canyon.toml @@ -0,0 +1,5 @@ +[canyon_sql] +datasources = [ + {name = 'postgres_docker', properties.db_type = 'postgresql', properties.username = 'postgres', properties.password = 'postgres', properties.host = 'localhost', properties.port = 5438, properties.db_name = 'postgres'}, + {name = 'sqlserver_docker', properties.db_type = 'sqlserver', properties.username = 'sa', properties.password = 'SqlServer-10', properties.host = 'localhost', properties.port = 1434, properties.db_name = 'master'} +] \ No newline at end of file
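For reference, a minimal sketch of the identity mechanics the Postgres dump above relies on: forcing explicit ids with OVERRIDING SYSTEM VALUE leaves the backing sequence stale, which is why each seeded table is followed by a pg_catalog.setval call. The demo table here is hypothetical and not part of the dump:

CREATE TABLE demo (
    id INT GENERATED ALWAYS AS IDENTITY PRIMARY KEY,
    name TEXT NOT NULL
);
-- GENERATED ALWAYS rejects explicit ids unless the insert overrides the identity:
INSERT INTO demo OVERRIDING SYSTEM VALUE VALUES (42, 'seeded row');
-- The backing sequence still points at its old value, so realign it:
SELECT pg_catalog.setval('demo_id_seq', 42, true);
-- A default-generated id now continues from 43 instead of colliding with 42:
INSERT INTO demo (name) VALUES ('next row');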
diff --git a/tests/canyon_integration_tests.rs b/tests/canyon_integration_tests.rs new file mode 100644 index 00000000..8120ee8f --- /dev/null +++ b/tests/canyon_integration_tests.rs @@ -0,0 +1,15 @@ +//! Integration tests for the heart of a Canyon-SQL application: the CRUD operations. +//! +//! These tests exercise most of the Canyon source code, due to their integration nature. +//! +//! Guide-style: almost every operation in Canyon is `Result`-wrapped, except the unchecked +//! variants of the `find_all` implementations. We will directly `.unwrap()` the results +//! because, if the tests report something wrong in the code, we want to *panic* +//! and abort the execution. +use std::error::Error; + +mod crud; +mod migrations; + +mod constants; +mod tests_models; diff --git a/tests/constants.rs b/tests/constants.rs new file mode 100644 index 00000000..c54cc8d1 --- /dev/null +++ b/tests/constants.rs @@ -0,0 +1,372 @@ +//! Constant values to share across the integration tests +pub const PSQL_DS: &str = "postgres_docker"; +pub const SQL_SERVER_DS: &str = "sqlserver_docker"; + +pub static FETCH_PUBLIC_SCHEMA: &str = +"SELECT + gi.table_name, + gi.column_name, + gi.data_type, + gi.character_maximum_length, + gi.is_nullable, + gi.column_default, + gi.numeric_precision, + gi.numeric_scale, + gi.numeric_precision_radix, + gi.datetime_precision, + gi.interval_type, + CASE WHEN starts_with(CAST(pg_catalog.pg_get_constraintdef(oid) AS TEXT), 'FOREIGN KEY') + THEN CAST(pg_catalog.pg_get_constraintdef(oid) AS TEXT) ELSE NULL END AS foreign_key_info, + CASE WHEN starts_with(CAST(pg_catalog.pg_get_constraintdef(oid) AS TEXT), 'FOREIGN KEY') + THEN con.conname ELSE NULL END AS foreign_key_name, + CASE WHEN starts_with(CAST(pg_catalog.pg_get_constraintdef(oid) AS TEXT), 'PRIMARY KEY') + THEN CAST(pg_catalog.pg_get_constraintdef(oid) AS TEXT) ELSE NULL END AS primary_key_info, + CASE WHEN starts_with(CAST(pg_catalog.pg_get_constraintdef(oid) AS TEXT), 'PRIMARY KEY') + THEN con.conname ELSE NULL END AS primary_key_name, + gi.is_identity, + gi.identity_generation +FROM + information_schema.columns AS gi +LEFT JOIN pg_catalog.pg_constraint AS con on + gi.table_name = CAST(con.conrelid::regclass AS TEXT) AND + gi.column_name = split_part(split_part(CAST(pg_catalog.pg_get_constraintdef(oid) AS TEXT),')',1),'(',2) +WHERE + table_schema = 'public';"; + +pub const SQL_SERVER_CREATE_TABLES: &str = " +IF OBJECT_ID(N'[dbo].[league]', N'U') IS NULL +BEGIN + CREATE TABLE dbo.league ( + id INT PRIMARY KEY IDENTITY, + ext_id BIGINT NOT NULL, + slug NVARCHAR(250) NOT NULL, + name NVARCHAR(250) NOT NULL, + region NVARCHAR(250) NOT NULL, + image_url NVARCHAR(250) NOT NULL + ); +END; + +IF OBJECT_ID(N'[dbo].[tournament]', N'U') IS NULL +BEGIN + CREATE TABLE dbo.tournament ( + id INT PRIMARY KEY IDENTITY, + ext_id BIGINT NOT NULL, + slug NVARCHAR(250) NOT NULL, + start_date DATE NOT NULL, + end_date DATE NOT NULL, + league INT REFERENCES league(id) + ); +END; + +IF OBJECT_ID(N'[dbo].[player]', N'U') IS NULL +BEGIN + CREATE TABLE dbo.player ( + id INT PRIMARY KEY IDENTITY, + ext_id BIGINT NOT NULL, + first_name NVARCHAR(250) NOT NULL, + last_name NVARCHAR(250) NOT NULL, + summoner_name NVARCHAR(250) NOT NULL, + image_url NVARCHAR(250), + role NVARCHAR(250) NOT NULL + ); +END; + +IF OBJECT_ID(N'[dbo].[team]', N'U') IS NULL +BEGIN + CREATE TABLE dbo.team ( + id INT PRIMARY KEY IDENTITY, + ext_id BIGINT NOT NULL, + slug NVARCHAR(250) NOT NULL, + name NVARCHAR(250) NOT NULL, 
+ code NVARCHAR(250) NOT NULL, + image_url NVARCHAR(250) NOT NULL, + alt_image_url NVARCHAR(250), + bg_image_url NVARCHAR(250), + home_league INT REFERENCES league(id) + ); +END; +"; + +pub const SQL_SERVER_FILL_TABLE_VALUES: &str = " +-- Values for league table +SET IDENTITY_INSERT dbo.league ON; +INSERT INTO dbo.league (id,ext_id,slug,name,region,image_url) VALUES (1, 100695891328981122, 'european-masters', 'European Masters', 'EUROPE', 'http://static.lolesports.com/leagues/EM_Bug_Outline1.png'); +INSERT INTO dbo.league (id,ext_id,slug,name,region,image_url) VALUES (2, 101097443346691685, 'turkey-academy-league', 'TAL', 'TURKEY', 'http://static.lolesports.com/leagues/1592516072459_TAL-01-FullonDark.png'); +INSERT INTO dbo.league (id,ext_id,slug,name,region,image_url) VALUES (3, 101382741235120470, 'lla', 'LLA', 'LATIN AMERICA', 'http://static.lolesports.com/leagues/1592516315279_LLA-01-FullonDark.png'); +INSERT INTO dbo.league (id,ext_id,slug,name,region,image_url) VALUES (4, 104366947889790212, 'pcs', 'PCS', 'HONG KONG, MACAU, TAIWAN', 'http://static.lolesports.com/leagues/1592515942679_PCS-01-FullonDark.png'); +INSERT INTO dbo.league (id,ext_id,slug,name,region,image_url) VALUES (5, 105266074488398661, 'superliga', 'SuperLiga', 'EUROPE', 'http://static.lolesports.com/leagues/SL21-V-white.png'); +INSERT INTO dbo.league (id,ext_id,slug,name,region,image_url) VALUES (6, 105266088231437431, 'ultraliga', 'Ultraliga', 'EUROPE', 'http://static.lolesports.com/leagues/1639390623717_ULTRALIGA_logo_sq_cyan.png'); +INSERT INTO dbo.league (id,ext_id,slug,name,region,image_url) VALUES (7, 105266091639104326, 'primeleague', 'Prime League', 'EUROPE', 'http://static.lolesports.com/leagues/PrimeLeagueResized.png'); +INSERT INTO dbo.league (id,ext_id,slug,name,region,image_url) VALUES (8, 105266094998946936, 'pg_nationals', 'PG Nationals', 'EUROPE', 'http://static.lolesports.com/leagues/PG_Nationals_Logo_White.png'); +INSERT INTO dbo.league (id,ext_id,slug,name,region,image_url) VALUES (9, 105266098308571975, 'nlc', 'NLC', 'EUROPE', 'http://static.lolesports.com/leagues/1641490922073_nlc_logo.png'); +INSERT INTO dbo.league (id,ext_id,slug,name,region,image_url) VALUES (10, 105266101075764040, 'liga_portuguesa', 'Liga Portuguesa', 'EUROPE', 'http://static.lolesports.com/leagues/1649884876085_LPLOL_2021_ISO_G-c389e9ae85c243e4f76a8028bbd9ca1609c2d12bc47c3709a9250d1b3ca43f58.png'); +INSERT INTO dbo.league (id,ext_id,slug,name,region,image_url) VALUES (11, 105266103462388553, 'lfl', 'La Ligue Française', 'EUROPE', 'http://static.lolesports.com/leagues/LFL_Logo_2020_black1.png'); +INSERT INTO dbo.league (id,ext_id,slug,name,region,image_url) VALUES (12, 105266106309666619, 'hitpoint_masters', 'Hitpoint Masters', 'EUROPE', 'http://static.lolesports.com/leagues/1641465237186_HM_white.png'); +INSERT INTO dbo.league (id,ext_id,slug,name,region,image_url) VALUES (13, 105266108767593290, 'greek_legends', 'Greek Legends League', 'EUROPE', 'http://static.lolesports.com/leagues/GLL_LOGO_WHITE.png'); +INSERT INTO dbo.league (id,ext_id,slug,name,region,image_url) VALUES (14, 105266111679554379, 'esports_balkan_league', 'Esports Balkan League', 'EUROPE', 'http://static.lolesports.com/leagues/1625735031226_ebl_crest-whitePNG.png'); +INSERT INTO dbo.league (id,ext_id,slug,name,region,image_url) VALUES (15, 105549980953490846, 'cblol_academy', 'CBLOL Academy', 'BRAZIL', 'http://static.lolesports.com/leagues/cblol-acad-white.png'); +INSERT INTO dbo.league (id,ext_id,slug,name,region,image_url) 
VALUES (16, 105709090213554609, 'lco', 'LCO', 'OCEANIA', 'http://static.lolesports.com/leagues/lco-color-white.png'); +INSERT INTO dbo.league (id,ext_id,slug,name,region,image_url) VALUES (17, 106827757669296909, 'ljl_academy', 'LJL Academy', 'JAPAN', 'http://static.lolesports.com/leagues/1630062215891_ljl-al_logo_gradient.png'); +INSERT INTO dbo.league (id,ext_id,slug,name,region,image_url) VALUES (18, 107213827295848783, 'vcs', 'VCS', 'VIETNAM', 'http://static.lolesports.com/leagues/1635953171501_LOL_VCS_Full_White.png'); +INSERT INTO dbo.league (id,ext_id,slug,name,region,image_url) VALUES (19, 107407335299756365, 'elite_series', 'Elite Series', 'EUROPE', 'http://static.lolesports.com/leagues/1641287979138_EliteSeriesMarkWhite.png'); +INSERT INTO dbo.league (id,ext_id,slug,name,region,image_url) VALUES (20, 107581050201097472, 'honor_division', 'Honor Division', 'LATIN AMERICA', 'http://static.lolesports.com/leagues/1641750781829_divhonormxwhite.png'); +INSERT INTO dbo.league (id,ext_id,slug,name,region,image_url) VALUES (21, 107581669166925444, 'elements_league', 'Elements League', 'LATIN AMERICA', 'http://static.lolesports.com/leagues/1642593573670_LOGO_ELEMENTS_White.png'); +INSERT INTO dbo.league (id,ext_id,slug,name,region,image_url) VALUES (22, 107582133359724496, 'volcano_discover_league', 'Volcano League', 'LATIN AMERICA', 'http://static.lolesports.com/leagues/1643106609661_VOLCANO-VERTICAL-ColorLight.png'); +INSERT INTO dbo.league (id,ext_id,slug,name,region,image_url) VALUES (23, 107582580502415838, 'claro_gaming_stars_league', 'Stars League', 'LATIN AMERICA', 'http://static.lolesports.com/leagues/1642595169468_CLARO-GAMING-STARS-LEAGUE-B.png'); +INSERT INTO dbo.league (id,ext_id,slug,name,region,image_url) VALUES (24, 107598636564896416, 'master_flow_league', 'Master Flow League', 'LATIN AMERICA', 'http://static.lolesports.com/leagues/1643794656405_LMF-White.png'); +INSERT INTO dbo.league (id,ext_id,slug,name,region,image_url) VALUES (25, 107598951349015984, 'honor_league', 'Honor League', 'LATIN AMERICA', 'http://static.lolesports.com/leagues/1643036660690_lhe-ColorLight.png'); +INSERT INTO dbo.league (id,ext_id,slug,name,region,image_url) VALUES (26, 107603541524308819, 'movistar_fiber_golden_league', 'Golden League', 'LATIN AMERICA', 'http://static.lolesports.com/leagues/1642445572375_MovistarLeague.png'); +INSERT INTO dbo.league (id,ext_id,slug,name,region,image_url) VALUES (27, 107898214974993351, 'college_championship', 'College Championship', 'NORTH AMERICA', 'http://static.lolesports.com/leagues/1646396098648_CollegeChampionshiplogo.png'); +INSERT INTO dbo.league (id,ext_id,slug,name,region,image_url) VALUES (28, 107921249454961575, 'proving_grounds', 'Proving Grounds', 'NORTH AMERICA', 'http://static.lolesports.com/leagues/1646747578708_download8.png'); +INSERT INTO dbo.league (id,ext_id,slug,name,region,image_url) VALUES (29, 108001239847565215, 'tft_esports', 'TFT Last Chance Qualifier', 'INTERNATIONAL', 'http://static.lolesports.com/leagues/1649439858579_tftesport.png'); +INSERT INTO dbo.league (id,ext_id,slug,name,region,image_url) VALUES (30, 98767975604431411, 'worlds', 'Worlds', 'INTERNATIONAL', 'http://static.lolesports.com/leagues/1592594612171_WorldsDarkBG.png'); +INSERT INTO dbo.league (id,ext_id,slug,name,region,image_url) VALUES (31, 98767991295297326, 'all-star', 'All-Star Event', 'INTERNATIONAL', 'http://static.lolesports.com/leagues/1592594737227_ASEDarkBG.png'); +INSERT INTO dbo.league (id,ext_id,slug,name,region,image_url) VALUES (32, 
98767991299243165, 'lcs', 'LCS', 'NORTH AMERICA', 'http://static.lolesports.com/leagues/LCSNew-01-FullonDark.png'); +INSERT INTO dbo.league (id,ext_id,slug,name,region,image_url) VALUES (33, 98767991302996019, 'lec', 'LEC', 'EUROPE', 'http://static.lolesports.com/leagues/1592516184297_LEC-01-FullonDark.png'); +INSERT INTO dbo.league (id,ext_id,slug,name,region,image_url) VALUES (34, 98767991310872058, 'lck', 'LCK', 'KOREA', 'http://static.lolesports.com/leagues/lck-color-on-black.png'); +INSERT INTO dbo.league (id,ext_id,slug,name,region,image_url) VALUES (35, 98767991314006698, 'lpl', 'LPL', 'CHINA', 'http://static.lolesports.com/leagues/1592516115322_LPL-01-FullonDark.png'); +INSERT INTO dbo.league (id,ext_id,slug,name,region,image_url) VALUES (36, 98767991325878492, 'msi', 'MSI', 'INTERNATIONAL', 'http://static.lolesports.com/leagues/1592594634248_MSIDarkBG.png'); +INSERT INTO dbo.league (id,ext_id,slug,name,region,image_url) VALUES (37, 98767991332355509, 'cblol-brazil', 'CBLOL', 'BRAZIL', 'http://static.lolesports.com/leagues/cblol-logo-symbol-offwhite.png'); +INSERT INTO dbo.league (id,ext_id,slug,name,region,image_url) VALUES (38, 98767991335774713, 'lck_challengers_league', 'LCK Challengers', 'KOREA', 'http://static.lolesports.com/leagues/lck-cl-white.png'); +INSERT INTO dbo.league (id,ext_id,slug,name,region,image_url) VALUES (39, 98767991343597634, 'turkiye-sampiyonluk-ligi', 'TCL', 'TURKEY', 'https://lolstatic-a.akamaihd.net/esports-assets/production/league/turkiye-sampiyonluk-ligi-8r9ofb9.png'); +INSERT INTO dbo.league (id,ext_id,slug,name,region,image_url) VALUES (40, 98767991349978712, 'ljl-japan', 'LJL', 'JAPAN', 'http://static.lolesports.com/leagues/1592516354053_LJL-01-FullonDark.png'); +INSERT INTO dbo.league (id,ext_id,slug,name,region,image_url) VALUES (41, 98767991355908944, 'lcl', 'LCL', 'COMMONWEALTH OF INDEPENDENT STATES', 'http://static.lolesports.com/leagues/1593016885758_LCL-01-FullonDark.png'); +INSERT INTO dbo.league (id,ext_id,slug,name,region,image_url) VALUES (42, 99332500638116286, 'lcs-academy', 'LCS Academy', 'NORTH AMERICA', 'http://static.lolesports.com/leagues/lcs-academy-purple.png'); +SET IDENTITY_INSERT dbo.league OFF; + +-- Values for player table +SET IDENTITY_INSERT dbo.player ON; +INSERT INTO dbo.player (id,ext_id, first_name, last_name, summoner_name, image_url, role) VALUES (1, 98767975906852059, 'Jaehyeok', 'Park', 'Ruler', 'http://static.lolesports.com/players/1642153903692_GEN_Ruler_F.png', 'bottom'); +INSERT INTO dbo.player (id,ext_id, first_name, last_name, summoner_name, image_url, role) VALUES (2, 102186485482484390, 'Hyeonjun', 'Choi', 'Doran', 'http://static.lolesports.com/players/1642153880932_GEN_Doran_F.png', 'top'); +INSERT INTO dbo.player (id,ext_id, first_name, last_name, summoner_name, image_url, role) VALUES (3, 98767975916458257, 'Wangho ', 'Han', 'Peanut', 'http://static.lolesports.com/players/1642153896918_GEN_peanut_A.png', 'jungle'); +INSERT INTO dbo.player (id,ext_id, first_name, last_name, summoner_name, image_url, role) VALUES (4, 99871276342168416, 'Jihun', 'Jung', 'Chovy', 'http://static.lolesports.com/players/1642153873969_GEN_Chovy_F.png', 'mid'); +INSERT INTO dbo.player (id,ext_id, first_name, last_name, summoner_name, image_url, role) VALUES (5, 99871276332909841, 'Siu', 'Son', 'Lehends', 'http://static.lolesports.com/players/1642153887731_GEN_Lehends_F.png', 'support'); +INSERT INTO dbo.player (id,ext_id, first_name, last_name, summoner_name, image_url, role) VALUES (6, 104266797862156067, 'Youngjae', 'Ko', 
'YoungJae', 'http://static.lolesports.com/players/1642153913037_GEN_YoungJae_F.png', 'jungle'); +INSERT INTO dbo.player (id,ext_id, first_name, last_name, summoner_name, image_url, role) VALUES (7, 103495716560217968, 'Hyoseong', 'Oh', 'Vsta', 'http://static.lolesports.com/players/1642154102606_HLE_Vsta_F.png', 'support'); +INSERT INTO dbo.player (id,ext_id, first_name, last_name, summoner_name, image_url, role) VALUES (8, 104266795407626462, 'Dongju', 'Lee', 'DuDu', 'http://static.lolesports.com/players/1642154060441_HLE_DuDu_F.png', 'top'); +INSERT INTO dbo.player (id,ext_id, first_name, last_name, summoner_name, image_url, role) VALUES (9, 106267386230851795, 'Junghyeun', 'Kim', 'Willer', 'http://static.lolesports.com/players/1642154110676_HLE_Willer_F.png', 'jungle'); +INSERT INTO dbo.player (id,ext_id, first_name, last_name, summoner_name, image_url, role) VALUES (10, 100725844995692264, 'Janggyeom', 'Kim', 'OnFleek', 'http://static.lolesports.com/players/1642154084709_HLE_Onfleek_F.png', 'jungle'); +INSERT INTO dbo.player (id,ext_id, first_name, last_name, summoner_name, image_url, role) VALUES (11, 105320683858945274, 'Hongjo', 'Kim', 'Karis', 'http://static.lolesports.com/players/1642154066010_HLE_Karis_F.png', 'mid'); +INSERT INTO dbo.player (id,ext_id, first_name, last_name, summoner_name, image_url, role) VALUES (12, 104287359934240404, 'Jaehoon', 'Lee', 'SamD', 'http://static.lolesports.com/players/1642154094651_HLE_SamD_F.png', 'bottom'); +INSERT INTO dbo.player (id,ext_id, first_name, last_name, summoner_name, image_url, role) VALUES (13, 103461966870841210, 'Wyllian', 'Adriano', 'asta', 'http://static.lolesports.com/players/1643226025146_Astacopy.png', 'jungle'); +INSERT INTO dbo.player (id,ext_id, first_name, last_name, summoner_name, image_url, role) VALUES (14, 107559111166843860, 'Felipe', 'Boal', 'Boal', 'http://static.lolesports.com/players/1644095483228_BOALcopiar.png', 'top'); +INSERT INTO dbo.player (id,ext_id, first_name, last_name, summoner_name, image_url, role) VALUES (15, 107559255871511679, 'Giovani', 'Baldan', 'Mito', 'http://static.lolesports.com/players/1643226193262_Mitocopy.png', 'top'); +INSERT INTO dbo.player (id,ext_id, first_name, last_name, summoner_name, image_url, role) VALUES (16, 103478281329357326, 'Arthur', 'Machado', 'Tutsz', 'http://static.lolesports.com/players/1643226293749_Tutszcopy.png', 'mid'); +INSERT INTO dbo.player (id,ext_id, first_name, last_name, summoner_name, image_url, role) VALUES (17, 103743599797538329, 'Luiz Felipe', 'Lobo', 'Flare', 'http://static.lolesports.com/players/1643226082718_Flarecopy.png', 'bottom'); +INSERT INTO dbo.player (id,ext_id, first_name, last_name, summoner_name, image_url, role) VALUES (18, 99566408210057665, 'Natan', 'Braz', 'fNb', 'http://static.lolesports.com/players/1643226467130_Fnbcopiar.png', 'top'); +INSERT INTO dbo.player (id,ext_id, first_name, last_name, summoner_name, image_url, role) VALUES (19, 99566407771166805, 'Filipe', 'Brombilla', 'Ranger', 'http://static.lolesports.com/players/1643226495379_Rangercopiar.png', 'jungle'); +INSERT INTO dbo.player (id,ext_id, first_name, last_name, summoner_name, image_url, role) VALUES (20, 107559327426244686, 'Vinícius', 'Corrêa', 'StineR', 'http://static.lolesports.com/players/1643226666563_Silhueta.png', 'jungle'); +INSERT INTO dbo.player (id,ext_id, first_name, last_name, summoner_name, image_url, role) VALUES (21, 99566407784212776, 'Bruno', 'Farias', 'Envy', 'http://static.lolesports.com/players/1643226430923_Envycopiar.png', 'mid'); +INSERT INTO 
dbo.player (id,ext_id, first_name, last_name, summoner_name, image_url, role) VALUES (22, 107559338252333149, 'Gabriel', 'Furuuti', 'Fuuu', 'http://static.lolesports.com/players/1643226717192_Silhueta.png', 'mid'); +INSERT INTO dbo.player (id,ext_id, first_name, last_name, summoner_name, image_url, role) VALUES (23, 105397181199735591, 'Lucas', 'Fensterseifer', 'Netuno', 'http://static.lolesports.com/players/1644095521735_Netunocopiar.png', 'bottom'); +INSERT INTO dbo.player (id,ext_id, first_name, last_name, summoner_name, image_url, role) VALUES (24, 98767975947296513, 'Ygor', 'Freitas', 'RedBert', 'http://static.lolesports.com/players/1643226527904_Redbertcopiar.png', 'support'); +INSERT INTO dbo.player (id,ext_id, first_name, last_name, summoner_name, image_url, role) VALUES (25, 100754278890207800, 'Geonyeong', 'Mun', 'Steal', 'http://static.lolesports.com/players/1644905307225_dfm_steal.png', 'jungle'); +INSERT INTO dbo.player (id,ext_id, first_name, last_name, summoner_name, image_url, role) VALUES (26, 99566404536983507, 'Chanju', 'Lee', 'Yaharong', 'http://static.lolesports.com/players/1644905328869_dfm_yaharong.png', 'mid'); +INSERT INTO dbo.player (id,ext_id, first_name, last_name, summoner_name, image_url, role) VALUES (27, 104016425624023728, 'Jiyoong', 'Lee', 'Harp', 'http://static.lolesports.com/players/1644905257358_dfm_harp.png', 'support'); +INSERT INTO dbo.player (id,ext_id, first_name, last_name, summoner_name, image_url, role) VALUES (28, 98767991750309549, 'Danil', 'Reshetnikov', 'Diamondprox', 'http://static.lolesports.com/players/Diamondproxcopy.png', 'jungle'); +INSERT INTO dbo.player (id,ext_id, first_name, last_name, summoner_name, image_url, role) VALUES (29, 105700748891875072, 'Nikita ', 'Gudkov', 'Griffon ', 'http://static.lolesports.com/players/1642071116433_placeholder.png', 'mid'); +INSERT INTO dbo.player (id,ext_id, first_name, last_name, summoner_name, image_url, role) VALUES (30, 105700946934214905, 'YEVHEN', 'ZAVALNYI', 'Mytant', 'http://static.lolesports.com/players/1642071138150_placeholder.png', 'bottom'); +INSERT INTO dbo.player (id,ext_id, first_name, last_name, summoner_name, image_url, role) VALUES (31, 98767991755955790, 'Eduard', 'Abgaryan', 'Edward', 'https://lolstatic-a.akamaihd.net/esports-assets/production/player/gosu-pepper-88anxcql.png', 'support'); +INSERT INTO dbo.player (id,ext_id, first_name, last_name, summoner_name, image_url, role) VALUES (32, 106301600611225723, 'Mark', 'Leksin', 'Dreampull', 'http://static.lolesports.com/players/placeholder.jpg', 'top'); +INSERT INTO dbo.player (id,ext_id, first_name, last_name, summoner_name, image_url, role) VALUES (33, 107721938219680332, 'Azamat', 'Atkanov', 'TESLA', 'http://static.lolesports.com/players/1643706327509_placeholder.png', 'support'); +INSERT INTO dbo.player (id,ext_id, first_name, last_name, summoner_name, image_url, role) VALUES (34, 100725844988653773, 'Su', 'Heo', 'ShowMaker', 'http://static.lolesports.com/players/1642153659258_DK_ShowMaker_F.png', 'mid'); +INSERT INTO dbo.player (id,ext_id, first_name, last_name, summoner_name, image_url, role) VALUES (35, 102483272156027229, 'Daegil', 'Seo', 'deokdam', 'http://static.lolesports.com/players/1642153629340_DK_deokdam_F.png', 'bottom'); +INSERT INTO dbo.player (id,ext_id, first_name, last_name, summoner_name, image_url, role) VALUES (36, 101388913291808185, 'Hyeonggyu', 'Kim', 'Kellin', 'http://static.lolesports.com/players/1642153649009_DK_Kellin_F.png', 'support'); +INSERT INTO dbo.player (id,ext_id, first_name, last_name, 
summoner_name, image_url, role) VALUES (37, 105705431649727017, 'Taeyoon', 'Noh', 'Burdol', 'http://static.lolesports.com/players/1642153598672_DK_Burdol_F.png', 'top'); +INSERT INTO dbo.player (id,ext_id, first_name, last_name, summoner_name, image_url, role) VALUES (38, 103729432252832975, 'Yongho', 'Yoon', 'Hoya', 'http://static.lolesports.com/players/1642153639500_DK_Hoya_F.png', 'top'); +INSERT INTO dbo.player (id,ext_id, first_name, last_name, summoner_name, image_url, role) VALUES (39, 105320703008048707, 'Dongbum', 'Kim', 'Croco', 'http://static.lolesports.com/players/1642154712531_LSB_Croco_R.png', 'jungle'); +INSERT INTO dbo.player (id,ext_id, first_name, last_name, summoner_name, image_url, role) VALUES (40, 105501829364113001, 'Hobin', 'Jeon', 'Howling', 'http://static.lolesports.com/players/1642154731703_LSB_Howling_F.png', 'top'); +INSERT INTO dbo.player (id,ext_id, first_name, last_name, summoner_name, image_url, role) VALUES (41, 104284310661848687, 'Juhyeon', 'Lee', 'Clozer', 'http://static.lolesports.com/players/1642154706000_LSB_Clozer_R.png', 'mid'); +INSERT INTO dbo.player (id,ext_id, first_name, last_name, summoner_name, image_url, role) VALUES (42, 100725844996918206, 'Jaeyeon', 'Kim', 'Dove', 'http://static.lolesports.com/players/1642154719503_LSB_Dove_R.png', 'top'); +INSERT INTO dbo.player (id,ext_id, first_name, last_name, summoner_name, image_url, role) VALUES (43, 105530583598805234, 'Myeongjun', 'Lee', 'Envyy', 'http://static.lolesports.com/players/1642154726047_LSB_Envyy_F.png', 'bottom'); +INSERT INTO dbo.player (id,ext_id, first_name, last_name, summoner_name, image_url, role) VALUES (44, 105530584812980593, 'Jinhong', 'Kim', 'Kael', 'http://static.lolesports.com/players/1642154745002_LSB_Kael_F.png', 'support'); +INSERT INTO dbo.player (id,ext_id, first_name, last_name, summoner_name, image_url, role) VALUES (45, 105501834624360050, 'Sanghoon', 'Yoon', 'Ice', 'http://static.lolesports.com/players/1642154738262_LSB_Ice_F.png', 'bottom'); +INSERT INTO dbo.player (id,ext_id, first_name, last_name, summoner_name, image_url, role) VALUES (46, 99322214647978964, 'Daniele', 'di Mauro', 'Jiizuke', 'http://static.lolesports.com/players/eg-jiizuke-2021.png', 'mid'); +INSERT INTO dbo.player (id,ext_id, first_name, last_name, summoner_name, image_url, role) VALUES (47, 100787602257283436, 'Minh Loc', 'Pham', 'Zeros', 'https://lolstatic-a.akamaihd.net/esports-assets/production/player/zeros-4keddu17.png', 'top'); +INSERT INTO dbo.player (id,ext_id, first_name, last_name, summoner_name, image_url, role) VALUES (48, 104327502738107767, 'Nicolás', 'Rivero', 'Kiefer', 'http://static.lolesports.com/players/1643047365591_Kiefer-2.png', 'mid'); +INSERT INTO dbo.player (id,ext_id, first_name, last_name, summoner_name, image_url, role) VALUES (49, 102179902322952953, 'Manuel', 'Scala', 'Pancake', 'http://static.lolesports.com/players/1643047550782_Pancake-5.png', 'bottom'); +INSERT INTO dbo.player (id,ext_id, first_name, last_name, summoner_name, image_url, role) VALUES (50, 105516185566739968, 'Cristóbal', 'Arróspide', 'Zothve', 'http://static.lolesports.com/players/1643047287141_Zothve-9.png', 'top'); +INSERT INTO dbo.player (id,ext_id, first_name, last_name, summoner_name, image_url, role) VALUES (51, 99871352196477603, 'Gwanghyeop', 'Kim', 'Hoglet', 'http://static.lolesports.com/players/1643047312405_Hoglet-8.png', 'jungle'); +INSERT INTO dbo.player (id,ext_id, first_name, last_name, summoner_name, image_url, role) VALUES (52, 99871352193690418, 'Changhun', 'Han', 'Luci', 
'http://static.lolesports.com/players/1643047438703_Luci-5.png', 'support'); +INSERT INTO dbo.player (id,ext_id, first_name, last_name, summoner_name, image_url, role) VALUES (53, 107635899693202699, 'Thomas', 'Garnsworthy', 'Tronthepom', 'https://static.lolesports.com/players/download.png', 'top'); +INSERT INTO dbo.player (id,ext_id, first_name, last_name, summoner_name, image_url, role) VALUES (54, 107635905118503535, 'James', 'Craig', 'Voice', 'https://static.lolesports.com/players/download.png', 'bottom'); +INSERT INTO dbo.player (id,ext_id, first_name, last_name, summoner_name, image_url, role) VALUES (55, 107635907168238086, 'Rocco', 'Potter', 'rocco521', 'https://static.lolesports.com/players/download.png', 'support'); +INSERT INTO dbo.player (id,ext_id, first_name, last_name, summoner_name, image_url, role) VALUES (56, 107635918452357647, 'Reuben', 'Best', 'Reufury', 'https://static.lolesports.com/players/download.png', 'mid'); +INSERT INTO dbo.player (id,ext_id, first_name, last_name, summoner_name, image_url, role) VALUES (57, 107647480732814180, 'Bryce', 'Zhou', 'Meifan', 'https://static.lolesports.com/players/download.png', 'jungle'); +INSERT INTO dbo.player (id,ext_id, first_name, last_name, summoner_name, image_url, role) VALUES (58, 107657801460158111, 'Benny', 'Nguyen', 'District 1', 'https://static.lolesports.com/players/download.png', 'jungle'); +INSERT INTO dbo.player (id,ext_id, first_name, last_name, summoner_name, image_url, role) VALUES (59, 105709372540742118, 'Blake', 'Schlage', 'Azus', 'http://static.lolesports.com/players/silhouette.png', 'top'); +INSERT INTO dbo.player (id,ext_id, first_name, last_name, summoner_name, image_url, role) VALUES (60, 106350759376304634, 'Shao', 'Zhong', 'Akano', 'https://static.lolesports.com/players/download.png', 'jungle'); +INSERT INTO dbo.player (id,ext_id, first_name, last_name, summoner_name, image_url, role) VALUES (61, 107634941727734818, 'Jeremy', 'Lim', 'foreigner', 'https://static.lolesports.com/players/download.png', 'jungle'); +INSERT INTO dbo.player (id,ext_id, first_name, last_name, summoner_name, image_url, role) VALUES (62, 105709381466108761, 'Reuben', 'Salb', 'Piglet', 'http://static.lolesports.com/players/silhouette.png', 'bottom'); +INSERT INTO dbo.player (id,ext_id, first_name, last_name, summoner_name, image_url, role) VALUES (63, 105747861836427633, 'Yi', 'Chen', 'Thomas Shen', 'https://static.lolesports.com/players/download.png', 'bottom'); +INSERT INTO dbo.player (id,ext_id, first_name, last_name, summoner_name, image_url, role) VALUES (64, 107657786356796634, 'Robert', 'Wells', 'Tyran', 'https://static.lolesports.com/players/download.png', 'top'); +INSERT INTO dbo.player (id,ext_id, first_name, last_name, summoner_name, image_url, role) VALUES (65, 107657790493529410, 'Da Woon', 'Jeung', 'DaJeung', 'https://static.lolesports.com/players/download.png', 'mid'); +INSERT INTO dbo.player (id,ext_id, first_name, last_name, summoner_name, image_url, role) VALUES (66, 107657793079479518, 'Rhett', 'Wiggins', 'Vxpir', 'https://static.lolesports.com/players/download.png', 'support'); +INSERT INTO dbo.player (id,ext_id, first_name, last_name, summoner_name, image_url, role) VALUES (67, 107698225510856278, 'Benson', 'Tsai', 'Entrust', 'https://static.lolesports.com/players/download.png', 'support'); +INSERT INTO dbo.player (id,ext_id, first_name, last_name, summoner_name, image_url, role) VALUES (68, 103525219435043049, 'Lachlan', 'Keene-O''Keefe', 'N0body', 
'https://lolstatic-a.akamaihd.net/esports-assets/production/player/n0body-einjqvyk.png', 'top'); +INSERT INTO dbo.player (id,ext_id, first_name, last_name, summoner_name, image_url, role) VALUES (69, 101389749294612370, 'Janik', 'Bartels', 'Jenax', 'http://static.lolesports.com/players/1642003381408_jenax.png', 'top'); +INSERT INTO dbo.player (id,ext_id, first_name, last_name, summoner_name, image_url, role) VALUES (70, 101383793865143549, 'Erik', 'Wessén', 'Treatz', 'http://static.lolesports.com/players/1642003495533_treatz.png', 'support'); +INSERT INTO dbo.player (id,ext_id, first_name, last_name, summoner_name, image_url, role) VALUES (71, 101389737455173027, 'Daniyal ', 'Gamani', 'Sertuss', 'http://static.lolesports.com/players/1642003453914_sertuss.png', 'mid'); +INSERT INTO dbo.player (id,ext_id, first_name, last_name, summoner_name, image_url, role) VALUES (72, 99322214588927915, 'Erberk ', 'Demir', 'Gilius', 'http://static.lolesports.com/players/1642003341615_gilius.png', 'jungle'); +INSERT INTO dbo.player (id,ext_id, first_name, last_name, summoner_name, image_url, role) VALUES (73, 99322214668103078, 'Matti', 'Sormunen', 'WhiteKnight', 'http://static.lolesports.com/players/1642003243059_white-knight.png', 'top'); +INSERT INTO dbo.player (id,ext_id, first_name, last_name, summoner_name, image_url, role) VALUES (74, 100312190807221865, 'Nikolay ', 'Akatov', 'Zanzarah', 'http://static.lolesports.com/players/1642003282324_zanzarah.png', 'jungle'); +INSERT INTO dbo.player (id,ext_id, first_name, last_name, summoner_name, image_url, role) VALUES (75, 99322214243134013, 'Hampus ', 'Abrahamsson', 'promisq', 'http://static.lolesports.com/players/1642003205916_promisq.png', 'support'); +INSERT INTO dbo.player (id,ext_id, first_name, last_name, summoner_name, image_url, role) VALUES (76, 99322214620375780, 'Kasper', 'Kobberup', 'Kobbe', 'http://static.lolesports.com/players/1642003168563_kobbe.png', 'bottom'); +INSERT INTO dbo.player (id,ext_id, first_name, last_name, summoner_name, image_url, role) VALUES (77, 99322214238585389, 'Patrik', 'Jiru', 'Patrik', 'http://static.lolesports.com/players/1642004060212_patrik.png', 'bottom'); +INSERT INTO dbo.player (id,ext_id, first_name, last_name, summoner_name, image_url, role) VALUES (78, 105519722481834694, 'Mark', 'van Woensel', 'Markoon', 'http://static.lolesports.com/players/1642003998089_markoon.png', 'jungle'); +INSERT INTO dbo.player (id,ext_id, first_name, last_name, summoner_name, image_url, role) VALUES (79, 105519724699493915, 'Hendrik', 'Reijenga', 'Advienne', 'http://static.lolesports.com/players/1642003935782_advienne.png', 'support'); +INSERT INTO dbo.player (id,ext_id, first_name, last_name, summoner_name, image_url, role) VALUES (80, 99322214616775017, 'Erlend', 'Holm', 'Nukeduck', 'http://static.lolesports.com/players/1642004031937_nukeduck.png', 'mid'); +INSERT INTO dbo.player (id,ext_id, first_name, last_name, summoner_name, image_url, role) VALUES (81, 101389713973624205, 'Finn', 'Wiestål', 'Finn', 'http://static.lolesports.com/players/1642003970167_finn.png', 'top'); +INSERT INTO dbo.player (id,ext_id, first_name, last_name, summoner_name, image_url, role) VALUES (82, 99322214629661297, 'Mihael', 'Mehle', 'Mikyx', 'http://static.lolesports.com/players/G2_MIKYX2021_summer.png', 'support'); +INSERT INTO dbo.player (id,ext_id, first_name, last_name, summoner_name, image_url, role) VALUES (83, 100482247959137902, 'Emil', 'Larsson', 'Larssen', 'http://static.lolesports.com/players/1642003206398_larssen.png', 'mid'); +INSERT INTO 
dbo.player (id,ext_id, first_name, last_name, summoner_name, image_url, role) VALUES (84, 99322214598412197, 'Andrei', 'Pascu', 'Odoamne', 'http://static.lolesports.com/players/1642003264169_odoamne.png', 'top'); +INSERT INTO dbo.player (id,ext_id, first_name, last_name, summoner_name, image_url, role) VALUES (85, 102181528883745160, 'Adrian', 'Trybus', 'Trymbi', 'http://static.lolesports.com/players/1642003301461_trymbi.png', 'support'); +INSERT INTO dbo.player (id,ext_id, first_name, last_name, summoner_name, image_url, role) VALUES (86, 99566406053904433, 'Geun-seong', 'Kim', 'Malrang', 'http://static.lolesports.com/players/1642003233110_malrang.png', 'jungle'); +INSERT INTO dbo.player (id,ext_id, first_name, last_name, summoner_name, image_url, role) VALUES (87, 103536921420956640, 'Markos', 'Stamkopoulos', 'Comp', 'http://static.lolesports.com/players/1642003175488_comp.png', 'bottom'); +INSERT INTO dbo.player (id,ext_id, first_name, last_name, summoner_name, image_url, role) VALUES (88, 101388912808637770, 'Hanxi', 'Xia', 'Chelizi', 'http://static.lolesports.com/players/1593128001829_silhouette.png', 'top'); +INSERT INTO dbo.player (id,ext_id, first_name, last_name, summoner_name, image_url, role) VALUES (89, 105516474039500339, 'Fei-Yang', 'Luo', 'Captain', 'http://static.lolesports.com/players/silhouette.png', 'mid'); +INSERT INTO dbo.player (id,ext_id, first_name, last_name, summoner_name, image_url, role) VALUES (90, 106368709696011395, 'Seung Min', 'Han', 'Patch', 'http://static.lolesports.com/players/silhouette.png', 'support'); +INSERT INTO dbo.player (id,ext_id, first_name, last_name, summoner_name, image_url, role) VALUES (91, 107597376599119596, 'HAOTIAN', 'BI', 'yaoyao', 'http://static.lolesports.com/players/1641805668544_placeholder.png', 'support'); +INSERT INTO dbo.player (id,ext_id, first_name, last_name, summoner_name, image_url, role) VALUES (92, 101388912811586896, 'Zhilin', 'Su', 'Southwind', 'http://static.lolesports.com/players/1593129903866_ig-southwind-web.png', 'support'); +INSERT INTO dbo.player (id,ext_id, first_name, last_name, summoner_name, image_url, role) VALUES (93, 101388912810603854, 'Wang', 'Ding', 'Puff', 'http://static.lolesports.com/players/1593129891452_ig-puff-web.png', 'bottom'); +INSERT INTO dbo.player (id,ext_id, first_name, last_name, summoner_name, image_url, role) VALUES (94, 104287371427354335, 'Zhi-Peng', 'Tian', 'New', 'http://static.lolesports.com/players/1593132511529_rng-new-web.png', 'top'); +INSERT INTO dbo.player (id,ext_id, first_name, last_name, summoner_name, image_url, role) VALUES (95, 107597380474228562, 'WANG', 'XIN', 'frigid', 'http://static.lolesports.com/players/1641805726386_placeholder.png', 'jungle'); +INSERT INTO dbo.player (id,ext_id, first_name, last_name, summoner_name, image_url, role) VALUES (96, 104287365097341858, 'Peng', 'Guo', 'ppgod', 'http://static.lolesports.com/players/1593135580022_v5-ppgod-web.png', 'support'); +INSERT INTO dbo.player (id,ext_id, first_name, last_name, summoner_name, image_url, role) VALUES (97, 103478281359738222, 'Qi-Shen ', 'Ying', 'Photic', 'https://lolstatic-a.akamaihd.net/esports-assets/production/player/photic-k1ttlyxh.png', 'bottom'); +INSERT INTO dbo.player (id,ext_id, first_name, last_name, summoner_name, image_url, role) VALUES (98, 103478281402167891, 'Xiao-Long ', 'Li', 'XLB', 'http://static.lolesports.com/players/1593132528126_rng-xlb-web.png', 'jungle'); +INSERT INTO dbo.player (id,ext_id, first_name, last_name, summoner_name, image_url, role) VALUES (99, 
102186438403674539, 'Jaewon', 'Lee', 'Rich', 'http://static.lolesports.com/players/ns-rich.png', 'top'); +INSERT INTO dbo.player (id,ext_id, first_name, last_name, summoner_name, image_url, role) VALUES (100, 99124844346233375, 'Onur', 'Ünalan', 'Zergsting', 'http://static.lolesports.com/players/1633542837856_gs-zergsting-w21.png', 'support'); +SET IDENTITY_INSERT dbo.player OFF; + +-- Values for team table +SET IDENTITY_INSERT dbo.team ON; +INSERT INTO dbo.team (id, ext_id, slug, name, code, image_url, alt_image_url, bg_image_url, home_league) VALUES (1, 100205573495116443, 'geng', 'Gen.G', 'GEN', 'http://static.lolesports.com/teams/1631819490111_geng-2021-worlds.png', 'http://static.lolesports.com/teams/1592589327624_Gen.GGEN-03-FullonLight.png', 'https://lolstatic-a.akamaihd.net/esports-assets/production/team/geng-bnm75bf5.png', 34); +INSERT INTO dbo.team (id, ext_id, slug, name, code, image_url, alt_image_url, bg_image_url, home_league) VALUES (2, 100205573496804586, 'hanwha-life-esports', 'Hanwha Life Esports', 'HLE', 'http://static.lolesports.com/teams/1631819564399_hle-2021-worlds.png', 'http://static.lolesports.com/teams/hle-2021-color-on-light2.png', 'https://lolstatic-a.akamaihd.net/esports-assets/production/team/hanwha-life-esports-7kh5kjdc.png', 34); +INSERT INTO dbo.team (id, ext_id, slug, name, code, image_url, alt_image_url, bg_image_url, home_league) VALUES (3, 100205576307813373, 'flamengo-esports', 'Flamengo Esports', 'FLA', 'http://static.lolesports.com/teams/1642953977323_Monograma_Branco-Vermelho.png', 'http://static.lolesports.com/teams/1642953977326_Monograma_Branco-Vermelho.png', NULL, 37); +INSERT INTO dbo.team (id, ext_id, slug, name, code, image_url, alt_image_url, bg_image_url, home_league) VALUES (4, 100205576309502431, 'furia', 'FURIA', 'FUR', 'http://static.lolesports.com/teams/FURIA---black.png', 'http://static.lolesports.com/teams/FURIA---black.png', 'http://static.lolesports.com/teams/FuriaUppercutFUR.png', 37); +INSERT INTO dbo.team (id, ext_id, slug, name, code, image_url, alt_image_url, bg_image_url, home_league) VALUES (5, 100285330168091787, 'detonation-focusme', 'DetonatioN FocusMe', 'DFM', 'http://static.lolesports.com/teams/1631820630246_dfm-2021-worlds.png', 'https://lolstatic-a.akamaihd.net/esports-assets/production/team/detonation-focusme-ajvyc8cy.png', 'https://lolstatic-a.akamaihd.net/esports-assets/production/team/detonation-focusme-4pgp383l.png', 40); +INSERT INTO dbo.team (id, ext_id, slug, name, code, image_url, alt_image_url, bg_image_url, home_league) VALUES (6, 100289931264192378, 'team-spirit', 'Team Spirit', 'TSPT', 'http://static.lolesports.com/teams/1643720491696_Whitelogo.png', 'http://static.lolesports.com/teams/1643720491697_Blacklogo.png', NULL, 41); +INSERT INTO dbo.team (id, ext_id, slug, name, code, image_url, alt_image_url, bg_image_url, home_league) VALUES (7, 100725845018863243, 'dwg-kia', 'DWG KIA', 'DK', 'http://static.lolesports.com/teams/1631819456274_dwg-kia-2021-worlds.png', 'http://static.lolesports.com/teams/DK-FullonLight.png', 'http://static.lolesports.com/teams/DamwonGamingDWG.png', 34); +INSERT INTO dbo.team (id, ext_id, slug, name, code, image_url, alt_image_url, bg_image_url, home_league) VALUES (8, 100725845022060229, 'liiv-sandbox', 'Liiv SANDBOX', 'LSB', 'http://static.lolesports.com/teams/liiv-sandbox-new.png', 'http://static.lolesports.com/teams/liiv-sandbox-new.png', NULL, 34); +INSERT INTO dbo.team (id, ext_id, slug, name, code, image_url, alt_image_url, bg_image_url, home_league) VALUES (9, 
101157821444002947, 'nexus-blitz-pro-a', 'Nexus Blitz Blue', 'NXB', 'https://lolstatic-a.akamaihd.net/esports-assets/production/team/nexus-blitz-pro-a-esrcx58b.png', 'https://lolstatic-a.akamaihd.net/esports-assets/production/team/nexus-blitz-pro-a-3w3j1cwx.png', NULL, 31); +INSERT INTO dbo.team (id, ext_id, slug, name, code, image_url, alt_image_url, bg_image_url, home_league) VALUES (10, 101157821447017610, 'nexus-blitz-pro-b', 'Nexus Blitz Red', 'NXR', 'https://lolstatic-a.akamaihd.net/esports-assets/production/team/nexus-blitz-pro-b-j6s80wmi.png', 'https://lolstatic-a.akamaihd.net/esports-assets/production/team/nexus-blitz-pro-b-kjtp467.png', NULL, 31); +INSERT INTO dbo.team (id, ext_id, slug, name, code, image_url, alt_image_url, bg_image_url, home_league) VALUES (11, 101383792559569368, 'all-knights', 'All Knights', 'AK', 'http://static.lolesports.com/teams/AK-Black-BG.png', 'http://static.lolesports.com/teams/AK-White-BG.png', NULL, 3); +INSERT INTO dbo.team (id, ext_id, slug, name, code, image_url, alt_image_url, bg_image_url, home_league) VALUES (12, 101383792887446028, 'mammoth', 'MAMMOTH', 'MEC', 'http://static.lolesports.com/teams/1643079304055_RedMammothIcon.png', 'http://static.lolesports.com/teams/1643079304062_RedMammothIcon.png', NULL, 16); +INSERT INTO dbo.team (id, ext_id, slug, name, code, image_url, alt_image_url, bg_image_url, home_league) VALUES (13, 101383792891050518, 'gravitas', 'Gravitas', 'GRV', 'http://static.lolesports.com/teams/gravitas-logo.png', 'http://static.lolesports.com/teams/gravitas-logo.png', NULL, 16); +INSERT INTO dbo.team (id, ext_id, slug, name, code, image_url, alt_image_url, bg_image_url, home_league) VALUES (14, 101383793567806688, 'sk-gaming', 'SK Gaming', 'SK', 'http://static.lolesports.com/teams/1643979272144_SK_Monochrome.png', 'http://static.lolesports.com/teams/1643979272151_SK_Monochrome.png', 'https://lolstatic-a.akamaihd.net/esports-assets/production/team/sk-gaming-2cd63tzz.png', 33); +INSERT INTO dbo.team (id, ext_id, slug, name, code, image_url, alt_image_url, bg_image_url, home_league) VALUES (15, 101383793569248484, 'astralis', 'Astralis', 'AST', 'http://static.lolesports.com/teams/AST-FullonDark.png', 'http://static.lolesports.com/teams/AST-FullonLight.png', 'http://static.lolesports.com/teams/AstralisAST.png', 33); +INSERT INTO dbo.team (id, ext_id, slug, name, code, image_url, alt_image_url, bg_image_url, home_league) VALUES (16, 101383793572656373, 'excel', 'EXCEL', 'XL', 'http://static.lolesports.com/teams/Excel_FullColor2.png', 'http://static.lolesports.com/teams/Excel_FullColor1.png', 'http://static.lolesports.com/teams/ExcelXL.png', 33); +INSERT INTO dbo.team (id, ext_id, slug, name, code, image_url, alt_image_url, bg_image_url, home_league) VALUES (17, 101383793574360315, 'rogue', 'Rogue', 'RGE', 'http://static.lolesports.com/teams/1631819715136_rge-2021-worlds.png', NULL, 'http://static.lolesports.com/teams/1632941190948_RGE.png', 33); +INSERT INTO dbo.team (id, ext_id, slug, name, code, image_url, alt_image_url, bg_image_url, home_league) VALUES (18, 101388912911039804, 'thunder-talk-gaming', 'Thunder Talk Gaming', 'TT', 'http://static.lolesports.com/teams/TT-FullonDark.png', 'http://static.lolesports.com/teams/TT-FullonLight.png', 'http://static.lolesports.com/teams/TTTT.png', 35); +INSERT INTO dbo.team (id, ext_id, slug, name, code, image_url, alt_image_url, bg_image_url, home_league) VALUES (19, 101388912914513220, 'victory-five', 'Victory Five', 'V5', 
'http://static.lolesports.com/teams/1592592149333_VictoryFiveV5-01-FullonDark.png', 'http://static.lolesports.com/teams/1592592149336_VictoryFiveV5-03-FullonLight.png', 'https://lolstatic-a.akamaihd.net/esports-assets/production/team/victory-five-ha9mq1rv.png', 35); +INSERT INTO dbo.team (id, ext_id, slug, name, code, image_url, alt_image_url, bg_image_url, home_league) VALUES (20, 101422616509070746, 'galatasaray-espor', 'Galatasaray Espor', 'GS', 'http://static.lolesports.com/teams/1631820533570_galatasaray-2021-worlds.png', 'http://static.lolesports.com/teams/1631820533572_galatasaray-2021-worlds.png', 'http://static.lolesports.com/teams/1632941006301_GalatasarayGS.png', 39); +INSERT INTO dbo.team (id, ext_id, slug, name, code, image_url, alt_image_url, bg_image_url, home_league) VALUES (21, 101428372598668846, 'burning-core', 'Burning Core', 'BC', 'https://lolstatic-a.akamaihd.net/esports-assets/production/team/burning-core-7q0431w1.png', 'https://lolstatic-a.akamaihd.net/esports-assets/production/team/burning-core-8a63k0iu.png', 'https://lolstatic-a.akamaihd.net/esports-assets/production/team/burning-core-fnmfa2td.png', 40); +INSERT INTO dbo.team (id, ext_id, slug, name, code, image_url, alt_image_url, bg_image_url, home_league) VALUES (22, 101428372600307248, 'rascal-jester', 'Rascal Jester', 'RJ', 'https://lolstatic-a.akamaihd.net/esports-assets/production/team/rascal-jester-e0g6cud0.png', 'https://lolstatic-a.akamaihd.net/esports-assets/production/team/rascal-jester-g32ay08v.png', 'https://lolstatic-a.akamaihd.net/esports-assets/production/team/rascal-jester-guqjh8kb.png', 40); +INSERT INTO dbo.team (id, ext_id, slug, name, code, image_url, alt_image_url, bg_image_url, home_league) VALUES (23, 101428372602011186, 'v3-esports', 'V3 Esports', 'V3', 'http://static.lolesports.com/teams/v3_500x500.png', 'http://static.lolesports.com/teams/v3_500x500.png', NULL, 40); +INSERT INTO dbo.team (id, ext_id, slug, name, code, image_url, alt_image_url, bg_image_url, home_league) VALUES (24, 101428372603715124, 'crest-gaming-act', 'Crest Gaming Act', 'CGA', 'http://static.lolesports.com/teams/1630058341510_cga_512px.png', 'http://static.lolesports.com/teams/1630058341513_cga_512px.png', 'https://lolstatic-a.akamaihd.net/esports-assets/production/team/crest-gaming-act-7pkgpqa.png', 40); +INSERT INTO dbo.team (id, ext_id, slug, name, code, image_url, alt_image_url, bg_image_url, home_league) VALUES (25, 101428372605353526, 'sengoku-gaming', 'Sengoku Gaming', 'SG', 'https://lolstatic-a.akamaihd.net/esports-assets/production/team/sengoku-gaming-ikyxjlfn.png', 'https://lolstatic-a.akamaihd.net/esports-assets/production/team/sengoku-gaming-gnat0l9c.png', 'https://lolstatic-a.akamaihd.net/esports-assets/production/team/sengoku-gaming-3rd8ifie.png', 40); +INSERT INTO dbo.team (id, ext_id, slug, name, code, image_url, alt_image_url, bg_image_url, home_league) VALUES (26, 101428372607057464, 'axiz', 'AXIZ', 'AXZ', 'https://lolstatic-a.akamaihd.net/esports-assets/production/team/axiz-frilmkic.png', 'https://lolstatic-a.akamaihd.net/esports-assets/production/team/axiz-fpemv4d2.png', 'https://lolstatic-a.akamaihd.net/esports-assets/production/team/axiz-9hiwgh3l.png', 40); +INSERT INTO dbo.team (id, ext_id, slug, name, code, image_url, alt_image_url, bg_image_url, home_league) VALUES (27, 101428372830010965, 'alpha-esports', 'Alpha Esports', 'ALF', 'http://static.lolesports.com/teams/1592588479686_AlphaEsportsALF-01-FullonDark.png', 
'http://static.lolesports.com/teams/1592588479688_AlphaEsportsALF-03-FullonLight.png', NULL, 4); +INSERT INTO dbo.team (id, ext_id, slug, name, code, image_url, alt_image_url, bg_image_url, home_league) VALUES (28, 101978171843206569, 'vega-squadron', 'Vega Squadron', 'VEG', 'http://static.lolesports.com/teams/vega.png', 'http://static.lolesports.com/teams/vega.png', NULL, 41); +INSERT INTO dbo.team (id, ext_id, slug, name, code, image_url, alt_image_url, bg_image_url, home_league) VALUES (29, 102141671181705193, 'michigan-state-university', 'Michigan State University', 'MSU', 'https://lolstatic-a.akamaihd.net/esports-assets/production/team/michigan-state-university-au4vndaf.png', 'https://lolstatic-a.akamaihd.net/esports-assets/production/team/michigan-state-university-c5mv9du0.png', NULL, NULL); +INSERT INTO dbo.team (id, ext_id, slug, name, code, image_url, alt_image_url, bg_image_url, home_league) VALUES (30, 102141671182557163, 'university-of-illinois', 'University of Illinois', 'UI', 'https://lolstatic-a.akamaihd.net/esports-assets/production/team/university-of-illinois-bwvscsri.png', 'https://lolstatic-a.akamaihd.net/esports-assets/production/team/university-of-illinois-b3jros5r.png', NULL, NULL); +INSERT INTO dbo.team (id, ext_id, slug, name, code, image_url, alt_image_url, bg_image_url, home_league) VALUES (31, 102141671183409133, 'maryville-university', 'Maryville University', 'MU', 'http://static.lolesports.com/teams/1647541915472_200x200_MU_Logo.png', 'http://static.lolesports.com/teams/1647541915475_200x200_MU_Logo.png', NULL, 28); +INSERT INTO dbo.team (id, ext_id, slug, name, code, image_url, alt_image_url, bg_image_url, home_league) VALUES (32, 102141671185047537, 'uci-esports', 'UCI Esports', 'UCI', 'http://static.lolesports.com/teams/1641604280633_UCI.png', 'http://static.lolesports.com/teams/1641548061305_LOLESPORTSICON.png', NULL, NULL); +INSERT INTO dbo.team (id, ext_id, slug, name, code, image_url, alt_image_url, bg_image_url, home_league) VALUES (33, 102141671185899507, 'university-of-western-ontario', 'University of Western Ontario', 'UWO', 'https://lolstatic-a.akamaihd.net/esports-assets/production/team/university-of-western-ontario-9q0nn3lw.png', 'https://lolstatic-a.akamaihd.net/esports-assets/production/team/university-of-western-ontario-6csb5dft.png', NULL, NULL); +INSERT INTO dbo.team (id, ext_id, slug, name, code, image_url, alt_image_url, bg_image_url, home_league) VALUES (34, 102141671186685941, 'university-of-waterloo', 'University of Waterloo', 'UW', 'https://lolstatic-a.akamaihd.net/esports-assets/production/team/university-of-waterloo-2wuni11l.png', 'https://lolstatic-a.akamaihd.net/esports-assets/production/team/university-of-waterloo-aghmypqf.png', NULL, NULL); +INSERT INTO dbo.team (id, ext_id, slug, name, code, image_url, alt_image_url, bg_image_url, home_league) VALUES (35, 102141671187668983, 'nc-state-university', 'NC State University', 'NCSU', 'https://lolstatic-a.akamaihd.net/esports-assets/production/team/nc-state-university-it42b898.png', 'https://lolstatic-a.akamaihd.net/esports-assets/production/team/nc-state-university-6ey19n1w.png', NULL, NULL); +INSERT INTO dbo.team (id, ext_id, slug, name, code, image_url, alt_image_url, bg_image_url, home_league) VALUES (36, 102235771678061291, 'fastpay-wildcats', 'fastPay Wildcats', 'IW', 'http://static.lolesports.com/teams/fastpay-wildcats.png', 'http://static.lolesports.com/teams/fastpay-wildcats.png', NULL, 39); +INSERT INTO dbo.team (id, ext_id, slug, name, code, image_url, alt_image_url, 
bg_image_url, home_league) VALUES (37, 102747101565183056, 'nongshim-redforce', 'NongShim REDFORCE', 'NS', 'http://static.lolesports.com/teams/NSFullonDark.png', 'http://static.lolesports.com/teams/NSFullonLight.png', 'http://static.lolesports.com/teams/NongshimRedForceNS.png', 34); +INSERT INTO dbo.team (id, ext_id, slug, name, code, image_url, alt_image_url, bg_image_url, home_league) VALUES (38, 102787200120306562, 'mousesports', 'Mousesports', 'MOUZ', 'http://static.lolesports.com/teams/1639486346996_PRM_MOUZ-FullColorDarkBG.png', 'http://static.lolesports.com/teams/1639486346999_PRM_MOUZ-FullColorDarkBG.png', NULL, NULL); +INSERT INTO dbo.team (id, ext_id, slug, name, code, image_url, alt_image_url, bg_image_url, home_league) VALUES (39, 102787200124959636, 'crvena-zvezda-esports', 'Crvena Zvezda Esports', 'CZV', 'https://lolstatic-a.akamaihd.net/esports-assets/production/team/crvena-zvezda-esports-ddtlzzhd.png', 'https://lolstatic-a.akamaihd.net/esports-assets/production/team/crvena-zvezda-esports-ddtlzzhd.png', NULL, 1); +INSERT INTO dbo.team (id, ext_id, slug, name, code, image_url, alt_image_url, bg_image_url, home_league) VALUES (40, 102787200126663579, 'giants', 'Giants', 'GIA', 'http://static.lolesports.com/teams/1641412992057_escudowhite.png', 'http://static.lolesports.com/teams/1641412992058_escudo_black.png', NULL, NULL); +INSERT INTO dbo.team (id, ext_id, slug, name, code, image_url, alt_image_url, bg_image_url, home_league) VALUES (41, 102787200129022886, 'esuba', 'eSuba', 'ESB', 'http://static.lolesports.com/teams/1629209489523_esuba_full_pos.png', 'http://static.lolesports.com/teams/1629209489525_esuba_full_pos.png', NULL, NULL); +INSERT INTO dbo.team (id, ext_id, slug, name, code, image_url, alt_image_url, bg_image_url, home_league) VALUES (42, 102787200130988976, 'asus-rog-elite', 'ASUS ROG Elite', 'ASUS', 'https://lolstatic-a.akamaihd.net/esports-assets/production/team/asus-rog-elite-iouou6l.png', 'https://lolstatic-a.akamaihd.net/esports-assets/production/team/asus-rog-elite-cz4z103n.png', NULL, NULL); +INSERT INTO dbo.team (id, ext_id, slug, name, code, image_url, alt_image_url, bg_image_url, home_league) VALUES (43, 102787200132955066, 'for-the-win-esports', 'For The Win Esports', 'FTW', 'http://static.lolesports.com/teams/LPLOL_FTW-Logo1.png', 'http://static.lolesports.com/teams/LPLOL_FTW-Logo1.png', NULL, NULL); +INSERT INTO dbo.team (id, ext_id, slug, name, code, image_url, alt_image_url, bg_image_url, home_league) VALUES (44, 102787200134790084, 'hma-fnatic-rising', 'HMA Fnatic Rising', 'FNCR', 'http://static.lolesports.com/teams/NLC_FNCR-logo.png', 'http://static.lolesports.com/teams/NLC_FNCR-logo.png', NULL, NULL); +INSERT INTO dbo.team (id, ext_id, slug, name, code, image_url, alt_image_url, bg_image_url, home_league) VALUES (45, 102787200136756173, 'berlin-international-gaming', 'Berlin International Gaming', 'BIG', 'http://static.lolesports.com/teams/BIG-Logo-2020-White1.png', 'http://static.lolesports.com/teams/BIG-Logo-2020-White1.png', NULL, 7); +INSERT INTO dbo.team (id, ext_id, slug, name, code, image_url, alt_image_url, bg_image_url, home_league) VALUES (46, 102787200138722262, 'devilsone', 'Devils.One', 'DV1', 'https://lolstatic-a.akamaihd.net/esports-assets/production/team/devilsone-bfe3xkh.png', 'https://lolstatic-a.akamaihd.net/esports-assets/production/team/devilsone-dmj5ivct.png', NULL, 6); +INSERT INTO dbo.team (id, ext_id, slug, name, code, image_url, alt_image_url, bg_image_url, home_league) VALUES (47, 102787200143309800, 'ensure', 
'eNsure', 'EN', 'https://lolstatic-a.akamaihd.net/esports-assets/production/team/ensure-5hi6e2cg.png', 'https://lolstatic-a.akamaihd.net/esports-assets/production/team/ensure-fehdkert.png', NULL, 1); +INSERT INTO dbo.team (id, ext_id, slug, name, code, image_url, alt_image_url, bg_image_url, home_league) VALUES (48, 102787200145472495, 'defusekids', 'Defusekids', 'DKI', 'https://lolstatic-a.akamaihd.net/esports-assets/production/team/defusekids-finmimok.png', 'https://lolstatic-a.akamaihd.net/esports-assets/production/team/defusekids-wu2z0pj.png', NULL, NULL); +INSERT INTO dbo.team (id, ext_id, slug, name, code, image_url, alt_image_url, bg_image_url, home_league) VALUES (49, 102787200147504121, 'campus-party-sparks', 'Campus Party Sparks', 'SPK', 'https://lolstatic-a.akamaihd.net/esports-assets/production/team/campus-party-sparks-5h2d1rjh.png', 'https://lolstatic-a.akamaihd.net/esports-assets/production/team/campus-party-sparks-72ccff49.png', NULL, NULL); +INSERT INTO dbo.team (id, ext_id, slug, name, code, image_url, alt_image_url, bg_image_url, home_league) VALUES (50, 102787200149928963, 'we-love-gaming', 'We Love Gaming', 'WLG', 'http://static.lolesports.com/teams/WLGlogo.png', 'http://static.lolesports.com/teams/WLGlogo.png', NULL, NULL); +INSERT INTO dbo.team (id, ext_id, slug, name, code, image_url, alt_image_url, bg_image_url, home_league) VALUES (51, 102787200151698443, 'vitalitybee', 'Vitality.Bee', 'VITB', 'http://static.lolesports.com/teams/Vitality-logo-color-outline-rgb.png', 'http://static.lolesports.com/teams/Vitality-logo-color-outline-rgb.png', NULL, 1); +INSERT INTO dbo.team (id, ext_id, slug, name, code, image_url, alt_image_url, bg_image_url, home_league) VALUES (52, 102787200153467923, 'bcn-squad', 'BCN Squad', 'BCN', 'http://static.lolesports.com/teams/SL_BCN-Logo_White.png', 'http://static.lolesports.com/teams/SL_BCN-Logo_Dark.png', NULL, NULL); +INSERT INTO dbo.team (id, ext_id, slug, name, code, image_url, alt_image_url, bg_image_url, home_league) VALUES (53, 102787200155434012, 'jdxl', 'JD|XL', 'JDXL', 'http://static.lolesports.com/teams/1641489535868_jdxl.png', NULL, NULL, 9); +INSERT INTO dbo.team (id, ext_id, slug, name, code, image_url, alt_image_url, bg_image_url, home_league) VALUES (54, 102787200157400101, 'falkn', 'FALKN', 'FKN', 'https://lolstatic-a.akamaihd.net/esports-assets/production/team/falkn-j72aqsqk.png', 'https://lolstatic-a.akamaihd.net/esports-assets/production/team/falkn-dhvtpixb.png', NULL, 1); +INSERT INTO dbo.team (id, ext_id, slug, name, code, image_url, alt_image_url, bg_image_url, home_league) VALUES (55, 102787200159169580, 'godsent', 'Godsent', 'GOD', 'http://static.lolesports.com/teams/NLC_GOD-light.png', 'http://static.lolesports.com/teams/NLC_GOD-dark.png', NULL, NULL); +INSERT INTO dbo.team (id, ext_id, slug, name, code, image_url, alt_image_url, bg_image_url, home_league) VALUES (56, 102825747701670848, 'azules-esports', 'Azules Esports', 'UCH', 'https://lolstatic-a.akamaihd.net/esports-assets/production/team/azules-esports-ak2khbqa.png', NULL, 'https://lolstatic-a.akamaihd.net/esports-assets/production/team/azules-esports-e8yjxxki.png', NULL); +INSERT INTO dbo.team (id, ext_id, slug, name, code, image_url, alt_image_url, bg_image_url, home_league) VALUES (57, 103461966951059521, 'evil-geniuses', 'Evil Geniuses', 'EG', 'http://static.lolesports.com/teams/1592590374862_EvilGeniusesEG-01-FullonDark.png', 'http://static.lolesports.com/teams/1592590374875_EvilGeniusesEG-03-FullonLight.png', 
'http://static.lolesports.com/teams/1590003096057_EvilGeniusesEG.png', 32); +INSERT INTO dbo.team (id, ext_id, slug, name, code, image_url, alt_image_url, bg_image_url, home_league) VALUES (58, 103461966965149786, 'mad-lions', 'MAD Lions', 'MAD', 'http://static.lolesports.com/teams/1631819614211_mad-2021-worlds.png', 'http://static.lolesports.com/teams/1592591395341_MadLionsMAD-03-FullonLight.png', 'http://static.lolesports.com/teams/MAD.png', 33); +INSERT INTO dbo.team (id, ext_id, slug, name, code, image_url, alt_image_url, bg_image_url, home_league) VALUES (59, 103461966971048042, 'eg-academy', 'EG Academy', 'EG', 'http://static.lolesports.com/teams/1592590391188_EvilGeniusesEG-01-FullonDark.png', 'http://static.lolesports.com/teams/1592590391200_EvilGeniusesEG-03-FullonLight.png', 'http://static.lolesports.com/teams/1590003135776_EvilGeniusesEG.png', 28); +INSERT INTO dbo.team (id, ext_id, slug, name, code, image_url, alt_image_url, bg_image_url, home_league) VALUES (60, 103461966975897718, 'imt-academy', 'IMT Academy', 'IMT', 'http://static.lolesports.com/teams/imt-new-color.png', 'http://static.lolesports.com/teams/imt-new-color.png', 'https://lolstatic-a.akamaihd.net/esports-assets/production/team/immortals-academy-hmxmnvhe.png', 28); +INSERT INTO dbo.team (id, ext_id, slug, name, code, image_url, alt_image_url, bg_image_url, home_league) VALUES (61, 103461966981927044, 'dig-academy', 'DIG Academy', 'DIG', 'http://static.lolesports.com/teams/DIG-FullonDark.png', 'http://static.lolesports.com/teams/DIG-FullonLight.png', 'http://static.lolesports.com/teams/DignitasDIG.png', 28); +INSERT INTO dbo.team (id, ext_id, slug, name, code, image_url, alt_image_url, bg_image_url, home_league) VALUES (62, 103461966986776720, 'ultra-prime', 'Ultra Prime', 'UP', 'http://static.lolesports.com/teams/ultraprime.png', 'http://static.lolesports.com/teams/ultraprime.png', NULL, 35); +INSERT INTO dbo.team (id, ext_id, slug, name, code, image_url, alt_image_url, bg_image_url, home_league) VALUES (63, 103495716836203404, '5-ronin', '5 Ronin', '5R', 'http://static.lolesports.com/teams/5R_LOGO.png', 'http://static.lolesports.com/teams/5R_LOGO.png', NULL, 39); +INSERT INTO dbo.team (id, ext_id, slug, name, code, image_url, alt_image_url, bg_image_url, home_league) VALUES (100, 104211666442891296, 'ogaming', 'O''Gaming', 'OGA', 'http://static.lolesports.com/teams/1590143833802_Ays7Gjmu_400x400.jpg', NULL, NULL, NULL); +INSERT INTO dbo.team (id, ext_id, slug, name, code, image_url, alt_image_url, bg_image_url, home_league) VALUES (64, 103495716886587312, 'besiktas', 'Beşiktaş', 'BJK', 'https://lolstatic-a.akamaihd.net/esports-assets/production/team/besiktas-e-sports-club-dlw48ntu.png', 'https://lolstatic-a.akamaihd.net/esports-assets/production/team/besiktas-e-sports-club-6ttscu28.png', NULL, 39); +INSERT INTO dbo.team (id, ext_id, slug, name, code, image_url, alt_image_url, bg_image_url, home_league) VALUES (65, 103535282113853330, '5-ronin-akademi', '5 Ronin Akademi', '5R', 'http://static.lolesports.com/teams/5R_LOGO.png', 'http://static.lolesports.com/teams/5R_LOGO.png', NULL, 2); +INSERT INTO dbo.team (id, ext_id, slug, name, code, image_url, alt_image_url, bg_image_url, home_league) VALUES (66, 103535282119620510, 'fukuoka-softbank-hawks-gaming', 'Fukuoka SoftBank HAWKS gaming', 'SHG', 'https://lolstatic-a.akamaihd.net/esports-assets/production/team/fukuoka-softbank-hawks-gaming-b99n2uq2.png', 'https://lolstatic-a.akamaihd.net/esports-assets/production/team/fukuoka-softbank-hawks-gaming-4i3ympnq.png', 
'https://lolstatic-a.akamaihd.net/esports-assets/production/team/fukuoka-softbank-hawks-gaming-4fl2jmuh.png', 40); +INSERT INTO dbo.team (id, ext_id, slug, name, code, image_url, alt_image_url, bg_image_url, home_league) VALUES (67, 103535282124208038, 'pentanetgg', 'Pentanet.GG', 'PGG', 'https://lolstatic-a.akamaihd.net/esports-assets/production/team/pentanetgg-3vnqnv03.png', 'https://lolstatic-a.akamaihd.net/esports-assets/production/team/pentanetgg-3d4g4sbh.png', NULL, 16); +INSERT INTO dbo.team (id, ext_id, slug, name, code, image_url, alt_image_url, bg_image_url, home_league) VALUES (68, 103535282135552642, 'papara-supermassive-blaze-akademi', 'Papara SuperMassive Blaze Akademi', 'SMB', 'http://static.lolesports.com/teams/1628521896643_SMBA_WHITE.png', 'http://static.lolesports.com/teams/1628521896646_SMBA_BLACK.png', NULL, 2); +INSERT INTO dbo.team (id, ext_id, slug, name, code, image_url, alt_image_url, bg_image_url, home_league) VALUES (69, 103535282138043022, 'fenerbahce-espor-akademi', 'Fenerbahçe Espor Akademi', 'FB', 'http://static.lolesports.com/teams/1642680283028_BANPICK_FB.png', 'http://static.lolesports.com/teams/1642680283035_BANPICK_FB.png', NULL, 2); +INSERT INTO dbo.team (id, ext_id, slug, name, code, image_url, alt_image_url, bg_image_url, home_league) VALUES (70, 103535282140533402, 'besiktas-akademi', 'Beşiktaş Akademi', 'BJK', 'https://lolstatic-a.akamaihd.net/esports-assets/production/team/besiktas-akademi-6dlbk21d.png', 'https://lolstatic-a.akamaihd.net/esports-assets/production/team/besiktas-akademi-fobrhai9.png', NULL, 2); +INSERT INTO dbo.team (id, ext_id, slug, name, code, image_url, alt_image_url, bg_image_url, home_league) VALUES (71, 103535282143744679, 'dark-passage-akademi', 'Dark Passage Akademi', 'DP', 'https://lolstatic-a.akamaihd.net/esports-assets/production/team/dark-passage-akademi-9ehs6q0l.png', 'https://lolstatic-a.akamaihd.net/esports-assets/production/team/dark-passage-akademi-h4x5hq6.png', NULL, 2); +INSERT INTO dbo.team (id, ext_id, slug, name, code, image_url, alt_image_url, bg_image_url, home_league) VALUES (72, 103535282146169523, 'info-yatrm-aurora-akademi', 'Info Yatırım Aurora Akademi', 'AUR', 'http://static.lolesports.com/teams/1642680351930_BANPICK_AUR.png', 'http://static.lolesports.com/teams/1642680351936_BANPICK_AUR.png', NULL, 2); +INSERT INTO dbo.team (id, ext_id, slug, name, code, image_url, alt_image_url, bg_image_url, home_league) VALUES (73, 103535282148790975, 'galakticos-akademi', 'GALAKTICOS Akademi', 'GAL', 'https://lolstatic-a.akamaihd.net/esports-assets/production/team/galakticos-akademi-4x1ww2pc.png', 'https://lolstatic-a.akamaihd.net/esports-assets/production/team/galakticos-akademi-dv3kn0pg.png', NULL, 2); +INSERT INTO dbo.team (id, ext_id, slug, name, code, image_url, alt_image_url, bg_image_url, home_league) VALUES (74, 103535282158162659, 'fastpay-wildcats-akademi', 'fastPay Wildcats Akademi', 'IW', 'http://static.lolesports.com/teams/1582880891336_IW.png', 'http://static.lolesports.com/teams/1582880891351_IW.png', NULL, 2); +INSERT INTO dbo.team (id, ext_id, slug, name, code, image_url, alt_image_url, bg_image_url, home_league) VALUES (75, 103877554248683116, 'schalke-04-evolution', 'Schalke 04 Evolution', 'S04E', 'http://static.lolesports.com/teams/S04_Standard_Logo1.png', 'http://static.lolesports.com/teams/S04_Standard_Logo1.png', NULL, NULL); +INSERT INTO dbo.team (id, ext_id, slug, name, code, image_url, alt_image_url, bg_image_url, home_league) VALUES (76, 103877589042434434, 'gamerlegion', 'GamerLegion', 
'GL', 'http://static.lolesports.com/teams/1585046217463_220px-Team_GamerLegionlogo_square.png', NULL, NULL, 1); +INSERT INTO dbo.team (id, ext_id, slug, name, code, image_url, alt_image_url, bg_image_url, home_league) VALUES (77, 103877625775457850, 'movistar-riders', 'Movistar Riders', 'MRS', 'http://static.lolesports.com/teams/1585046777741_220px-Movistar_Riderslogo_square.png', NULL, NULL, NULL); +INSERT INTO dbo.team (id, ext_id, slug, name, code, image_url, alt_image_url, bg_image_url, home_league) VALUES (78, 103877675241047720, 'ldlc-ol', 'LDLC OL', 'LDLC', 'http://static.lolesports.com/teams/LFL-LDLC-logo.png', 'http://static.lolesports.com/teams/LFL-LDLC-logo.png', NULL, 1); +INSERT INTO dbo.team (id, ext_id, slug, name, code, image_url, alt_image_url, bg_image_url, home_league) VALUES (79, 103877737868887783, 'saim-se', 'SAIM SE', 'SSB', 'http://static.lolesports.com/teams/1585048488568_220px-SAIM_SElogo_square.png', 'http://static.lolesports.com/teams/1585048488582_220px-SAIM_SElogo_square.png', NULL, NULL); +INSERT INTO dbo.team (id, ext_id, slug, name, code, image_url, alt_image_url, bg_image_url, home_league) VALUES (80, 103877756742242918, 'racoon', 'Racoon', 'RCN', 'http://static.lolesports.com/teams/1585048776551_220px-Racoon_(Italian_Team)logo_square.png', 'http://static.lolesports.com/teams/1585048776564_220px-Racoon_(Italian_Team)logo_square.png', NULL, NULL); +INSERT INTO dbo.team (id, ext_id, slug, name, code, image_url, alt_image_url, bg_image_url, home_league) VALUES (81, 103877774634323825, 'ydn-gamers', 'YDN Gamers', 'YDN', 'http://static.lolesports.com/teams/1587638409857_LOGO_YDN_-trasp.png', 'http://static.lolesports.com/teams/1587638409876_LOGO_YDN_-trasp.png', NULL, NULL); +INSERT INTO dbo.team (id, ext_id, slug, name, code, image_url, alt_image_url, bg_image_url, home_league) VALUES (82, 103877879209300619, 'vipers-inc', 'Vipers Inc', 'VIP', 'http://static.lolesports.com/teams/1585050644953_220px-Vipers_Inclogo_square.png', 'http://static.lolesports.com/teams/1585050644968_220px-Vipers_Inclogo_square.png', NULL, NULL); +INSERT INTO dbo.team (id, ext_id, slug, name, code, image_url, alt_image_url, bg_image_url, home_league) VALUES (83, 103877891572305836, 'team-singularity', 'Team Singularity', 'SNG', 'http://static.lolesports.com/teams/NLC_SNG-light.png', 'http://static.lolesports.com/teams/NLC_SNG-logo.png', NULL, 9); +INSERT INTO dbo.team (id, ext_id, slug, name, code, image_url, alt_image_url, bg_image_url, home_league) VALUES (84, 103877908090914662, 'kenty', 'Kenty', 'KEN', 'http://static.lolesports.com/teams/1585051086000_220px-Kentylogo_square.png', 'http://static.lolesports.com/teams/1585051086014_220px-Kentylogo_square.png', NULL, NULL); +INSERT INTO dbo.team (id, ext_id, slug, name, code, image_url, alt_image_url, bg_image_url, home_league) VALUES (85, 103877925817094140, 'pigsports', 'PIGSPORTS', 'PIG', 'http://static.lolesports.com/teams/PIGSPORTS_PIG-Logo1.png', 'http://static.lolesports.com/teams/PIGSPORTS_PIG-Logo1.png', NULL, NULL); +INSERT INTO dbo.team (id, ext_id, slug, name, code, image_url, alt_image_url, bg_image_url, home_league) VALUES (86, 103877951616192529, 'cyber-gaming', 'Cyber Gaming', 'CG', 'http://static.lolesports.com/teams/1585051749524_220px-Cyber_Gaminglogo_square.png', 'http://static.lolesports.com/teams/1585051749529_220px-Cyber_Gaminglogo_square.png', NULL, NULL); +INSERT INTO dbo.team (id, ext_id, slug, name, code, image_url, alt_image_url, bg_image_url, home_league) VALUES (87, 103877976717529187, 
'intrepid-fox-gaming', 'Intrepid Fox Gaming', 'IF', 'http://static.lolesports.com/teams/1585052132267_220px-Intrepid_Fox_Gaminglogo_square.png', 'http://static.lolesports.com/teams/1585052132281_220px-Intrepid_Fox_Gaminglogo_square.png', NULL, NULL); +INSERT INTO dbo.team (id, ext_id, slug, name, code, image_url, alt_image_url, bg_image_url, home_league) VALUES (88, 103878020539746273, 'egn-esports', 'EGN Esports', 'EGN', 'http://static.lolesports.com/teams/LPLOL_EGN-Logo1.png', 'http://static.lolesports.com/teams/LPLOL_EGN-Logo1.png', NULL, NULL); +INSERT INTO dbo.team (id, ext_id, slug, name, code, image_url, alt_image_url, bg_image_url, home_league) VALUES (89, 103935421249833954, 'mad-lions-madrid', 'MAD Lions Madrid', 'MADM', 'http://static.lolesports.com/teams/SL_MADM-Logo_white.png', 'http://static.lolesports.com/teams/SL_MADM-Logo_dark.png', NULL, 5); +INSERT INTO dbo.team (id, ext_id, slug, name, code, image_url, alt_image_url, bg_image_url, home_league) VALUES (90, 103935446548920777, 'misfits-premier', 'Misfits Premier', 'MSFP', 'http://static.lolesports.com/teams/LFL-MSFP-logo.png', 'http://static.lolesports.com/teams/LFL-MSFP-logo.png', NULL, NULL); +INSERT INTO dbo.team (id, ext_id, slug, name, code, image_url, alt_image_url, bg_image_url, home_league) VALUES (91, 103935468920814040, 'gamersorigin', 'GamersOrigin', 'GO', 'http://static.lolesports.com/teams/1588178480033_logoGO_2020_G_Blanc.png', 'http://static.lolesports.com/teams/1588178480035_logoGO_2020_G_Noir.png', NULL, 11); +INSERT INTO dbo.team (id, ext_id, slug, name, code, image_url, alt_image_url, bg_image_url, home_league) VALUES (92, 103935523328473675, 'k1ck-neosurf', 'K1CK Neosurf', 'K1', 'http://static.lolesports.com/teams/1585930223604_K1ck_Neosurflogo_square.png', NULL, NULL, NULL); +INSERT INTO dbo.team (id, ext_id, slug, name, code, image_url, alt_image_url, bg_image_url, home_league) VALUES (93, 103935530333072898, 'ago-rogue', 'AGO Rogue', 'RGO', 'http://static.lolesports.com/teams/1585930330127_AGO_ROGUElogo_square.png', NULL, NULL, 1); +INSERT INTO dbo.team (id, ext_id, slug, name, code, image_url, alt_image_url, bg_image_url, home_league) VALUES (94, 103935567188806885, 'energypot-wizards', 'Energypot Wizards', 'EWIZ', 'http://static.lolesports.com/teams/1585930892362_Energypot_Wizardslogo_square.png', NULL, NULL, NULL); +INSERT INTO dbo.team (id, ext_id, slug, name, code, image_url, alt_image_url, bg_image_url, home_league) VALUES (95, 103935642731826448, 'sector-one', 'Sector One', 'S1', 'http://static.lolesports.com/teams/1641288621852_1024x1024_sector_one_nameless_white.png', 'http://static.lolesports.com/teams/1641288621854_1024x1024_sector_one_nameless_black.png', NULL, 19); +INSERT INTO dbo.team (id, ext_id, slug, name, code, image_url, alt_image_url, bg_image_url, home_league) VALUES (96, 103963647433204351, 'm19', 'M19', 'M19', 'http://static.lolesports.com/teams/1586359360406_M19logo_square.png', NULL, NULL, NULL); +INSERT INTO dbo.team (id, ext_id, slug, name, code, image_url, alt_image_url, bg_image_url, home_league) VALUES (97, 103963715924353674, 'dragon-army', 'Dragon Army', 'DA', 'http://static.lolesports.com/teams/1586360405423_440px-Dragon_Armylogo_square.png', NULL, NULL, 41); +INSERT INTO dbo.team (id, ext_id, slug, name, code, image_url, alt_image_url, bg_image_url, home_league) VALUES (98, 103963753080578719, 'crowcrowd-moscow', 'CrowCrowd Moscow', 'CC', 'http://static.lolesports.com/teams/Logo_CC.png', NULL, NULL, 41); +INSERT INTO dbo.team (id, ext_id, slug, name, code, 
image_url, alt_image_url, bg_image_url, home_league) VALUES (99, 104202382255290736, 'rensga', 'RENSGA', 'RNS', 'http://static.lolesports.com/teams/LogoRensgaEsports.png', 'http://static.lolesports.com/teams/LogoRensgaEsports.png', 'http://static.lolesports.com/teams/RensgaRNS.png', 37); +SET IDENTITY_INSERT dbo.team OFF; + +-- Values for tournament table +SET IDENTITY_INSERT dbo.tournament ON; +INSERT INTO dbo.tournament (id, ext_id, slug, start_date, end_date, league) VALUES (1, 107893386210553711, 'european_masters_spring_2022_main_event', '2022-04-13', '2022-05-08', 1); +INSERT INTO dbo.tournament (id, ext_id, slug, start_date, end_date, league) VALUES (2, 107530554766055254, 'lla_opening_2022', '2022-01-28', '2022-04-17', 3); +INSERT INTO dbo.tournament (id, ext_id, slug, start_date, end_date, league) VALUES (3, 107693721179065689, 'pcs_2022_spring', '2022-02-11', '2022-04-18', 4); +INSERT INTO dbo.tournament (id, ext_id, slug, start_date, end_date, league) VALUES (4, 107468241207873310, 'superliga_2022_spring', '2022-01-09', '2022-05-01', 5); +INSERT INTO dbo.tournament (id, ext_id, slug, start_date, end_date, league) VALUES (5, 107416436272657995, 'ultraliga_2022_spring', '2022-01-01', '2022-05-01', 6); +INSERT INTO dbo.tournament (id, ext_id, slug, start_date, end_date, league) VALUES (6, 107417741193036913, 'prime_2022_spring', '2022-01-01', '2022-05-01', 7); +INSERT INTO dbo.tournament (id, ext_id, slug, start_date, end_date, league) VALUES (7, 107457033672415830, 'pg_spring', '2022-01-17', '2022-05-01', 8); +INSERT INTO dbo.tournament (id, ext_id, slug, start_date, end_date, league) VALUES (8, 107417432877679361, 'nlc_2022_spring', '2022-01-01', '2022-05-15', 9); +INSERT INTO dbo.tournament (id, ext_id, slug, start_date, end_date, league) VALUES (9, 107468370558963709, 'lfl_2022_spring', '2022-01-09', '2022-05-01', 11); +INSERT INTO dbo.tournament (id, ext_id, slug, start_date, end_date, league) VALUES (10, 107565607659994755, 'cblol_academy_2022', '2022-01-24', '2022-04-18', 15); +INSERT INTO dbo.tournament (id, ext_id, slug, start_date, end_date, league) VALUES (11, 107439320897210747, 'lco_spring_2022', '2022-01-23', '2022-04-29', 16); +INSERT INTO dbo.tournament (id, ext_id, slug, start_date, end_date, league) VALUES (12, 107563481236862420, 'eslol_spring', '2022-01-16', '2022-05-01', 19); +INSERT INTO dbo.tournament (id, ext_id, slug, start_date, end_date, league) VALUES (13, 107682708465517027, 'discover_volcano_league_opening_2022', '2022-01-25', '2022-04-16', 22); +INSERT INTO dbo.tournament (id, ext_id, slug, start_date, end_date, league) VALUES (14, 107728324355999617, 'master_flow_league_opening_2022', '2022-01-26', '2022-04-24', 24); +INSERT INTO dbo.tournament (id, ext_id, slug, start_date, end_date, league) VALUES (15, 107677841285321565, 'honor_league_opening_2022', '2022-01-24', '2022-04-16', 25); +INSERT INTO dbo.tournament (id, ext_id, slug, start_date, end_date, league) VALUES (16, 107921288851375933, 'proving_grounds_spring_2022', '2022-03-16', '2022-04-16', 28); +INSERT INTO dbo.tournament (id, ext_id, slug, start_date, end_date, league) VALUES (17, 108097587668586485, 'tft_emea_lcq_2022', '2022-04-16', '2022-04-16', 29); +INSERT INTO dbo.tournament (id, ext_id, slug, start_date, end_date, league) VALUES (18, 107458367237283414, 'lcs_spring_2022', '2022-02-04', '2022-04-25', 32); +INSERT INTO dbo.tournament (id, ext_id, slug, start_date, end_date, league) VALUES (19, 107417059262120466, 'lec_2022_spring', '2022-01-01', '2022-05-15', 33); +INSERT INTO 
dbo.tournament (id, ext_id, slug, start_date, end_date, league) VALUES (20, 107417779630700437, 'lpl_spring_2022', '2022-01-10', '2022-05-01', 35); +INSERT INTO dbo.tournament (id, ext_id, slug, start_date, end_date, league) VALUES (21, 107405837336179496, 'cblol_2022_split1', '2022-01-22', '2022-04-23', 37); +INSERT INTO dbo.tournament (id, ext_id, slug, start_date, end_date, league) VALUES (22, 107417471555810057, 'lcl_spring_2022', '2022-02-11', '2022-04-16', 41); +INSERT INTO dbo.tournament (id, ext_id, slug, start_date, end_date, league) VALUES (23, 107418086627198298, 'lcs_academy_2022_spring', '2022-01-19', '2022-05-31', 42); +SET IDENTITY_INSERT dbo.tournament OFF; +"; diff --git a/tests/crud/delete_operations.rs b/tests/crud/delete_operations.rs new file mode 100644 index 00000000..29349bbf --- /dev/null +++ b/tests/crud/delete_operations.rs @@ -0,0 +1,104 @@ +///! Integration tests for the CRUD operations available in `Canyon` that +///! generate and execute *DELETE* statements +use canyon_sql::crud::CrudOperations; + +use crate::constants::{PSQL_DS, SQL_SERVER_DS}; +use crate::tests_models::league::*; + +/// Deletes a row from the database that is mapped into some instance of a `T` entity. +/// +/// The `t.delete(&self)` operation is only enabled for types that +/// have at least one of their fields annotated with the `#[primary_key]` +/// attribute, because we use that concrete field to construct the clause that targets +/// that entity. +/// +/// Attempting to use the `t.delete(&self)` method on an entity without `#[primary_key]` +/// will raise a runtime error. +#[canyon_sql::macros::canyon_tokio_test] +fn test_crud_delete_method_operation() { + // To test the delete, we will insert a new instance into the database and then, + // after inspecting it, we will proceed to delete it + let mut new_league: League = League { + id: Default::default(), + ext_id: 7892635306594_i64, + slug: "some-new-league".to_string(), + name: "Some New League".to_string(), + region: "Bahía de cochinos".to_string(), + image_url: "https://nobodyspectsandimage.io".to_string(), + }; + + // We insert the instance into the database, on the `League` entity + new_league.insert().await.expect("Failed insert operation"); + + assert_eq!( + new_league.id, + League::find_by_pk_datasource(&new_league.id, PSQL_DS) + .await + .expect("Request error") + .expect("None value") + .id + ); + + // Now that we have an instance mapped to some entity by a primary key, we can + // remove that entry from the database with the delete operation + new_league + .delete() + .await + .expect("Failed the delete operation"); + + // To check the success, we can query by the primary key value and check if, after unwrapping + // the result of the operation, the find by primary key contains Some(v) or None + // Remember that `find_by_primary_key(&dyn QueryParameters<'a>) -> Result<Option<T>, Err>` + assert_eq!( + League::find_by_pk(&new_league.id) + .await + .expect("Unwrapping the result, keeping the Option"), + None + ); +} + +/// Same as the delete test, but performing the operations with the specified datasource +#[canyon_sql::macros::canyon_tokio_test] +fn test_crud_delete_datasource_method_operation() { + // To test the delete, we will insert a new instance into the database and then, + // after inspecting it, we will proceed to delete it + let mut new_league: League = League { + id: Default::default(), + ext_id: 7892635306594_i64, + slug: "some-new-league".to_string(), + name: "Some New League".to_string(), + region: "Bahía de 
cochinos".to_string(), + image_url: "https://nobodyspectsandimage.io".to_string(), + }; + + // We insert the instance on the database, on the `League` entity + new_league + .insert_datasource(SQL_SERVER_DS) + .await + .expect("Failed insert operation"); + assert_eq!( + new_league.id, + League::find_by_pk_datasource(&new_league.id, SQL_SERVER_DS) + .await + .expect("Request error") + .expect("None value") + .id + ); + + // Now that we have an instance mapped to some entity by a primary key, we can now + // remove that entry from the database with the delete operation + new_league + .delete_datasource(SQL_SERVER_DS) + .await + .expect("Failed to delete the operation"); + + // To check the success, we can query by the primary key value and check if, after unwrap() + // the result of the operation, the find by primary key contains Some(v) or None + // Remeber that `find_by_primary_key(&dyn QueryParameters<'a>) -> Result>, Err> + assert_eq!( + League::find_by_pk_datasource(&new_league.id, SQL_SERVER_DS) + .await + .expect("Unwrapping the result, letting the Option"), + None + ); +} diff --git a/tests/crud/foreign_key_operations.rs b/tests/crud/foreign_key_operations.rs new file mode 100644 index 00000000..ce0ea585 --- /dev/null +++ b/tests/crud/foreign_key_operations.rs @@ -0,0 +1,107 @@ +///! Integration tests for the CRUD operations available in `Canyon` that +///! generates and executes *SELECT* statements based on a entity +///! annotated with the `#[foreign_key(... args)]` annotation looking +///! for the related data with some entity `U` that acts as is parent, where `U` +///! impls `ForeignKeyable` (isn't requiered, but it won't unlock the +///! reverse search features parent -> child, only the child -> parent ones). +/// +///! Names of the foreign key methods are autogenerated for the direct and +///! reverse side of the implementations. +///! 
For more info: TODO -> Link to the docs of the foreign key chapter +use canyon_sql::crud::CrudOperations; + +use crate::constants::SQL_SERVER_DS; +use crate::tests_models::league::*; +use crate::tests_models::tournament::*; + +/// Given an entity `T` which has some field declaring a foreign key relation +/// with another entity `U`, performs a search to find +/// the parent `U` of the given `T` instance +#[canyon_sql::macros::canyon_tokio_test] +fn test_crud_search_by_foreign_key() { + let some_tournament: Tournament = Tournament::find_by_pk(&1) + .await + .expect("Result variant of the query is err") + .expect("No result found for the given parameter"); + + // We can get the parent entity for the retrieved child instance + let parent_entity: Option<League> = some_tournament + .search_league() + .await + .expect("Result variant of the query is err"); + + if let Some(league) = parent_entity { + assert_eq!(some_tournament.league, league.id) + } else { + assert_eq!(parent_entity, None) + } +} + +/// Same as the search by foreign key, but with the specified datasource +#[canyon_sql::macros::canyon_tokio_test] +fn test_crud_search_by_foreign_key_datasource() { + let some_tournament: Tournament = Tournament::find_by_pk_datasource(&10, SQL_SERVER_DS) + .await + .expect("Result variant of the query is err") + .expect("No result found for the given parameter"); + + // We can get the parent entity for the retrieved child instance + let parent_entity: Option<League> = some_tournament + .search_league_datasource(SQL_SERVER_DS) + .await + .expect("Result variant of the query is err"); + + // These are tests, so we could simply unwrap the result contained in the Option, because + // the search should always find the data inserted when the docker starts. + // But we keep the branching just to change the style a little bit and show more + // options for handling things with Canyon + if let Some(league) = parent_entity { + assert_eq!(some_tournament.league, league.id) + } else { + assert_eq!(parent_entity, None) + } +} + +/// Given an entity `U` that is known as the "parent" side of the relation with another +/// entity `T`, we can ask the parent for the children that belong +/// to `U`. 
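+/// As a quick sketch of what such a parent entity looks like (the derive and annotation names are the ones this suite already relies on; the `i32` key type and the field visibility are assumptions): +/// +/// ```rust,ignore +/// #[derive(CanyonCrud, ForeignKeyable)] +/// #[canyon_entity] +/// pub struct League { +/// #[primary_key] +/// pub id: i32, +/// pub ext_id: i64, +/// pub slug: String, +/// pub name: String, +/// pub region: String, +/// pub image_url: String, +/// } +/// ``` 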
+/// +/// For this to work, `U`, the parent, must have derived the `ForeignKeyable` proc macro +#[canyon_sql::macros::canyon_tokio_test] +fn test_crud_search_reverse_side_foreign_key() { + let some_league: League = League::find_by_pk(&1) + .await + .expect("Result variant of the query is err") + .expect("No result found for the given parameter"); + + // Computes how many tournaments are pointing to the retrieved league + let child_tournaments: Vec<Tournament> = Tournament::search_league_childrens(&some_league) + .await + .expect("Result variant of the query is err"); + + assert!(!child_tournaments.is_empty()); + child_tournaments + .iter() + .for_each(|t| assert_eq!(t.league, some_league.id)); +} + +/// Same as the search by the reverse side of a foreign key relation, +/// but with the specified datasource +#[canyon_sql::macros::canyon_tokio_test] +fn test_crud_search_reverse_side_foreign_key_datasource() { + let some_league: League = League::find_by_pk_datasource(&1, SQL_SERVER_DS) + .await + .expect("Result variant of the query is err") + .expect("No result found for the given parameter"); + + // Computes how many tournaments are pointing to the retrieved league + let child_tournaments: Vec<Tournament> = + Tournament::search_league_childrens_datasource(&some_league, SQL_SERVER_DS) + .await + .expect("Result variant of the query is err"); + + assert!(!child_tournaments.is_empty()); + child_tournaments + .iter() + .for_each(|t| assert_eq!(t.league, some_league.id)); +} diff --git a/tests/crud/insert_operations.rs b/tests/crud/insert_operations.rs new file mode 100644 index 00000000..480dc06f --- /dev/null +++ b/tests/crud/insert_operations.rs @@ -0,0 +1,215 @@ +///! Integration tests for the CRUD operations available in `Canyon` that +///! generate and execute *INSERT* statements +use canyon_sql::crud::CrudOperations; + +use crate::constants::SQL_SERVER_DS; +use crate::tests_models::league::*; + +/// Inserts a new record into the database, given an entity that is +/// annotated with the `#[canyon_entity]` macro over a *T* type. +/// +/// To insert a new record into a database, the *insert* operation has +/// some special requirements: +/// > - We need a mutable instance of `T`. If the operation completes +/// successfully, the insert operation will automatically set the autogenerated +/// value for the `primary_key` annotated field in it. +/// +/// > - It's considered a good practice to initialize that concrete field with +/// the `Default` trait, because the value on the primary key field will be +/// ignored at the execution time of the insert, and updated with the autogenerated +/// value by the database. +/// +/// By default, the `#[primary_key]` annotation means autogenerated and autoincremental. +/// You can configure non-autoincremental behaviour via the macro annotation parameters (please, +/// refer to the docs [here]() for more info). +/// +/// If the type doesn't have a `#[primary_key]` annotation, or the annotation contains +/// an argument specifying non-autoincremental behaviour, all the fields will be +/// inserted into the database and no returning value will be placed in any field. 
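+/// A minimal sketch of the default, autogenerated `#[primary_key]` flow described above (the `League` field set mirrors the literals used in these tests; the concrete values are placeholders and error handling is elided): +/// +/// ```rust,ignore +/// let mut league = League { +/// // ignored at insert time; overwritten with the database-generated key +/// id: Default::default(), +/// ext_id: 1_i64, +/// slug: "a-slug".to_string(), +/// name: "A Name".to_string(), +/// region: "A region".to_string(), +/// image_url: "https://an-image.io".to_string(), +/// }; +/// league.insert().await?; +/// // `league.id` now holds the autogenerated primary key value +/// ``` 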
+#[canyon_sql::macros::canyon_tokio_test] +fn test_crud_insert_operation() { + let mut new_league: League = League { + id: Default::default(), + ext_id: 7892635306594_i64, + slug: "some-new-league".to_string(), + name: "Some New League".to_string(), + region: "Bahía de cochinos".to_string(), + image_url: "https://nobodyspectsandimage.io".to_string(), + }; + + // We insert the instance into the database, on the `League` entity + new_league.insert().await.expect("Failed insert operation"); + + // Now, in the `id` field of the instance, we have the autogenerated + // value for the primary key field, which is id. So, we can query the + // database again with the find by primary key operation to check if + // the value was really inserted + let inserted_league = League::find_by_pk(&new_league.id) + .await + .expect("Failed the query to the database") + .expect("No entity found for the primary key value passed in"); + + assert_eq!(new_league.id, inserted_league.id); +} + +/// Same as the insert operation above, but targeting the database defined in +/// the specified datasource +#[canyon_sql::macros::canyon_tokio_test] +fn test_crud_insert_datasource_operation() { + let mut new_league: League = League { + id: Default::default(), + ext_id: 7892635306594_i64, + slug: "some-new-league".to_string(), + name: "Some New League".to_string(), + region: "Bahía de cochinos".to_string(), + image_url: "https://nobodyspectsandimage.io".to_string(), + }; + + // We insert the instance into the database, on the `League` entity + new_league + .insert_datasource(SQL_SERVER_DS) + .await + .expect("Failed insert datasource operation"); + + // Now, in the `id` field of the instance, we have the autogenerated + // value for the primary key field, which is id. So, we can query the + // database again with the find by primary key operation to check if + // the value was really inserted + let inserted_league = League::find_by_pk_datasource(&new_league.id, SQL_SERVER_DS) + .await + .expect("Failed the query to the database") + .expect("No entity found for the primary key value passed in"); + + assert_eq!(new_league.id, inserted_league.id); +} + +/// The multi insert operation is a shorthand for inserting multiple instances of *T* +/// into the database at once. +/// +/// It works pretty much the same as the insert operation, with the same behaviour +/// of the `#[primary_key]` annotation over some field. It will automatically set the primary +/// key field of every entity passed in, as an array of mutable instances of `T`, with +/// the value autogenerated by the database on the insert operation. +/// +/// For instances without `#[primary_key]`, all the values of the instance fields +/// are inserted into the database. 
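+/// As a hedged sketch only (the tests below exercise the behaviour through individual `insert()` calls, so the name and signature of the batched associated function are assumptions here), such a call might look like: +/// +/// ```rust,ignore +/// // hypothetical: a slice of mutable references, so the generated keys can be written back +/// League::multi_insert(&mut [&mut league_a, &mut league_b, &mut league_c]).await?; +/// ``` 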
+#[canyon_sql::macros::canyon_tokio_test] +fn test_crud_multi_insert_operation() { + let mut new_league_mi: League = League { + id: Default::default(), + ext_id: 54376478_i64, + slug: "some-new-random-league".to_string(), + name: "Some New Random League".to_string(), + region: "Unknown".to_string(), + image_url: "https://what-a-league.io".to_string(), + }; + let mut new_league_mi_2: League = League { + id: Default::default(), + ext_id: 3475689769678906_i64, + slug: "new-league-2".to_string(), + name: "New League 2".to_string(), + region: "Really unknown".to_string(), + image_url: "https://what-an-unknown-league.io".to_string(), + }; + let mut new_league_mi_3: League = League { + id: Default::default(), + ext_id: 46756867_i64, + slug: "a-new-multinsert".to_string(), + name: "New League 3".to_string(), + region: "The dark side of the moon".to_string(), + image_url: "https://interplanetary-league.io".to_string(), + }; + + // Insert the instances as database entities + new_league_mi + .insert() + .await + .expect("Failed insert operation"); + new_league_mi_2 + .insert() + .await + .expect("Failed insert operation"); + new_league_mi_3 + .insert() + .await + .expect("Failed insert operation"); + + // Recover the inserted data by primary key + let inserted_league = League::find_by_pk(&new_league_mi.id) + .await + .expect("[1] - Failed the query to the database") + .expect("[1] - No entity found for the primary key value passed in"); + let inserted_league_2 = League::find_by_pk(&new_league_mi_2.id) + .await + .expect("[2] - Failed the query to the database") + .expect("[2] - No entity found for the primary key value passed in"); + let inserted_league_3 = League::find_by_pk(&new_league_mi_3.id) + .await + .expect("[3] - Failed the query to the database") + .expect("[3] - No entity found for the primary key value passed in"); + + assert_eq!(new_league_mi.id, inserted_league.id); + assert_eq!(new_league_mi_2.id, inserted_league_2.id); + assert_eq!(new_league_mi_3.id, inserted_league_3.id); +} + +/// Same as the multi insert above, but with the specified datasource +#[canyon_sql::macros::canyon_tokio_test] +fn test_crud_multi_insert_datasource_operation() { + let mut new_league_mi: League = League { + id: Default::default(), + ext_id: 54376478_i64, + slug: "some-new-random-league".to_string(), + name: "Some New Random League".to_string(), + region: "Unknown".to_string(), + image_url: "https://what-a-league.io".to_string(), + }; + let mut new_league_mi_2: League = League { + id: Default::default(), + ext_id: 3475689769678906_i64, + slug: "new-league-2".to_string(), + name: "New League 2".to_string(), + region: "Really unknown".to_string(), + image_url: "https://what-an-unknown-league.io".to_string(), + }; + let mut new_league_mi_3: League = League { + id: Default::default(), + ext_id: 46756867_i64, + slug: "a-new-multinsert".to_string(), + name: "New League 3".to_string(), + region: "The dark side of the moon".to_string(), + image_url: "https://interplanetary-league.io".to_string(), + }; + + // Insert the instances as database entities + new_league_mi + .insert_datasource(SQL_SERVER_DS) + .await + .expect("Failed insert datasource operation"); + new_league_mi_2 + .insert_datasource(SQL_SERVER_DS) + .await + .expect("Failed insert datasource operation"); + new_league_mi_3 + .insert_datasource(SQL_SERVER_DS) + .await + .expect("Failed insert datasource operation"); + + // Recover the inserted data by primary key + let inserted_league = 
League::find_by_pk_datasource(&new_league_mi.id, SQL_SERVER_DS) + .await + .expect("[1] - Failed the query to the database") + .expect("[1] - No entity found for the primary key value passed in"); + let inserted_league_2 = League::find_by_pk_datasource(&new_league_mi_2.id, SQL_SERVER_DS) + .await + .expect("[2] - Failed the query to the database") + .expect("[2] - No entity found for the primary key value passed in"); + let inserted_league_3 = League::find_by_pk_datasource(&new_league_mi_3.id, SQL_SERVER_DS) + .await + .expect("[3] - Failed the query to the database") + .expect("[3] - No entity found for the primary key value passed in"); + + assert_eq!(new_league_mi.id, inserted_league.id); + assert_eq!(new_league_mi_2.id, inserted_league_2.id); + assert_eq!(new_league_mi_3.id, inserted_league_3.id); +} diff --git a/tests/crud/mod.rs b/tests/crud/mod.rs new file mode 100644 index 00000000..8568b2a9 --- /dev/null +++ b/tests/crud/mod.rs @@ -0,0 +1,69 @@ +pub mod delete_operations; +pub mod foreign_key_operations; +pub mod insert_operations; +pub mod querybuilder_operations; +pub mod select_operations; +pub mod update_operations; + +use crate::constants::SQL_SERVER_CREATE_TABLES; +use crate::constants::SQL_SERVER_DS; +use crate::constants::SQL_SERVER_FILL_TABLE_VALUES; +use crate::tests_models::league::League; + +use canyon_sql::crud::CrudOperations; +use canyon_sql::db_clients::tiberius::{Client, Config}; +use canyon_sql::runtime::tokio::net::TcpStream; +use canyon_sql::runtime::tokio_util::compat::TokioAsyncWriteCompatExt; + +/// In order to initialize data on `SqlServer`, we must manually insert it +/// when the docker starts. The official SqlServer docker image from Microsoft does +/// not allow you to run `.sql` files against the database (at least, not without +/// using a workaround). So, we query the `SqlServer` to check if it already +/// has some data (other processes, persistence or multi-threading envs), and if not, +/// we retrieve the data inserted into `postgreSQL` at start-up and +/// insert it into the `SqlServer` instance. 
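+/// +/// Assuming the default cargo test harness, the sequence described below boils down to: +/// +/// ```text +/// cargo test initialize_sql_server_docker_instance -- --ignored +/// cargo test +/// ``` 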
+/// +/// This test is marked as `#[ignore]`, so we can force the ignored test to run first, +/// check the available data, perform the necessary init operations and +/// only then *cargo test* the real integration tests +#[canyon_sql::macros::canyon_tokio_test] +#[ignore] +fn initialize_sql_server_docker_instance() { + canyon_sql::runtime::futures::executor::block_on(async { + static CONN_STR: &str = + "server=tcp:localhost,1434;User Id=SA;Password=SqlServer-10;TrustServerCertificate=true"; + + let config = Config::from_ado_string(CONN_STR).unwrap(); + + let tcp = TcpStream::connect(config.get_addr()).await.unwrap(); + let tcp2 = TcpStream::connect(config.get_addr()).await.unwrap(); + tcp.set_nodelay(true).ok(); + + let mut client = Client::connect(config.clone(), tcp.compat_write()) + .await + .unwrap(); + + // Create the tables + let query_result = client.query(SQL_SERVER_CREATE_TABLES, &[]).await; + assert!(query_result.is_ok()); + + let leagues_sql = League::find_all_datasource(SQL_SERVER_DS).await; + println!("LSQL ERR: {leagues_sql:?}"); + assert!(leagues_sql.is_ok()); + + match leagues_sql { + Ok(ref leagues) => { + let leagues_len = leagues.len(); + println!("Leagues already inserted on SQLSERVER: {:?}", &leagues_len); + if leagues.len() < 10 { + let mut client2 = Client::connect(config, tcp2.compat_write()) + .await + .expect("Can't connect to MSSQL"); + let result = client2.query(SQL_SERVER_FILL_TABLE_VALUES, &[]).await; + assert!(result.is_ok()); + } + } + Err(e) => eprintln!("Error retrieving the leagues: {e}"), + } + }); +} diff --git a/tests/crud/querybuilder_operations.rs b/tests/crud/querybuilder_operations.rs new file mode 100644 index 00000000..7b46fa24 --- /dev/null +++ b/tests/crud/querybuilder_operations.rs @@ -0,0 +1,250 @@ +///! Tests for the available QueryBuilder operations within Canyon. +/// +///! QueryBuilders are the way to obtain more flexibility than with +///! the default generated queries, essentially for building the queries +///! 
with the SQL filters +/// +use canyon_sql::{ + crud::CrudOperations, + query::{operators::Comp, ops::QueryBuilder}, +}; + +use crate::constants::SQL_SERVER_DS; +use crate::tests_models::league::*; +use crate::tests_models::player::*; +use crate::tests_models::tournament::*; + +/// Builds a new SQL statement for retrieving entities of the `T` type, filtered +/// with the parameters that modify the base `SELECT * FROM {table_name}` SQL +#[canyon_sql::macros::canyon_tokio_test] +fn test_generated_sql_by_the_select_querybuilder() { + let mut select_with_joins = League::select_query(); + select_with_joins + .inner_join("tournament", "league.id", "tournament.league_id") + .left_join("team", "tournament.id", "player.tournament_id") + .r#where(LeagueFieldValue::id(&7), Comp::Gt) + .and(LeagueFieldValue::name(&"KOREA"), Comp::Eq) + .and_values_in(LeagueField::name, &["LCK", "STRANGER THINGS"]); + // .query() + // .await; + // NOTE: The docker database doesn't have the relationships generated + // for the joins, so for now, we are just going to check that the + // SQL generated by the SelectQueryBuilder is the expected one + assert_eq!( + select_with_joins.read_sql(), + "SELECT * FROM league INNER JOIN tournament ON league.id = tournament.league_id LEFT JOIN team ON tournament.id = player.tournament_id WHERE id > $1 AND name = $2 AND name IN ($2, $3) " + ) +} + +/// Builds a new SQL statement for retrieving entities of the `T` type, filtered +/// with the parameters that modify the base `SELECT * FROM {table_name}` SQL +#[canyon_sql::macros::canyon_tokio_test] +fn test_crud_find_with_querybuilder() { + // Find all the leagues with ID less than or equal to 50 + // and whose region column value equals 'KOREA' + let filtered_leagues_result: Result<Vec<League>, _> = League::select_query() + .r#where(LeagueFieldValue::id(&50), Comp::LtEq) + .and(LeagueFieldValue::region(&"KOREA"), Comp::Eq) + .query() + .await; + + let filtered_leagues: Vec<League> = filtered_leagues_result.unwrap(); + assert!(!filtered_leagues.is_empty()); + + let league_idx_0 = filtered_leagues.get(0).unwrap(); + assert_eq!(league_idx_0.id, 34); + assert_eq!(league_idx_0.region, "KOREA"); +} + +/// Same as the above, but with the specified datasource +#[canyon_sql::macros::canyon_tokio_test] +fn test_crud_find_with_querybuilder_datasource() { + // Find all the players whose ID column value is greater than 50 + let filtered_find_players = Player::select_query_datasource(SQL_SERVER_DS) + .r#where(PlayerFieldValue::id(&50), Comp::Gt) + .query() + .await; + + assert!(!filtered_find_players.unwrap().is_empty()); +} + +/// Updates the values of the range of entries defined by the constraint parameters +/// in the database entity +#[canyon_sql::macros::canyon_tokio_test] +fn test_crud_update_with_querybuilder() { + // Update the slug and name columns of all the leagues + // whose ID is greater than 1 and less than 8 + let mut q = League::update_query(); + q.set(&[ + (LeagueField::slug, "Updated with the QueryBuilder"), + (LeagueField::name, "Random"), + ]) + .r#where(LeagueFieldValue::id(&1), Comp::Gt) + .and(LeagueFieldValue::id(&8), Comp::Lt); + + /* The QueryBuilder family implements Clone, which is useful if you need to read the generated SQL + let qpr = q.clone(); + println!("PSQL: {:?}", qpr.read_sql()); + */ + + // We can now go back to the original and run the query + q.query() + .await + .expect("Failed to update records with the querybuilder"); + + let found_updated_values = League::select_query() + .r#where(LeagueFieldValue::id(&1), Comp::Gt) + 
.and(LeagueFieldValue::id(&7), Comp::Lt) + .query() + .await + .expect("Failed to retrieve database League entries with the querybuilder"); + + found_updated_values + .iter() + .for_each(|league| assert_eq!(league.slug, "Updated with the QueryBuilder")); +} + +/// Same as above, but with the specified datasource +#[canyon_sql::macros::canyon_tokio_test] +fn test_crud_update_with_querybuilder_datasource() { + // Update the summoner_name and first_name columns of all the players + // whose ID is greater than 1 and less than 8 + let mut q = Player::update_query_datasource(SQL_SERVER_DS); + q.set(&[ + (PlayerField::summoner_name, "Random updated player name"), + (PlayerField::first_name, "I am an updated first name"), + ]) + .r#where(PlayerFieldValue::id(&1), Comp::Gt) + .and(PlayerFieldValue::id(&8), Comp::Lt) + .query() + .await + .expect("Failed to update records with the querybuilder"); + + let found_updated_values = Player::select_query_datasource(SQL_SERVER_DS) + .r#where(PlayerFieldValue::id(&1), Comp::Gt) + .and(PlayerFieldValue::id(&7), Comp::LtEq) + .query() + .await + .expect("Failed to retrieve database Player entries with the querybuilder"); + + found_updated_values.iter().for_each(|player| { + assert_eq!(player.summoner_name, "Random updated player name"); + assert_eq!(player.first_name, "I am an updated first name"); + }); +} + +/// Deletes entries from the mapped entity `T` that are in the ranges filtered +/// with the QueryBuilder +/// +/// Note that if the database is persisted (not created and destroyed on every docker or +/// GitHub Action wake-up), it won't delete things that have already been deleted, +/// but this isn't an error. They just don't exist. +#[canyon_sql::macros::canyon_tokio_test] +fn test_crud_delete_with_querybuilder() { + Tournament::delete_query() + .r#where(TournamentFieldValue::id(&14), Comp::Gt) + .and(TournamentFieldValue::id(&16), Comp::Lt) + .query() + .await + .expect("Error connecting with the database on the delete operation"); + + assert_eq!(Tournament::find_by_pk(&15).await.unwrap(), None); +} + +/// Same as the above delete, but with the specified datasource +#[canyon_sql::macros::canyon_tokio_test] +fn test_crud_delete_with_querybuilder_datasource() { + Player::delete_query_datasource(SQL_SERVER_DS) + .r#where(PlayerFieldValue::id(&120), Comp::Gt) + .and(PlayerFieldValue::id(&130), Comp::Lt) + .query() + .await + .expect("Error connecting with the database when we are going to delete data! 
:)"); + + assert!(Player::select_query_datasource(SQL_SERVER_DS) + .r#where(PlayerFieldValue::id(&122), Comp::Eq) + .query() + .await + .unwrap() + .is_empty()); +} + +/// Tests for the generated SQL query after use the +/// WHERE clause +#[canyon_sql::macros::canyon_tokio_test] +fn test_where_clause() { + let mut l = League::select_query(); + l.r#where(LeagueFieldValue::name(&"LEC"), Comp::Eq); + + assert_eq!(l.read_sql(), "SELECT * FROM league WHERE name = $1") +} + +/// Tests for the generated SQL query after use the +/// AND clause +#[canyon_sql::macros::canyon_tokio_test] +fn test_and_clause() { + let mut l = League::select_query(); + l.r#where(LeagueFieldValue::name(&"LEC"), Comp::Eq) + .and(LeagueFieldValue::id(&10), Comp::LtEq); + + assert_eq!( + l.read_sql().trim(), + "SELECT * FROM league WHERE name = $1 AND id <= $2" + ) +} + +/// Tests for the generated SQL query after use the +/// AND clause +#[canyon_sql::macros::canyon_tokio_test] +fn test_and_clause_with_in_constraint() { + let mut l = League::select_query(); + l.r#where(LeagueFieldValue::name(&"LEC"), Comp::Eq) + .and_values_in(LeagueField::id, &[1, 7, 10]); + + assert_eq!( + l.read_sql().trim(), + "SELECT * FROM league WHERE name = $1 AND id IN ($1, $2, $3)" + ) +} + +/// Tests for the generated SQL query after use the +/// AND clause +#[canyon_sql::macros::canyon_tokio_test] +fn test_or_clause() { + let mut l = League::select_query(); + l.r#where(LeagueFieldValue::name(&"LEC"), Comp::Eq) + .or(LeagueFieldValue::id(&10), Comp::LtEq); + + assert_eq!( + l.read_sql().trim(), + "SELECT * FROM league WHERE name = $1 OR id <= $2" + ) +} + +/// Tests for the generated SQL query after use the +/// AND clause +#[canyon_sql::macros::canyon_tokio_test] +fn test_or_clause_with_in_constraint() { + let mut l = League::select_query(); + l.r#where(LeagueFieldValue::name(&"LEC"), Comp::Eq) + .or_values_in(LeagueField::id, &[1, 7, 10]); + + assert_eq!( + l.read_sql(), + "SELECT * FROM league WHERE name = $1 OR id IN ($1, $2, $3) " + ) +} + +/// Tests for the generated SQL query after use the +/// AND clause +#[canyon_sql::macros::canyon_tokio_test] +fn test_order_by_clause() { + let mut l = League::select_query(); + l.r#where(LeagueFieldValue::name(&"LEC"), Comp::Eq) + .order_by(LeagueField::id, false); + + assert_eq!( + l.read_sql(), + "SELECT * FROM league WHERE name = $1 ORDER BY id" + ) +} diff --git a/tests/crud/select_operations.rs b/tests/crud/select_operations.rs new file mode 100644 index 00000000..26e0e5f2 --- /dev/null +++ b/tests/crud/select_operations.rs @@ -0,0 +1,121 @@ +#![allow(clippy::nonminimal_bool)] + +use crate::constants::SQL_SERVER_DS; +///! Integration tests for the CRUD operations available in `Canyon` that +///! 
generate and execute *SELECT* statements +use crate::Error; +use canyon_sql::crud::CrudOperations; + +use crate::tests_models::league::*; +use crate::tests_models::player::*; + +/// Tests the behaviour of a SELECT * FROM {table_name} within Canyon, through the +/// `::find_all()` associated function derived with the `CanyonCrud` derive proc-macro +/// and using the *default datasource* +#[canyon_sql::macros::canyon_tokio_test] +fn test_crud_find_all() { + let find_all_result: Result<Vec<League>, Box<dyn Error + Send + Sync>> = + League::find_all().await; + + // Connection doesn't return an error + assert!(!find_all_result.is_err()); + assert!(!find_all_result.unwrap().is_empty()); + + let find_all_players: Result<Vec<Player>, Box<dyn Error + Send + Sync>> = + Player::find_all().await; + assert!(!find_all_players.unwrap().is_empty()); +} + +/// Same as the `find_all()`, but with the unchecked variant, which directly returns `Vec<T>`, +/// not wrapped in a `Result` +#[canyon_sql::macros::canyon_tokio_test] +fn test_crud_find_all_unchecked() { + let find_all_result: Vec<League> = League::find_all_unchecked().await; + assert!(!find_all_result.is_empty()); +} + +/// Tests the behaviour of a SELECT * FROM {table_name} within Canyon, through the +/// `::find_all()` associated function derived with the `CanyonCrud` derive proc-macro +/// and using the specified datasource +#[canyon_sql::macros::canyon_tokio_test] +fn test_crud_find_all_datasource() { + let find_all_result: Result<Vec<League>, Box<dyn Error + Send + Sync>> = + League::find_all_datasource(SQL_SERVER_DS).await; + // Connection doesn't return an error + assert!(!find_all_result.is_err()); + assert!(!find_all_result.unwrap().is_empty()); +} + +/// Same as the `find_all_datasource()`, but with the unchecked variant and the specified datasource, +/// returning directly `Vec<T>` and not `Result<Vec<T>, Err>` +#[canyon_sql::macros::canyon_tokio_test] +fn test_crud_find_all_unchecked_datasource() { + let find_all_result: Vec<League> = League::find_all_unchecked_datasource(SQL_SERVER_DS).await; + assert!(!find_all_result.is_empty()); +} + +/// Tests the behaviour of a SELECT * FROM {table_name} WHERE {pk_field} = {pk_value}, where the pk is +/// defined with the #[primary_key] attribute over some field of the type. +/// +/// Uses the *default datasource*. +#[canyon_sql::macros::canyon_tokio_test] +fn test_crud_find_by_pk() { + let find_by_pk_result: Result<Option<League>, Box<dyn Error + Send + Sync>> = + League::find_by_pk(&1).await; + assert!(find_by_pk_result.as_ref().unwrap().is_some()); + + let some_league = find_by_pk_result.unwrap().unwrap(); + assert_eq!(some_league.id, 1); + assert_eq!(some_league.ext_id, 100695891328981122_i64); + assert_eq!(some_league.slug, "european-masters"); + assert_eq!(some_league.name, "European Masters"); + assert_eq!(some_league.region, "EUROPE"); + assert_eq!( + some_league.image_url, + "http://static.lolesports.com/leagues/EM_Bug_Outline1.png" + ); +} + +/// Tests the behaviour of a SELECT * FROM {table_name} WHERE {pk_field} = {pk_value}, where the pk is +/// defined with the #[primary_key] attribute over some field of the type. +/// +/// Uses the *specified datasource* in the second parameter of the function call. 
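+/// A side-by-side sketch of the two variants (both calls appear in this suite; only the `?` error propagation is an assumption): +/// +/// ```rust,ignore +/// let from_default: Option<League> = League::find_by_pk(&1).await?; +/// let from_mssql: Option<League> = League::find_by_pk_datasource(&27, SQL_SERVER_DS).await?; +/// ``` 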
+/// Tests the behaviour of a SELECT * FROM {table_name} WHERE {pk} = {pk_value}, where the
+/// pk is defined with the #[primary_key] attribute over some field of the type.
+///
+/// Uses the *specified datasource* in the second parameter of the function call.
+#[canyon_sql::macros::canyon_tokio_test]
+fn test_crud_find_by_pk_datasource() {
+    let find_by_pk_result: Result<Option<League>, Box<dyn Error + Send + Sync>> =
+        League::find_by_pk_datasource(&27, SQL_SERVER_DS).await;
+    assert!(find_by_pk_result.as_ref().unwrap().is_some());
+
+    let some_league = find_by_pk_result.unwrap().unwrap();
+    assert_eq!(some_league.id, 27);
+    assert_eq!(some_league.ext_id, 107898214974993351_i64);
+    assert_eq!(some_league.slug, "college_championship");
+    assert_eq!(some_league.name, "College Championship");
+    assert_eq!(some_league.region, "NORTH AMERICA");
+    assert_eq!(
+        some_league.image_url,
+        "http://static.lolesports.com/leagues/1646396098648_CollegeChampionshiplogo.png"
+    );
+}
+
+/// Counts how many rows an entity has on the target database.
+#[canyon_sql::macros::canyon_tokio_test]
+fn test_crud_count_operation() {
+    assert_eq!(
+        League::find_all().await.unwrap().len() as i64,
+        League::count().await.unwrap()
+    );
+}
+
+/// Counts how many rows an entity has on the target database, using
+/// the specified datasource
+#[canyon_sql::macros::canyon_tokio_test]
+fn test_crud_count_datasource_operation() {
+    assert_eq!(
+        League::find_all_datasource(SQL_SERVER_DS)
+            .await
+            .unwrap()
+            .len() as i64,
+        League::count_datasource(SQL_SERVER_DS).await.unwrap()
+    );
+}
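As the two count tests suggest, every checked read in this file has a `_datasource` twin that takes the datasource name as its last argument instead of using the default one. A minimal sketch contrasting the two, assuming the same async test context and the `SQL_SERVER_DS` constant from the tests' `constants` module:

```rust
// Same operation, two datasources: the default one and the SQL Server one
// configured for the test suite.
let on_default: i64 = League::count().await.expect("count on default DS failed");
let on_sqlserver: i64 = League::count_datasource(SQL_SERVER_DS)
    .await
    .expect("count on SQL Server DS failed");

println!("default: {on_default} rows, sqlserver: {on_sqlserver} rows");
```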
diff --git a/tests/crud/update_operations.rs b/tests/crud/update_operations.rs
new file mode 100644
index 00000000..d0643cae
--- /dev/null
+++ b/tests/crud/update_operations.rs
@@ -0,0 +1,95 @@
+//! Integration tests for the CRUD operations available in `Canyon` that
+//! generate and execute *UPDATE* statements
+
+use canyon_sql::crud::CrudOperations;
+
+use crate::constants::SQL_SERVER_DS;
+use crate::tests_models::league::*;
+
+/// Update is a *CRUD* operation defined for some entity `T` that works by applying
+/// some change to a Rust entity instance and persisting that change to the database.
+///
+/// The `t.update(&self)` operation is only enabled for types that have at least
+/// one of their fields annotated with `#[primary_key]`, because we use that
+/// concrete field to construct the clause that targets that entity.
+///
+/// Attempting to use the `t.update(&self)` method on an entity without a
+/// `#[primary_key]` will raise a runtime error.
+#[canyon_sql::macros::canyon_tokio_test]
+fn test_crud_update_method_operation() {
+    // We first retrieve some entity from the database. Note that we must make
+    // the retrieved instance mutable, or clone it into a new mutable binding
+    let mut updt_candidate: League = League::find_by_pk(&1)
+        .await
+        .expect("[1] - Failed the query to the database")
+        .expect("[1] - No entity found for the primary key value passed in");
+
+    // The ext_id field value comes from the SQL scripts under the
+    // docker/sql folder. We are retrieving the first entity inserted at the
+    // wake-up time of the database, and checking some of its properties.
+    assert_eq!(updt_candidate.ext_id, 100695891328981122_i64);
+
+    // Modify the value, and perform the update
+    let updt_value: i64 = 593064_i64;
+    updt_candidate.ext_id = updt_value;
+    updt_candidate
+        .update()
+        .await
+        .expect("Failed the update operation");
+
+    // Retrieve it again, and check that the value was really updated
+    let updt_entity: League = League::find_by_pk(&1)
+        .await
+        .expect("[2] - Failed the query to the database")
+        .expect("[2] - No entity found for the primary key value passed in");
+
+    assert_eq!(updt_entity.ext_id, updt_value);
+
+    // We roll back the change to the initial value, so we don't break other
+    // tests the next time they run
+    updt_candidate.ext_id = 100695891328981122_i64;
+    updt_candidate
+        .update()
+        .await
+        .expect("Failed to re-establish the initial value in the update operation");
+}
+
+/// Same as the above test, but with the specified datasource.
+#[canyon_sql::macros::canyon_tokio_test]
+fn test_crud_update_datasource_method_operation() {
+    // We first retrieve some entity from the database. Note that we must make
+    // the retrieved instance mutable, or clone it into a new mutable binding
+    let mut updt_candidate: League = League::find_by_pk_datasource(&1, SQL_SERVER_DS)
+        .await
+        .expect("[1] - Failed the query to the database")
+        .expect("[1] - No entity found for the primary key value passed in");
+
+    // The ext_id field value comes from the SQL scripts under the
+    // docker/sql folder. We are retrieving the first entity inserted at the
+    // wake-up time of the database, and checking some of its properties.
+    assert_eq!(updt_candidate.ext_id, 100695891328981122_i64);
+
+    // Modify the value, and perform the update
+    let updt_value: i64 = 59306442534_i64;
+    updt_candidate.ext_id = updt_value;
+    updt_candidate
+        .update_datasource(SQL_SERVER_DS)
+        .await
+        .expect("Failed the update operation");
+
+    // Retrieve it again, and check that the value was really updated
+    let updt_entity: League = League::find_by_pk_datasource(&1, SQL_SERVER_DS)
+        .await
+        .expect("[2] - Failed the query to the database")
+        .expect("[2] - No entity found for the primary key value passed in");
+
+    assert_eq!(updt_entity.ext_id, updt_value);
+
+    // We roll back the change to the initial value, so we don't break other
+    // tests the next time they run
+    updt_candidate.ext_id = 100695891328981122_i64;
+    updt_candidate
+        .update_datasource(SQL_SERVER_DS)
+        .await
+        .expect("Failed to re-establish the initial value in the update operation");
+}
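Condensed from the pattern both tests above share, a minimal sketch of the read-modify-write cycle; the pk literal and the new value are illustrative only:

```rust
// Read the row by primary key, mutate the instance, persist the change.
// `update()` is only generated for entities declaring a `#[primary_key]`,
// which `League` does.
let mut league: League = League::find_by_pk(&1)
    .await
    .expect("query failed")
    .expect("no League with pk = 1");

league.ext_id = 42_i64; // illustrative value
league.update().await.expect("UPDATE failed");
```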
diff --git a/tests/migrations/mod.rs b/tests/migrations/mod.rs
new file mode 100644
index 00000000..17b19c35
--- /dev/null
+++ b/tests/migrations/mod.rs
@@ -0,0 +1,26 @@
+//! Integration tests for the migrations feature of `Canyon-SQL`
+
+use canyon_sql::{crud::Transaction, migrations::handler::Migrations};
+
+use crate::constants;
+
+/// Fetches the information of the requested `PostgreSQL` schema
+#[canyon_sql::macros::canyon_tokio_test]
+fn test_migrations_postgresql_status_query() {
+    let results = Migrations::query(constants::FETCH_PUBLIC_SCHEMA, [], constants::PSQL_DS).await;
+    assert!(results.is_ok());
+
+    let public_schema_info = results.ok().unwrap().postgres;
+
+    let first_result = public_schema_info.get(0).unwrap();
+
+    assert_eq!(first_result.columns().get(0).unwrap().name(), "table_name");
+    assert_eq!(
+        first_result.columns().get(0).unwrap().type_().name(),
+        "name"
+    );
+    assert_eq!(first_result.columns().get(0).unwrap().type_().oid(), 19);
+    assert_eq!(
+        first_result.columns().get(0).unwrap().type_().schema(),
+        "pg_catalog"
+    );
+}
diff --git a/tests/tests_models/league.rs b/tests/tests_models/league.rs
new file mode 100644
index 00000000..3f3037e7
--- /dev/null
+++ b/tests/tests_models/league.rs
@@ -0,0 +1,14 @@
+use canyon_sql::macros::*;
+
+#[derive(Debug, Fields, CanyonCrud, CanyonMapper, ForeignKeyable, Eq, PartialEq)]
+// #[canyon_entity(table_name = "league", schema = "public")]
+#[canyon_entity(table_name = "league")]
+pub struct League {
+    #[primary_key]
+    id: i32,
+    ext_id: i64,
+    slug: String,
+    name: String,
+    region: String,
+    image_url: String,
+}
diff --git a/tests/tests_models/mod.rs b/tests/tests_models/mod.rs
new file mode 100644
index 00000000..bba7142b
--- /dev/null
+++ b/tests/tests_models/mod.rs
@@ -0,0 +1,3 @@
+pub mod league;
+pub mod player;
+pub mod tournament;
diff --git a/tests/tests_models/player.rs b/tests/tests_models/player.rs
new file mode 100644
index 00000000..2a06c109
--- /dev/null
+++ b/tests/tests_models/player.rs
@@ -0,0 +1,24 @@
+use canyon_sql::macros::*;
+
+/// Data model that represents a database entity for Players.
+///
+/// To test the behaviour of Canyon with entities that don't declare a primary key,
+/// or whose primary key isn't autoincremental, we will use this type.
+/// Note that this entity has a primary key declared in the database, but we
+/// omit it in Canyon, so for us it is as if the primary key weren't set up.
+///
+/// Remember that entities that don't declare at least one field as `#[primary_key]`
+/// don't have all the CRUD operations available, only the ones that don't
+/// require a primary key.
+#[derive(Debug, Clone, Fields, CanyonCrud, CanyonMapper, Eq, PartialEq)]
+#[canyon_entity]
+pub struct Player {
+    // #[primary_key] Omitted on purpose, to mock an entity that declares no primary key
+    id: i32,
+    ext_id: i64,
+    first_name: String,
+    last_name: String,
+    summoner_name: String,
+    image_url: Option<String>,
+    role: String,
+}
diff --git a/tests/tests_models/tournament.rs b/tests/tests_models/tournament.rs
new file mode 100644
index 00000000..880076f4
--- /dev/null
+++ b/tests/tests_models/tournament.rs
@@ -0,0 +1,15 @@
+use crate::tests_models::league::League;
+use canyon_sql::{date_time::NaiveDate, macros::*};
+
+#[derive(Debug, Clone, Fields, CanyonCrud, CanyonMapper, Eq, PartialEq)]
+#[canyon_entity]
+pub struct Tournament {
+    #[primary_key]
+    id: i32,
+    ext_id: i64,
+    slug: String,
+    start_date: NaiveDate,
+    end_date: NaiveDate,
+    #[foreign_key(table = "league", column = "id")]
+    league: i32,
+}
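To close, a sketch of how another entity would plug into these models: a hypothetical `Team` (not part of this diff) that follows the same conventions, combining the `#[primary_key]` seen in `League` and `Tournament` with a `#[foreign_key]` pointing at the `league` table:

```rust
use canyon_sql::macros::*;

// Hypothetical entity, for illustration only: same derives and attributes
// as the models above, referencing `league.id` exactly as Tournament does.
#[derive(Debug, Clone, Fields, CanyonCrud, CanyonMapper, Eq, PartialEq)]
#[canyon_entity]
pub struct Team {
    #[primary_key]
    id: i32,
    slug: String,
    #[foreign_key(table = "league", column = "id")]
    league: i32,
}
```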