From 51ea4b38681020a23297dee66486ffcd1e0a8280 Mon Sep 17 00:00:00 2001 From: tobymao Date: Wed, 11 Mar 2026 16:20:36 -0700 Subject: [PATCH] update links Signed-off-by: tobymao --- README.md | 14 ++++++++------ docs/HOWTO.md | 2 +- .../scheduler/hybrid_executors_docker_compose.md | 2 +- docs/concepts/macros/sqlmesh_macros.md | 2 +- docs/development.md | 2 +- docs/examples/incremental_time_full_walkthrough.md | 2 +- docs/examples/overview.md | 8 ++++---- docs/guides/custom_materializations.md | 10 +++++----- docs/guides/linter.md | 2 +- docs/guides/model_selection.md | 2 +- docs/guides/multi_repo.md | 6 +++--- docs/guides/notifications.md | 2 +- docs/index.md | 10 +++++----- docs/integrations/dbt.md | 2 +- docs/integrations/dlt.md | 2 +- docs/integrations/github.md | 2 +- docs/quickstart/cli.md | 2 +- docs/reference/python.md | 2 +- mkdocs.yml | 6 +++--- pdoc/cli.py | 2 +- posts/virtual_data_environments.md | 4 ++-- pyproject.toml | 4 ++-- sqlmesh/cli/main.py | 2 +- sqlmesh/core/model/common.py | 4 ++-- sqlmesh/core/plan/builder.py | 2 +- sqlmesh/core/renderer.py | 2 +- sqlmesh/core/snapshot/evaluator.py | 2 +- sqlmesh/core/state_sync/db/migrator.py | 2 +- sqlmesh/dbt/column.py | 2 +- .../v0092_warn_about_dbt_data_type_diff.py | 4 ++-- tests/core/engine_adapter/test_mssql.py | 4 ++-- tests/core/test_config.py | 2 +- tests/fixtures/dbt/empty_project/profiles.yml | 2 +- tests/pyproject.toml | 4 ++-- web/common/package.json | 2 +- 35 files changed, 63 insertions(+), 61 deletions(-) diff --git a/README.md b/README.md index 0a1b2af718..41f78cc138 100644 --- a/README.md +++ b/README.md @@ -13,7 +13,7 @@ It is more than just a [dbt alternative](https://tobikodata.com/reduce_costs_wit ## Core Features -SQLMesh Plan Mode +SQLMesh Plan Mode > Get instant SQL impact and context of your changes, both in the CLI and in the [SQLMesh VSCode Extension](https://sqlmesh.readthedocs.io/en/latest/guides/vscode/?h=vs+cod) @@ -122,12 +122,12 @@ outputs: * Never build a table [more than 
once](https://tobikodata.com/simplicity-or-efficiency-how-dbt-makes-you-choose.html) * Track what data’s been modified and run only the necessary transformations for [incremental models](https://tobikodata.com/correctly-loading-incremental-data-at-scale.html) * Run [unit tests](https://tobikodata.com/we-need-even-greater-expectations.html) for free and configure automated audits -* Run [table diffs](https://sqlmesh.readthedocs.io/en/stable/examples/sqlmesh_cli_crash_course/?h=crash#run-data-diff-against-prod) between prod and dev based on tables/views impacted by a change +* Run [table diffs](https://sqlmesh.readthedocs.io/en/stable/examples/sqlmesh_cli_crash_course/?h=crash#run-data-diff-against-prod) between prod and dev based on tables/views impacted by a change
Level Up Your SQL Write SQL in any dialect and SQLMesh will transpile it to your target SQL dialect on the fly before sending it to the warehouse. -Transpile Example +Transpile Example
* Debug transformation errors *before* you run them in your warehouse in [10+ different SQL dialects](https://sqlmesh.readthedocs.io/en/stable/integrations/overview/#execution-engines) @@ -170,15 +170,17 @@ sqlmesh init # follow the prompts to get started (choose DuckDB) Follow the [quickstart guide](https://sqlmesh.readthedocs.io/en/stable/quickstart/cli/) to learn how to use SQLMesh. You already have a head start! -Follow the [crash course](https://sqlmesh.readthedocs.io/en/stable/examples/sqlmesh_cli_crash_course/) to learn the core movesets and use the easy to reference cheat sheet. +Follow the [crash course](https://sqlmesh.readthedocs.io/en/stable/examples/sqlmesh_cli_crash_course/) to learn the core movesets and use the easy to reference cheat sheet. Follow this [example](https://sqlmesh.readthedocs.io/en/stable/examples/incremental_time_full_walkthrough/) to learn how to use SQLMesh in a full walkthrough. ## Join Our Community Connect with us in the following ways: -* Join the [SQLMesh Slack Community](https://tobikodata.com/slack) to ask questions, or just to say hi! -* File an issue on our [GitHub](https://github.com/sqlmesh/sqlmesh/issues/new) +* Join the [Tobiko Slack Community](https://tobikodata.com/slack) to ask questions, or just to say hi! +* File an issue on our [GitHub](https://github.com/SQLMesh/sqlmesh/issues/new) +* Send us an email at [hello@tobikodata.com](mailto:hello@tobikodata.com) with your questions or feedback +* Read our [blog](https://tobikodata.com/blog) ## Contributing We welcome contributions! Please see [CONTRIBUTING.md](CONTRIBUTING.md) for guidelines on how to contribute, including our DCO sign-off requirement. diff --git a/docs/HOWTO.md b/docs/HOWTO.md index 9ccefff077..edd7c9833f 100644 --- a/docs/HOWTO.md +++ b/docs/HOWTO.md @@ -92,7 +92,7 @@ You will work on the docs in a local copy of the sqlmesh git repository. 
If you don't have a copy of the repo on your machine, open a terminal and clone it into a `sqlmesh` directory by executing: ``` bash -git clone https://github.com/TobikoData/sqlmesh.git +git clone https://github.com/SQLMesh/sqlmesh.git ``` And navigate to the directory: diff --git a/docs/cloud/features/scheduler/hybrid_executors_docker_compose.md b/docs/cloud/features/scheduler/hybrid_executors_docker_compose.md index e3bd072752..8f8f323139 100644 --- a/docs/cloud/features/scheduler/hybrid_executors_docker_compose.md +++ b/docs/cloud/features/scheduler/hybrid_executors_docker_compose.md @@ -25,7 +25,7 @@ Both executors must be properly configured with environment variables to connect 1. **Get docker-compose file**: - Download the [docker-compose.yml](https://raw.githubusercontent.com/TobikoData/sqlmesh/refs/heads/main/docs/cloud/features/scheduler/scheduler/docker-compose.yml) and [.env.example](https://raw.githubusercontent.com/TobikoData/sqlmesh/refs/heads/main/docs/cloud/features/scheduler/scheduler/.env.example) files to a local directory. + Download the [docker-compose.yml](https://raw.githubusercontent.com/SQLMesh/sqlmesh/refs/heads/main/docs/cloud/features/scheduler/scheduler/docker-compose.yml) and [.env.example](https://raw.githubusercontent.com/SQLMesh/sqlmesh/refs/heads/main/docs/cloud/features/scheduler/scheduler/.env.example) files to a local directory. 2. **Create your environment file**: diff --git a/docs/concepts/macros/sqlmesh_macros.md b/docs/concepts/macros/sqlmesh_macros.md index f28e77e203..c7d967b12c 100644 --- a/docs/concepts/macros/sqlmesh_macros.md +++ b/docs/concepts/macros/sqlmesh_macros.md @@ -2111,7 +2111,7 @@ FROM some_table; Generics can be nested and are resolved recursively allowing for fairly robust type hinting. -See examples of the coercion function in action in the test suite [here](https://github.com/TobikoData/sqlmesh/blob/main/tests/core/test_macros.py). 
+See examples of the coercion function in action in the test suite [here](https://github.com/SQLMesh/sqlmesh/blob/main/tests/core/test_macros.py). #### Conclusion diff --git a/docs/development.md b/docs/development.md index 662ad17d6c..ff8b250d87 100644 --- a/docs/development.md +++ b/docs/development.md @@ -1,6 +1,6 @@ # Contribute to development -SQLMesh is licensed under [Apache 2.0](https://github.com/TobikoData/sqlmesh/blob/main/LICENSE). We encourage community contribution and would love for you to get involved. The following document outlines the process to contribute to SQLMesh. +SQLMesh is licensed under [Apache 2.0](https://github.com/SQLMesh/sqlmesh/blob/main/LICENSE). We encourage community contribution and would love for you to get involved. The following document outlines the process to contribute to SQLMesh. ## Prerequisites diff --git a/docs/examples/incremental_time_full_walkthrough.md b/docs/examples/incremental_time_full_walkthrough.md index 4e1d577d2c..ffa9def911 100644 --- a/docs/examples/incremental_time_full_walkthrough.md +++ b/docs/examples/incremental_time_full_walkthrough.md @@ -689,7 +689,7 @@ In the terminal output, I can see the change displayed like before, but I see so I leave the [effective date](../concepts/plans.md#effective-date) prompt blank because I do not want to reprocess historical data in `prod` - I only want to apply this new business logic going forward. -However, I do want to preview the new business logic in my `dev` environment before pushing to `prod`. Because I have [configured SQLMesh to create previews](https://github.com/TobikoData/sqlmesh-demos/blob/e0e3899e173cf7b8447ae707402a9df59911d1c0/config.yaml#L42) for forward-only models in my `config.yaml` file, SQLMesh has created a temporary copy of the `prod` table in my `dev` environment, so I can test the new logic on historical data. +However, I do want to preview the new business logic in my `dev` environment before pushing to `prod`. 
Because I have [configured SQLMesh to create previews](https://github.com/SQLMesh/sqlmesh-demos/blob/e0e3899e173cf7b8447ae707402a9df59911d1c0/config.yaml#L42) for forward-only models in my `config.yaml` file, SQLMesh has created a temporary copy of the `prod` table in my `dev` environment, so I can test the new logic on historical data. I specify the beginning of the preview's historical data window as `2024-10-27` in the preview start date prompt, and I specify the end of the window as now by leaving the preview end date prompt blank. diff --git a/docs/examples/overview.md b/docs/examples/overview.md index a252b3f9c2..e7dbc1916d 100644 --- a/docs/examples/overview.md +++ b/docs/examples/overview.md @@ -27,16 +27,16 @@ Walkthroughs are easy to follow and provide lots of information in a self-contai ## Projects -SQLMesh example projects are stored in the [sqlmesh-examples Github repository](https://github.com/TobikoData/sqlmesh-examples). The repository's front page includes additional information about how to download the files and set up the projects. +SQLMesh example projects are stored in the [sqlmesh-examples Github repository](https://github.com/SQLMesh/sqlmesh-examples). The repository's front page includes additional information about how to download the files and set up the projects. The two most comprehensive example projects use the SQLMesh `sushi` data, based on a fictional sushi restaurant. ("Tobiko" is the Japanese word for flying fish roe, commonly used in sushi.) -The `sushi` data is described in an [overview notebook](https://github.com/TobikoData/sqlmesh-examples/blob/main/001_sushi/sushi-overview.ipynb) in the repository. +The `sushi` data is described in an [overview notebook](https://github.com/SQLMesh/sqlmesh-examples/blob/main/001_sushi/sushi-overview.ipynb) in the repository. 
The example repository include two versions of the `sushi` project, at different levels of complexity: -- The [`simple` project](https://github.com/TobikoData/sqlmesh-examples/tree/main/001_sushi/1_simple) contains four `VIEW` and one `SEED` model +- The [`simple` project](https://github.com/SQLMesh/sqlmesh-examples/tree/main/001_sushi/1_simple) contains four `VIEW` and one `SEED` model - The `VIEW` model kind refreshes every run, making it easy to reason about SQLMesh's behavior -- The [`moderate` project](https://github.com/TobikoData/sqlmesh-examples/tree/main/001_sushi/2_moderate) contains five `INCREMENTAL_BY_TIME_RANGE`, one `FULL`, one `VIEW`, and one `SEED` model +- The [`moderate` project](https://github.com/SQLMesh/sqlmesh-examples/tree/main/001_sushi/2_moderate) contains five `INCREMENTAL_BY_TIME_RANGE`, one `FULL`, one `VIEW`, and one `SEED` model - The incremental models allow you to observe how and when new data is transformed by SQLMesh - Some models, like `customer_revenue_lifetime`, demonstrate more advanced incremental queries like customer lifetime value calculation diff --git a/docs/guides/custom_materializations.md b/docs/guides/custom_materializations.md index 58eb64026d..905a3d017e 100644 --- a/docs/guides/custom_materializations.md +++ b/docs/guides/custom_materializations.md @@ -24,13 +24,13 @@ A custom materialization must: - Be written in Python code - Be a Python class that inherits the SQLMesh `CustomMaterialization` base class -- Use or override the `insert` method from the SQLMesh [`MaterializableStrategy`](https://github.com/TobikoData/sqlmesh/blob/034476e7f64d261860fd630c3ac56d8a9c9f3e3a/sqlmesh/core/snapshot/evaluator.py#L1146) class/subclasses +- Use or override the `insert` method from the SQLMesh [`MaterializableStrategy`](https://github.com/SQLMesh/sqlmesh/blob/034476e7f64d261860fd630c3ac56d8a9c9f3e3a/sqlmesh/core/snapshot/evaluator.py#L1146) class/subclasses - Be loaded or imported by SQLMesh at runtime A custom 
materialization may: -- Use or override methods from the SQLMesh [`MaterializableStrategy`](https://github.com/TobikoData/sqlmesh/blob/034476e7f64d261860fd630c3ac56d8a9c9f3e3a/sqlmesh/core/snapshot/evaluator.py#L1146) class/subclasses -- Use or override methods from the SQLMesh [`EngineAdapter`](https://github.com/TobikoData/sqlmesh/blob/034476e7f64d261860fd630c3ac56d8a9c9f3e3a/sqlmesh/core/engine_adapter/base.py#L67) class/subclasses +- Use or override methods from the SQLMesh [`MaterializableStrategy`](https://github.com/SQLMesh/sqlmesh/blob/034476e7f64d261860fd630c3ac56d8a9c9f3e3a/sqlmesh/core/snapshot/evaluator.py#L1146) class/subclasses +- Use or override methods from the SQLMesh [`EngineAdapter`](https://github.com/SQLMesh/sqlmesh/blob/034476e7f64d261860fd630c3ac56d8a9c9f3e3a/sqlmesh/core/engine_adapter/base.py#L67) class/subclasses - Execute arbitrary SQL code and fetch results with the engine adapter `execute` and related methods A custom materialization may perform arbitrary Python processing with Pandas or other libraries, but in most cases that logic should reside in a [Python model](../concepts/models/python_models.md) instead of the materialization. @@ -157,7 +157,7 @@ class CustomFullMaterialization(CustomMaterialization): ) -> None: config_value = model.custom_materialization_properties["config_key"] # Proceed with implementing the insertion logic. - # Example existing materialization for look and feel: https://github.com/TobikoData/sqlmesh/blob/main/sqlmesh/core/snapshot/evaluator.py + # Example existing materialization for look and feel: https://github.com/SQLMesh/sqlmesh/blob/main/sqlmesh/core/snapshot/evaluator.py ``` ## Extending `CustomKind` @@ -292,4 +292,4 @@ setup( ) ``` -Refer to the SQLMesh Github [custom_materializations](https://github.com/TobikoData/sqlmesh/tree/main/examples/custom_materializations) example for more details on Python packaging. 
+Refer to the SQLMesh Github [custom_materializations](https://github.com/SQLMesh/sqlmesh/tree/main/examples/custom_materializations) example for more details on Python packaging. diff --git a/docs/guides/linter.md b/docs/guides/linter.md index 22cc5077b8..6cdac167ec 100644 --- a/docs/guides/linter.md +++ b/docs/guides/linter.md @@ -16,7 +16,7 @@ Some rules validate that a pattern is *not* present, such as not allowing `SELEC Rules are defined in Python. Each rule is an individual Python class that inherits from SQLMesh's `Rule` base class and defines the logic for validating a pattern. -We display a portion of the `Rule` base class's code below ([full source code](https://github.com/TobikoData/sqlmesh/blob/main/sqlmesh/core/linter/rule.py)). Its methods and properties illustrate the most important components of the subclassed rules you define. +We display a portion of the `Rule` base class's code below ([full source code](https://github.com/SQLMesh/sqlmesh/blob/main/sqlmesh/core/linter/rule.py)). Its methods and properties illustrate the most important components of the subclassed rules you define. Each rule class you create has four vital components: diff --git a/docs/guides/model_selection.md b/docs/guides/model_selection.md index e6178246d6..79fd17a18c 100644 --- a/docs/guides/model_selection.md +++ b/docs/guides/model_selection.md @@ -78,7 +78,7 @@ NOTE: the `--backfill-model` argument can only be used in development environmen ## Examples -We now demonstrate the use of `--select-model` and `--backfill-model` with the SQLMesh `sushi` example project, available in the `examples/sushi` directory of the [SQLMesh Github repository](https://github.com/TobikoData/sqlmesh). +We now demonstrate the use of `--select-model` and `--backfill-model` with the SQLMesh `sushi` example project, available in the `examples/sushi` directory of the [SQLMesh Github repository](https://github.com/SQLMesh/sqlmesh). 
### sushi diff --git a/docs/guides/multi_repo.md b/docs/guides/multi_repo.md index bf34c7d21a..4dae4de57e 100644 --- a/docs/guides/multi_repo.md +++ b/docs/guides/multi_repo.md @@ -5,7 +5,7 @@ SQLMesh provides native support for multiple repos and makes it easy to maintain If you are wanting to separate your systems/data and provide isolation, checkout the [isolated systems guide](https://sqlmesh.readthedocs.io/en/stable/guides/isolated_systems/?h=isolated). ## Bootstrapping multiple projects -Setting up SQLMesh with multiple repos is quite simple. Copy the contents of this example [multi-repo project](https://github.com/TobikoData/sqlmesh/tree/main/examples/multi). +Setting up SQLMesh with multiple repos is quite simple. Copy the contents of this example [multi-repo project](https://github.com/SQLMesh/sqlmesh/tree/main/examples/multi). To bootstrap the project, you can point SQLMesh at both projects. @@ -196,7 +196,7 @@ $ sqlmesh -p examples/multi/repo_1 migrate SQLMesh also supports multiple repos for dbt projects, allowing it to correctly detect changes and orchestrate backfills even when changes span multiple dbt projects. -You can watch a [quick demo](https://www.loom.com/share/69c083428bb348da8911beb2cd4d30b2) of this setup or experiment with the [multi-repo dbt example](https://github.com/TobikoData/sqlmesh/tree/main/examples/multi_dbt) yourself. +You can watch a [quick demo](https://www.loom.com/share/69c083428bb348da8911beb2cd4d30b2) of this setup or experiment with the [multi-repo dbt example](https://github.com/SQLMesh/sqlmesh/tree/main/examples/multi_dbt) yourself. ## Multi-repo mixed projects @@ -212,4 +212,4 @@ $ sqlmesh -p examples/multi_hybrid/dbt_repo -p examples/multi_hybrid/sqlmesh_rep SQLMesh will automatically detect dependencies and lineage across both SQLMesh and dbt projects, even when models are sourcing from different project types. 
-For an example of this setup, refer to the [mixed SQLMesh and dbt example](https://github.com/TobikoData/sqlmesh/tree/main/examples/multi_hybrid). +For an example of this setup, refer to the [mixed SQLMesh and dbt example](https://github.com/SQLMesh/sqlmesh/tree/main/examples/multi_hybrid). diff --git a/docs/guides/notifications.md b/docs/guides/notifications.md index 03405b8252..749a71c842 100644 --- a/docs/guides/notifications.md +++ b/docs/guides/notifications.md @@ -256,7 +256,7 @@ This example shows an email notification target, where `sushi@example.com` email In Python configuration files, new notification targets can be configured to send custom messages. -To customize a notification, create a new notification target class as a subclass of one of the three target classes described above (`SlackWebhookNotificationTarget`, `SlackApiNotificationTarget`, or `BasicSMTPNotificationTarget`). See the definitions of these classes on Github [here](https://github.com/TobikoData/sqlmesh/blob/main/sqlmesh/core/notification_target.py). +To customize a notification, create a new notification target class as a subclass of one of the three target classes described above (`SlackWebhookNotificationTarget`, `SlackApiNotificationTarget`, or `BasicSMTPNotificationTarget`). See the definitions of these classes on Github [here](https://github.com/SQLMesh/sqlmesh/blob/main/sqlmesh/core/notification_target.py). Each of those notification target classes is a subclass of `BaseNotificationTarget`, which contains a `notify` function corresponding to each event type. This table lists the notification functions, along with the contextual information available to them at calling time (e.g., the environment name for start/end events): diff --git a/docs/index.md b/docs/index.md index 3e9330f83f..83c1b0a431 100644 --- a/docs/index.md +++ b/docs/index.md @@ -1,7 +1,7 @@ #

- SQLMesh logo + SQLMesh logo

SQLMesh is a next-generation data transformation framework designed to ship data quickly, efficiently, and without error. Data teams can efficiently run and deploy data transformations written in SQL or Python with visibility and control at any size. @@ -9,11 +9,11 @@ SQLMesh is a next-generation data transformation framework designed to ship data It is more than just a [dbt alternative](https://tobikodata.com/reduce_costs_with_cron_and_partitions.html).

- Architecture Diagram + Architecture Diagram

## Core Features -SQLMesh Plan Mode +SQLMesh Plan Mode > Get instant SQL impact analysis of your changes, whether in the CLI or in [SQLMesh Plan Mode](https://sqlmesh.readthedocs.io/en/stable/guides/ui/?h=modes#working-with-an-ide) @@ -121,7 +121,7 @@ It is more than just a [dbt alternative](https://tobikodata.com/reduce_costs_wit ??? tip "Level Up Your SQL" Write SQL in any dialect and SQLMesh will transpile it to your target SQL dialect on the fly before sending it to the warehouse. - Transpile Example + Transpile Example * Debug transformation errors *before* you run them in your warehouse in [10+ different SQL dialects](https://sqlmesh.readthedocs.io/en/stable/integrations/overview/#execution-engines) * Definitions using [simply SQL](https://sqlmesh.readthedocs.io/en/stable/concepts/models/sql_models/#sql-based-definition) (no need for redundant and confusing `Jinja` + `YAML`) @@ -153,7 +153,7 @@ Follow this [example](https://sqlmesh.readthedocs.io/en/stable/examples/incremen Together, we want to build data transformation without the waste. Connect with us in the following ways: * Join the [Tobiko Slack Community](https://tobikodata.com/slack) to ask questions, or just to say hi! -* File an issue on our [GitHub](https://github.com/TobikoData/sqlmesh/issues/new) +* File an issue on our [GitHub](https://github.com/SQLMesh/sqlmesh/issues/new) * Send us an email at [hello@tobikodata.com](mailto:hello@tobikodata.com) with your questions or feedback * Read our [blog](https://tobikodata.com/blog) diff --git a/docs/integrations/dbt.md b/docs/integrations/dbt.md index 7cbef5b8fa..5854236aa2 100644 --- a/docs/integrations/dbt.md +++ b/docs/integrations/dbt.md @@ -358,4 +358,4 @@ The dbt jinja methods that are not currently supported are: ## Missing something you need? -Submit an [issue](https://github.com/TobikoData/sqlmesh/issues), and we'll look into it! +Submit an [issue](https://github.com/SQLMesh/sqlmesh/issues), and we'll look into it! 
diff --git a/docs/integrations/dlt.md b/docs/integrations/dlt.md index a53dc184ea..7125510de9 100644 --- a/docs/integrations/dlt.md +++ b/docs/integrations/dlt.md @@ -70,7 +70,7 @@ SQLMesh will retrieve the data warehouse connection credentials from your dlt pr ### Example -Generating a SQLMesh project dlt is quite simple. In this example, we'll use the example `sushi_pipeline.py` from the [sushi-dlt project](https://github.com/TobikoData/sqlmesh/tree/main/examples/sushi_dlt). +Generating a SQLMesh project dlt is quite simple. In this example, we'll use the example `sushi_pipeline.py` from the [sushi-dlt project](https://github.com/SQLMesh/sqlmesh/tree/main/examples/sushi_dlt). First, run the pipeline within the project directory: diff --git a/docs/integrations/github.md b/docs/integrations/github.md index 923714888e..07903fce56 100644 --- a/docs/integrations/github.md +++ b/docs/integrations/github.md @@ -364,7 +364,7 @@ These are the possible outputs (based on how the bot is configured) that are cre * `prod_plan_preview` * `prod_environment_synced` -[There are many possible conclusions](https://github.com/TobikoData/sqlmesh/blob/main/sqlmesh/integrations/github/cicd/controller.py#L96-L102) so the best use case for this is likely to check for `success` conclusion in order to potentially run follow up steps. +[There are many possible conclusions](https://github.com/SQLMesh/sqlmesh/blob/main/sqlmesh/integrations/github/cicd/controller.py#L96-L102) so the best use case for this is likely to check for `success` conclusion in order to potentially run follow up steps. Note that in error cases conclusions may not be set and therefore you will get an empty string. Example of running a step after pr environment has been synced: diff --git a/docs/quickstart/cli.md b/docs/quickstart/cli.md index 7b77b2af1e..a592847470 100644 --- a/docs/quickstart/cli.md +++ b/docs/quickstart/cli.md @@ -160,7 +160,7 @@ https://sqlmesh.readthedocs.io/en/stable/quickstart/cli/ Need help? 
- Docs: https://sqlmesh.readthedocs.io - Slack: https://www.tobikodata.com/slack -- GitHub: https://github.com/TobikoData/sqlmesh/issues +- GitHub: https://github.com/SQLMesh/sqlmesh/issues ``` ??? info "Learn more about the project's configuration: `config.yaml`" diff --git a/docs/reference/python.md b/docs/reference/python.md index 14e0da84c8..1c4c9191ff 100644 --- a/docs/reference/python.md +++ b/docs/reference/python.md @@ -4,6 +4,6 @@ SQLMesh is built in Python, and its complete Python API reference is located [he The Python API reference is comprehensive and includes the internal components of SQLMesh. Those components are likely only of interest if you want to modify SQLMesh itself. -If you want to use SQLMesh via its Python API, the best approach is to study how the SQLMesh [CLI](./cli.md) calls it behind the scenes. The CLI implementation code shows exactly which Python methods are called for each CLI command and can be viewed [on Github](https://github.com/TobikoData/sqlmesh/blob/main/sqlmesh/cli/main.py). For example, the Python code executed by the `plan` command is located [here](https://github.com/TobikoData/sqlmesh/blob/15c8788100fa1cfb8b0cc1879ccd1ad21dc3e679/sqlmesh/cli/main.py#L302). +If you want to use SQLMesh via its Python API, the best approach is to study how the SQLMesh [CLI](./cli.md) calls it behind the scenes. The CLI implementation code shows exactly which Python methods are called for each CLI command and can be viewed [on Github](https://github.com/SQLMesh/sqlmesh/blob/main/sqlmesh/cli/main.py). For example, the Python code executed by the `plan` command is located [here](https://github.com/SQLMesh/sqlmesh/blob/15c8788100fa1cfb8b0cc1879ccd1ad21dc3e679/sqlmesh/cli/main.py#L302). Almost all the relevant Python methods are in the [SQLMesh `Context` class](https://sqlmesh.readthedocs.io/en/stable/_readthedocs/html/sqlmesh/core/context.html#Context). 
diff --git a/mkdocs.yml b/mkdocs.yml index 47ddca54e9..86761de9d7 100644 --- a/mkdocs.yml +++ b/mkdocs.yml @@ -1,6 +1,6 @@ site_name: SQLMesh -repo_url: https://github.com/TobikoData/sqlmesh -repo_name: TobikoData/sqlmesh +repo_url: https://github.com/SQLMesh/sqlmesh +repo_name: SQLMesh/sqlmesh nav: - "Overview": index.md - Get started: @@ -202,7 +202,7 @@ extra: - icon: fontawesome/solid/paper-plane link: mailto:hello@tobikodata.com - icon: fontawesome/brands/github - link: https://github.com/TobikoData/sqlmesh/issues/new + link: https://github.com/SQLMesh/sqlmesh/issues/new analytics: provider: google property: G-JXQ1R227VS diff --git a/pdoc/cli.py b/pdoc/cli.py index 5833c59207..9301ae0444 100755 --- a/pdoc/cli.py +++ b/pdoc/cli.py @@ -29,7 +29,7 @@ def mocked_import(*args, **kwargs): opts.logo_link = "https://tobikodata.com" opts.footer_text = "Copyright Tobiko Data Inc. 2022" opts.template_directory = Path(__file__).parent.joinpath("templates").absolute() - opts.edit_url = ["sqlmesh=https://github.com/TobikoData/sqlmesh/tree/main/sqlmesh/"] + opts.edit_url = ["sqlmesh=https://github.com/SQLMesh/sqlmesh/tree/main/sqlmesh/"] with mock.patch("pdoc.__main__.parser", **{"parse_args.return_value": opts}): cli() diff --git a/posts/virtual_data_environments.md b/posts/virtual_data_environments.md index dc3b2cb46e..5cde9dba51 100644 --- a/posts/virtual_data_environments.md +++ b/posts/virtual_data_environments.md @@ -8,7 +8,7 @@ In this post, I'm going to explain why existing approaches to managing developme I'll introduce [Virtual Data Environments](#virtual-data-environments-1) - a novel approach that provides low-cost, efficient, scalable, and safe data environments that are easy to use and manage. They significantly boost the productivity of anyone who has to create or maintain data pipelines. 
-Finally, I’m going to explain how **Virtual Data Environments** are implemented in [SQLMesh](https://github.com/TobikoData/sqlmesh) and share details on each core component involved: +Finally, I’m going to explain how **Virtual Data Environments** are implemented in [SQLMesh](https://github.com/SQLMesh/sqlmesh) and share details on each core component involved: - Data [fingerprinting](#fingerprinting) - [Automatic change categorization](#automatic-change-categorization) - Decoupling of [physical](#physical-layer) and [virtual](#virtual-layer) layers @@ -156,6 +156,6 @@ With **Virtual Data Environments**, SQLMesh is able to provide fully **isolated* - Rolling back a change happens almost instantaneously since no data movement is involved and only views that are part of the **virtual layer** get updated. - Deploying changes to production is a **virtual layer** operation, which ensures that results observed during development are exactly the same in production and that data and code are always in sync. -To streamline deploying changes to production, our team is about to release the SQLMesh [CI/CD bot](https://github.com/TobikoData/sqlmesh/blob/main/docs/integrations/github.md), which will help automate this process. +To streamline deploying changes to production, our team is about to release the SQLMesh [CI/CD bot](https://github.com/SQLMesh/sqlmesh/blob/main/docs/integrations/github.md), which will help automate this process. Don't miss out - join our [Slack channel](https://tobikodata.com/slack) and stay tuned! 
diff --git a/pyproject.toml b/pyproject.toml index a3e2b9addb..ebfc112567 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -154,8 +154,8 @@ sqlmesh_lsp = "sqlmesh.lsp.main:main" [project.urls] Homepage = "https://sqlmesh.com/" Documentation = "https://sqlmesh.readthedocs.io/en/stable/" -Repository = "https://github.com/sqlmesh/sqlmesh" -Issues = "https://github.com/sqlmesh/sqlmesh/issues" +Repository = "https://github.com/SQLMesh/sqlmesh" +Issues = "https://github.com/SQLMesh/sqlmesh/issues" [build-system] requires = ["setuptools >= 61.0", "setuptools_scm"] diff --git a/sqlmesh/cli/main.py b/sqlmesh/cli/main.py index 45f95d2abb..ec5acbea59 100644 --- a/sqlmesh/cli/main.py +++ b/sqlmesh/cli/main.py @@ -246,7 +246,7 @@ def init( Need help? • Docs: https://sqlmesh.readthedocs.io • Slack: https://www.tobikodata.com/slack -• GitHub: https://github.com/TobikoData/sqlmesh/issues +• GitHub: https://github.com/SQLMesh/sqlmesh/issues """) diff --git a/sqlmesh/core/model/common.py b/sqlmesh/core/model/common.py index 9e117b56fb..dc51b3379c 100644 --- a/sqlmesh/core/model/common.py +++ b/sqlmesh/core/model/common.py @@ -255,7 +255,7 @@ def _add_variables_to_python_env( # - appear in metadata-only expressions, such as `audits (...)`, virtual statements, etc # - appear in the ASTs or definitions of metadata-only macros # - # See also: https://github.com/TobikoData/sqlmesh/pull/4936#issuecomment-3136339936, + # See also: https://github.com/SQLMesh/sqlmesh/pull/4936#issuecomment-3136339936, # specifically the "Terminology" and "Observations" section. metadata_used_variables = { var_name for var_name, is_metadata in used_variables.items() if is_metadata @@ -275,7 +275,7 @@ def _add_variables_to_python_env( if overlapping_variables := (non_metadata_used_variables & metadata_used_variables): raise ConfigError( f"Variables {', '.join(overlapping_variables)} are both metadata and non-metadata, " - "which is unexpected. 
Please file an issue at https://github.com/TobikoData/sqlmesh/issues/new." + "which is unexpected. Please file an issue at https://github.com/SQLMesh/sqlmesh/issues/new." ) metadata_variables = { diff --git a/sqlmesh/core/plan/builder.py b/sqlmesh/core/plan/builder.py index 7d753cc330..01834594cd 100644 --- a/sqlmesh/core/plan/builder.py +++ b/sqlmesh/core/plan/builder.py @@ -165,7 +165,7 @@ def __init__( # There may be an significant delay between the PlanBuilder producing a Plan and the Plan actually being run # so if execution_time=None is passed to the PlanBuilder, then the resulting Plan should also have execution_time=None # in order to prevent the Plan that was intended to run "as at now" from having "now" fixed to some time in the past - # ref: https://github.com/TobikoData/sqlmesh/pull/4702#discussion_r2140696156 + # ref: https://github.com/SQLMesh/sqlmesh/pull/4702#discussion_r2140696156 self._execution_time = execution_time self._backfill_models = backfill_models diff --git a/sqlmesh/core/renderer.py b/sqlmesh/core/renderer.py index 0cbf9b6e94..50c1faeb63 100644 --- a/sqlmesh/core/renderer.py +++ b/sqlmesh/core/renderer.py @@ -690,7 +690,7 @@ def _optimize_query(self, query: exp.Query, all_deps: t.Set[str]) -> exp.Query: except Exception as ex: raise_config_error( - f"Failed to optimize query, please file an issue at https://github.com/TobikoData/sqlmesh/issues/new. {ex}", + f"Failed to optimize query, please file an issue at https://github.com/SQLMesh/sqlmesh/issues/new. 
{ex}",
            self._path,
        )

diff --git a/sqlmesh/core/snapshot/evaluator.py b/sqlmesh/core/snapshot/evaluator.py
index 1808011854..4f5102cbef 100644
--- a/sqlmesh/core/snapshot/evaluator.py
+++ b/sqlmesh/core/snapshot/evaluator.py
@@ -714,7 +714,7 @@ def _evaluate_snapshot(
         deployability_index = deployability_index or DeployabilityIndex.all_deployable()
         is_snapshot_deployable = deployability_index.is_deployable(snapshot)
         target_table_name = snapshot.table_name(is_deployable=is_snapshot_deployable)
-        # https://github.com/TobikoData/sqlmesh/issues/2609
+        # https://github.com/SQLMesh/sqlmesh/issues/2609
         # If there are no existing intervals yet; only consider this a first insert for the first snapshot in the batch
         if target_table_exists is None:
             target_table_exists = adapter.table_exists(target_table_name)
diff --git a/sqlmesh/core/state_sync/db/migrator.py b/sqlmesh/core/state_sync/db/migrator.py
index ad60c57570..8d73e1d395 100644
--- a/sqlmesh/core/state_sync/db/migrator.py
+++ b/sqlmesh/core/state_sync/db/migrator.py
@@ -195,7 +195,7 @@ def _apply_migrations(
             raise SQLMeshError(
                 f"Number of snapshots before ({snapshot_count_before}) and after "
                 f"({snapshot_count_after}) applying migration scripts {scripts} does not match. "
-                "Please file an issue issue at https://github.com/TobikoData/sqlmesh/issues/new."
+                "Please file an issue at https://github.com/SQLMesh/sqlmesh/issues/new."
             )
 
     migrate_snapshots_and_environments = (
diff --git a/sqlmesh/dbt/column.py b/sqlmesh/dbt/column.py
index 755f574388..80a6ad9325 100644
--- a/sqlmesh/dbt/column.py
+++ b/sqlmesh/dbt/column.py
@@ -42,7 +42,7 @@ def column_types_to_sqlmesh(
             )
         if column_def.args.get("constraints"):
             logger.warning(
-                f"Ignoring unsupported constraints for column '{name}' with definition '{column.data_type}'. 
Please refer to github.com/SQLMesh/sqlmesh/issues/4717 for more information." ) kind = column_def.kind if kind: diff --git a/sqlmesh/migrations/v0092_warn_about_dbt_data_type_diff.py b/sqlmesh/migrations/v0092_warn_about_dbt_data_type_diff.py index 02e2a5f4c1..5407e5a99a 100644 --- a/sqlmesh/migrations/v0092_warn_about_dbt_data_type_diff.py +++ b/sqlmesh/migrations/v0092_warn_about_dbt_data_type_diff.py @@ -5,7 +5,7 @@ doesn't match dbt's behavior. dbt only uses data_type for contracts/validation, not DDL. This fix may cause diffs if tables were created with incorrect types. -More context: https://github.com/TobikoData/sqlmesh/pull/5231 +More context: https://github.com/SQLMesh/sqlmesh/pull/5231 """ import json @@ -33,7 +33,7 @@ def migrate_rows(engine_adapter, schema, **kwargs): # type: ignore "tables may have been created with incorrect column types. After this migration, run " "'sqlmesh diff prod' to check for column type differences, and if any are found, " "apply a plan to correct the table schemas. For more details, see: " - "https://github.com/TobikoData/sqlmesh/pull/5231." + "https://github.com/SQLMesh/sqlmesh/pull/5231." 
) for (snapshot,) in engine_adapter.fetchall( diff --git a/tests/core/engine_adapter/test_mssql.py b/tests/core/engine_adapter/test_mssql.py index bf28157d00..ec6a4ba3e8 100644 --- a/tests/core/engine_adapter/test_mssql.py +++ b/tests/core/engine_adapter/test_mssql.py @@ -833,7 +833,7 @@ def test_create_table_from_query(make_mocked_engine_adapter: t.Callable, mocker: columns_mock.assert_called_once_with(exp.table_("__temp_ctas_test_random_id", quoted=True)) # We don't want to drop anything other than LIMIT 0 - # See https://github.com/TobikoData/sqlmesh/issues/4048 + # See https://github.com/SQLMesh/sqlmesh/issues/4048 adapter.ctas( table_name="test_schema.test_table", query_or_df=parse_one( @@ -848,7 +848,7 @@ def test_create_table_from_query(make_mocked_engine_adapter: t.Callable, mocker: def test_replace_query_strategy(adapter: MSSQLEngineAdapter, mocker: MockerFixture): - # ref issue 4472: https://github.com/TobikoData/sqlmesh/issues/4472 + # ref issue 4472: https://github.com/SQLMesh/sqlmesh/issues/4472 # The FULL strategy calls EngineAdapter.replace_query() which calls _insert_overwrite_by_condition() should use DELETE+INSERT and not MERGE expressions = d.parse( f""" diff --git a/tests/core/test_config.py b/tests/core/test_config.py index f3a0de6672..9ae239f298 100644 --- a/tests/core/test_config.py +++ b/tests/core/test_config.py @@ -1050,7 +1050,7 @@ def test_environment_statements_config(tmp_path): ] -# https://github.com/TobikoData/sqlmesh/pull/4049 +# https://github.com/SQLMesh/sqlmesh/pull/4049 def test_pydantic_import_error() -> None: class TestConfig(DuckDBConnectionConfig): pass diff --git a/tests/fixtures/dbt/empty_project/profiles.yml b/tests/fixtures/dbt/empty_project/profiles.yml index adae09e9c6..712456bffe 100644 --- a/tests/fixtures/dbt/empty_project/profiles.yml +++ b/tests/fixtures/dbt/empty_project/profiles.yml @@ -7,7 +7,7 @@ empty_project: type: duckdb # database is required for dbt < 1.5 where our adapter deliberately doesnt infer the 
database from the path and # defaults it to "main", which raises a "project catalog doesnt match context catalog" error - # ref: https://github.com/TobikoData/sqlmesh/pull/1109 + # ref: https://github.com/SQLMesh/sqlmesh/pull/1109 database: empty_project path: 'empty_project.duckdb' threads: 4 diff --git a/tests/pyproject.toml b/tests/pyproject.toml index 6f9cd2f9d9..73f143bfde 100644 --- a/tests/pyproject.toml +++ b/tests/pyproject.toml @@ -8,8 +8,8 @@ license = { text = "Apache License 2.0" } [project.urls] Homepage = "https://sqlmesh.com/" Documentation = "https://sqlmesh.readthedocs.io/en/stable/" -Repository = "https://github.com/TobikoData/sqlmesh" -Issues = "https://github.com/TobikoData/sqlmesh/issues" +Repository = "https://github.com/SQLMesh/sqlmesh" +Issues = "https://github.com/SQLMesh/sqlmesh/issues" [build-system] requires = ["setuptools", "setuptools_scm", "toml"] diff --git a/web/common/package.json b/web/common/package.json index 6a0965f19e..924bbaa883 100644 --- a/web/common/package.json +++ b/web/common/package.json @@ -101,7 +101,7 @@ "tailwindcss": "3.4.17" }, "private": false, - "repository": "TobikoData/sqlmesh", + "repository": "SQLMesh/sqlmesh", "scripts": { "build": "tsc -p tsconfig.build.json && vite build --base './' && pnpm run build:css", "build-storybook": "storybook build",