Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
4 changes: 4 additions & 0 deletions sqlmesh/core/dialect.py
Original file line number Diff line number Diff line change
Expand Up @@ -566,6 +566,10 @@ def _parse_if(self: Parser) -> t.Optional[exp.Expr]:
if last_token.token_type == TokenType.R_PAREN:
self._tokens[-2].comments.extend(last_token.comments)
self._tokens.pop()
if hasattr(self, "_tokens_size"):
# keep _tokens_size in sync: sqlglot 30.0.3 caches len(_tokens), and
# _advance() would otherwise try to read tokens[index + 1] past the new end
self._tokens_size -= 1
else:
self.raise_error("Expecting )")

Expand Down
5 changes: 5 additions & 0 deletions sqlmesh/utils/jinja.py
Original file line number Diff line number Diff line change
Expand Up @@ -79,6 +79,11 @@ def extract(self, jinja: str, dialect: str = "") -> t.Dict[str, MacroInfo]:
self.reset()
self.sql = jinja
self._tokens = Dialect.get_or_raise(dialect).tokenize(jinja)

# guard for older sqlglot versions (before 30.0.3)
if hasattr(self, "_tokens_size"):
# keep the cached length in sync
self._tokens_size = len(self._tokens)
self._index = -1
self._advance()

Expand Down
5 changes: 5 additions & 0 deletions tests/dbt/cli/test_run.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,7 @@
import typing as t
import pytest
from pathlib import Path
import shutil
from click.testing import Result
import time_machine
from sqlmesh_dbt.operations import create
Expand Down Expand Up @@ -71,6 +72,10 @@ def test_run_with_changes_and_full_refresh(
if partial_parse_file.exists():
partial_parse_file.unlink()

cache_dir = project_path / ".cache"
if cache_dir.exists():
shutil.rmtree(cache_dir)

# run with --full-refresh. this should:
# - fully refresh model_a (pick up the new records from external_table)
# - deploy the local change to model_b (introducing the 'changed' column)
Expand Down
Loading