Skip to content

Commit

Permalink
parent 5c69890
Browse files Browse the repository at this point in the history
author Kyle Beauchamp <[email protected]> 1585263686 -0700
committer Kyle Beauchamp <[email protected]> 1585752147 -0700

Add additional fields for redshift

Update changelog

Use python variable naming, rather than AWS

Update changelog with new var names

Use default setting of False for maximum backwards compatibility

Use field and default_factory(list) for empty list default

Change order of class variables for maximum consistency

Add contributors and lint fix

make warn return empty string

add test

add to changelog

Try to add unit test

Remove extra test

Slim the manifest

remove raw_sql from ParsedMacros, remove file_contents from docs
get rid of seed_file_path
Do not write 'manifest.files' to disk
  • Loading branch information
kyleabeauchamp committed Apr 1, 2020
1 parent 5c69890 commit 30fe9b9
Show file tree
Hide file tree
Showing 20 changed files with 39 additions and 497 deletions.
5 changes: 4 additions & 1 deletion CHANGELOG.md
Original file line number Diff line number Diff line change
Expand Up @@ -5,16 +5,19 @@
- Added a `get-manifest` API call. ([#2168](https:/fishtown-analytics/dbt/issues/2168), [#2232](https:/fishtown-analytics/dbt/pull/2232))
- Support adapter-specific aliases (like `project` and `dataset` on BigQuery) in source definitions. ([#2133](https:/fishtown-analytics/dbt/issues/2133), [#2244](https:/fishtown-analytics/dbt/pull/2244))
- Users can now use jinja as arguments to tests. Test arguments are rendered in the native context and injected into the test execution context directly. ([#2149](https:/fishtown-analytics/dbt/issues/2149), [#2220](https:/fishtown-analytics/dbt/pull/2220))
- Added support for `db_groups` and `autocreate` flags in Redshift configurations. ([#1995](https:/fishtown-analytics/dbt/issues/1995), [#2262](https:/fishtown-analytics/dbt/pull/2262))

### Fixes
- When a jinja value is undefined, give a helpful error instead of failing with cryptic "cannot pickle ParserMacroCapture" errors ([#2110](https:/fishtown-analytics/dbt/issues/2110), [#2184](https:/fishtown-analytics/dbt/pull/2184))
- Added timeout to registry download call ([#2195](https:/fishtown-analytics/dbt/issues/2195), [#2228](https:/fishtown-analytics/dbt/pull/2228))
- When a macro is called with invalid arguments, include the calling model in the output ([#2073](https:/fishtown-analytics/dbt/issues/2073), [#2238](https:/fishtown-analytics/dbt/pull/2238))
- When a warn exception is not in a jinja do block, return an empty string instead of None ([#2222](https:/fishtown-analytics/dbt/issues/2222), [#2259](https:/fishtown-analytics/dbt/pull/2259))

Contributors:
- [@raalsky](https:/Raalsky) ([#2224](https:/fishtown-analytics/dbt/pull/2224), [#2228](https:/fishtown-analytics/dbt/pull/2228))
- [@ilkinulas](https:/ilkinulas) ([#2199](https:/fishtown-analytics/dbt/pull/2199))
- [@kyleabeauchamp](https:/kyleabeauchamp) ([#2262](https:/fishtown-analytics/dbt/pull/2262))
- [@jeremyyeo](https:/jeremyyeo) ([#2259](https:/fishtown-analytics/dbt/pull/2259))

## dbt 0.16.0 (March 23, 2020)

Expand Down
11 changes: 5 additions & 6 deletions core/dbt/clients/jinja.py
Original file line number Diff line number Diff line change
Expand Up @@ -126,23 +126,22 @@ def render(self, *args, **kwargs):


class TemplateCache:

def __init__(self):
self.file_cache = {}
self.file_cache: Dict[str, jinja2.Template] = {}

def get_node_template(self, node):
key = (node.package_name, node.original_file_path)
def get_node_template(self, node) -> jinja2.Template:
key = node.macro_sql

if key in self.file_cache:
return self.file_cache[key]

template = get_template(
string=node.raw_sql,
string=node.macro_sql,
ctx={},
node=node,
)
self.file_cache[key] = template

self.file_cache[key] = template
return template

def clear(self):
Expand Down
4 changes: 3 additions & 1 deletion core/dbt/context/providers.py
Original file line number Diff line number Diff line change
Expand Up @@ -533,7 +533,9 @@ def load_agate_table(self) -> agate.Table:
'can only load_agate_table for seeds (got a {})'
.format(self.model.resource_type)
)
path = self.model.seed_file_path
path = os.path.join(
self.model.root_path, self.model.original_file_path
)
column_types = self.model.config.column_types
try:
table = agate_helper.from_csv(path, text_columns=column_types)
Expand Down
9 changes: 1 addition & 8 deletions core/dbt/contracts/graph/compiled.py
Original file line number Diff line number Diff line change
Expand Up @@ -16,7 +16,7 @@
)
from dbt.node_types import NodeType
from dbt.contracts.util import Replaceable
from dbt.exceptions import InternalException, RuntimeException
from dbt.exceptions import RuntimeException

from hologram import JsonSchemaMixin
from dataclasses import dataclass, field
Expand Down Expand Up @@ -95,13 +95,6 @@ class CompiledRPCNode(CompiledNode):
class CompiledSeedNode(CompiledNode):
resource_type: NodeType = field(metadata={'restrict': [NodeType.Seed]})
config: SeedConfig = field(default_factory=SeedConfig)
seed_file_path: str = ''

def __post_init__(self):
if self.seed_file_path == '':
raise InternalException(
'Seeds should always have a seed_file_path'
)

@property
def empty(self):
Expand Down
3 changes: 0 additions & 3 deletions core/dbt/contracts/graph/manifest.py
Original file line number Diff line number Diff line change
Expand Up @@ -698,7 +698,6 @@ def writable_manifest(self):
disabled=self.disabled,
child_map=forward_edges,
parent_map=backward_edges,
files=self.files,
)

@classmethod
Expand Down Expand Up @@ -835,5 +834,3 @@ class WritableManifest(JsonSchemaMixin, Writable):
parent_map: Optional[NodeEdgeMap]
child_map: Optional[NodeEdgeMap]
metadata: ManifestMetadata
# map of original_file_path to all unique IDs provided by that file
files: Mapping[str, SourceFile]
13 changes: 3 additions & 10 deletions core/dbt/contracts/graph/parsed.py
Original file line number Diff line number Diff line change
Expand Up @@ -20,7 +20,7 @@
from dbt.clients.system import write_file
import dbt.flags
from dbt.contracts.graph.unparsed import (
UnparsedNode, UnparsedMacro, UnparsedDocumentationFile, Quoting, Docs,
UnparsedNode, UnparsedDocumentation, Quoting, Docs,
UnparsedBaseNode, FreshnessThreshold, ExternalTable,
AdditionalPropertiesAllowed, HasYamlMetadata, MacroArgument
)
Expand Down Expand Up @@ -274,13 +274,6 @@ class SeedConfig(NodeConfig):
class ParsedSeedNode(ParsedNode):
resource_type: NodeType = field(metadata={'restrict': [NodeType.Seed]})
config: SeedConfig = field(default_factory=SeedConfig)
seed_file_path: str = ''

def __post_init__(self):
if self.seed_file_path == '':
raise dbt.exceptions.InternalException(
'Seeds should always have a seed_file_path'
)

@property
def empty(self):
Expand Down Expand Up @@ -496,7 +489,7 @@ class ParsedMacroPatch(ParsedPatch):


@dataclass
class ParsedMacro(UnparsedMacro, HasUniqueID):
class ParsedMacro(UnparsedBaseNode, HasUniqueID):
name: str
macro_sql: str
resource_type: NodeType = field(metadata={'restrict': [NodeType.Macro]})
Expand All @@ -523,7 +516,7 @@ def patch(self, patch: ParsedMacroPatch):


@dataclass
class ParsedDocumentation(UnparsedDocumentationFile, HasUniqueID):
class ParsedDocumentation(UnparsedDocumentation, HasUniqueID):
name: str
block_contents: str

Expand Down
8 changes: 6 additions & 2 deletions core/dbt/contracts/graph/unparsed.py
Original file line number Diff line number Diff line change
Expand Up @@ -271,13 +271,17 @@ def yaml_key(self) -> 'str':


@dataclass
class UnparsedDocumentationFile(JsonSchemaMixin, Replaceable):
class UnparsedDocumentation(JsonSchemaMixin, Replaceable):
package_name: str
root_path: str
path: str
original_file_path: str
file_contents: str

@property
def resource_type(self):
return NodeType.Documentation


@dataclass
class UnparsedDocumentationFile(UnparsedDocumentation):
file_contents: str
3 changes: 2 additions & 1 deletion core/dbt/exceptions.py
Original file line number Diff line number Diff line change
Expand Up @@ -819,7 +819,8 @@ def warn_or_raise(exc, log_fmt=None):
def warn(msg, node=None):
# there's no reason to expose log_fmt to macros - it's only useful for
# handling colors
return warn_or_error(msg, node=node)
warn_or_error(msg, node=node)
return ""


# Update this when a new function should be added to the
Expand Down
1 change: 1 addition & 0 deletions core/dbt/parser/docs.py
Original file line number Diff line number Diff line change
Expand Up @@ -57,6 +57,7 @@ def _parse_template_docs(self, template, docfile):
'block_contents': item().strip(),
}
)
merged.pop('file_contents', None)
yield ParsedDocumentation.from_dict(merged)

def parse_block(self, block: FullBlock) -> Iterable[ParsedDocumentation]:
Expand Down
1 change: 0 additions & 1 deletion core/dbt/parser/macros.py
Original file line number Diff line number Diff line change
Expand Up @@ -39,7 +39,6 @@ def parse_macro(
macro_sql=block.full_block,
original_file_path=base_node.original_file_path,
package_name=base_node.package_name,
raw_sql=base_node.raw_sql,
root_path=base_node.root_path,
resource_type=base_node.resource_type,
name=name,
Expand Down
16 changes: 0 additions & 16 deletions core/dbt/parser/seeds.py
Original file line number Diff line number Diff line change
Expand Up @@ -30,19 +30,3 @@ def render_with_context(

def load_file(self, match: FilePath) -> SourceFile:
return SourceFile.seed(match)

def _create_parsetime_node(
self,
block: FileBlock,
path: str,
config: SourceConfig,
name=None,
**kwargs,
) -> ParsedSeedNode:
return super()._create_parsetime_node(
block=block,
path=path,
config=config,
name=name,
seed_file_path=block.path.full_path,
)
11 changes: 8 additions & 3 deletions plugins/redshift/dbt/adapters/redshift/connections.py
Original file line number Diff line number Diff line change
Expand Up @@ -14,7 +14,7 @@
from hologram.helpers import StrEnum

from dataclasses import dataclass, field
from typing import Optional
from typing import Optional, List

drop_lock: Lock = dbt.flags.MP_CONTEXT.Lock()

Expand Down Expand Up @@ -47,6 +47,8 @@ class RedshiftCredentials(PostgresCredentials):
iam_duration_seconds: int = 900
search_path: Optional[str] = None
keepalives_idle: int = 240
autocreate: bool = False
db_groups: List[str] = field(default_factory=list)

@property
def type(self):
Expand Down Expand Up @@ -85,7 +87,7 @@ def fresh_transaction(self, name=None):

@classmethod
def fetch_cluster_credentials(cls, db_user, db_name, cluster_id,
duration_s):
duration_s, autocreate, db_groups):
"""Fetches temporary login credentials from AWS. The specified user
must already exist in the database, or else an error will occur"""
boto_client = boto3.client('redshift')
Expand All @@ -96,7 +98,8 @@ def fetch_cluster_credentials(cls, db_user, db_name, cluster_id,
DbName=db_name,
ClusterIdentifier=cluster_id,
DurationSeconds=duration_s,
AutoCreate=False)
AutoCreate=autocreate,
DbGroups=db_groups,)

except boto_client.exceptions.ClientError as e:
raise dbt.exceptions.FailedToConnectException(
Expand All @@ -121,6 +124,8 @@ def get_tmp_iam_cluster_credentials(cls, credentials):
credentials.database,
credentials.cluster_id,
iam_duration_s,
credentials.autocreate,
credentials.db_groups,
)

# replace username and password with temporary redshift credentials
Expand Down
Original file line number Diff line number Diff line change
@@ -1,2 +1,3 @@
{% do exceptions.warn('warning: everything is terrible but not that terrible') %}
{{ exceptions.warn("warning: everything is terrible but not that terrible") }}
select 1 as id
Loading

0 comments on commit 30fe9b9

Please sign in to comment.