diff --git a/gooddata-dbt/gooddata_dbt/dbt/profiles.py b/gooddata-dbt/gooddata_dbt/dbt/profiles.py
index 3745db3c2..b775c97ba 100644
--- a/gooddata-dbt/gooddata_dbt/dbt/profiles.py
+++ b/gooddata-dbt/gooddata_dbt/dbt/profiles.py
@@ -15,6 +15,7 @@
     CatalogDataSourceVertica,
     MotherDuckAttributes,
     PostgresAttributes,
+    RedshiftAttributes,
     SnowflakeAttributes,
     TokenCredentialsFromEnvVar,
     VerticaAttributes,
@@ -54,6 +55,37 @@ def to_gooddata(self, data_source_id: str, schema_name: str) -> CatalogDataSourc
         )
 
 
+@attrs.define(auto_attribs=True, kw_only=True)
+class DbtOutputRedshift(Base):
+    name: str
+    title: str
+    host: str
+    port: str
+    user: str
+    password: str = attrs.field(repr=lambda value: "***")
+    dbname: str
+    database: str = attrs.field(default=attrs.Factory(lambda self: self.dbname, takes_self=True))
+    schema: str
+
+    def to_gooddata(self, data_source_id: str, schema_name: str) -> CatalogDataSourcePostgres:
+        return CatalogDataSourcePostgres(
+            id=data_source_id,
+            name=self.title,
+            db_specific_attributes=RedshiftAttributes(
+                host=self.host,
+                port=self.port,
+                # TODO - adopt this in Python SDK
+                db_name=quote_plus(self.dbname),
+            ),
+            # Schema name is collected from dbt manifest from relevant tables
+            schema=schema_name,
+            credentials=BasicCredentials(
+                username=self.user,
+                password=self.password,
+            ),
+        )
+
+
 @attrs.define(auto_attribs=True, kw_only=True)
 class DbtOutputSnowflake(Base):
     name: str
@@ -152,7 +184,7 @@ def to_gooddata(self, data_source_id: str, schema_name: str) -> CatalogDataSourc
         )
 
 
-DbtOutput = Union[DbtOutputPostgreSQL, DbtOutputSnowflake, DbtOutputVertica, DbtOutputMotherDuck]
+DbtOutput = Union[DbtOutputPostgreSQL, DbtOutputRedshift, DbtOutputSnowflake, DbtOutputVertica, DbtOutputMotherDuck]
 
 
 @attrs.define(auto_attribs=True, kw_only=True)
@@ -189,16 +221,21 @@ def inject_env_vars(output_def: Dict) -> None:
             # else do nothing, real value seems to be stored in dbt profile
 
     @staticmethod
-    def to_data_class(output: str, output_def: Dict) -> DbtOutput:
+    def to_data_class(output: str, output_def: Dict) -> Optional[DbtOutput]:
         db_type = output_def["type"]
         if db_type == "postgres":
             return DbtOutputPostgreSQL.from_dict({"name": output, **output_def})
+        elif db_type == "redshift":
+            return DbtOutputRedshift.from_dict({"name": output, **output_def})
         elif db_type == "snowflake":
             return DbtOutputSnowflake.from_dict({"name": output, **output_def})
         elif db_type == "vertica":
             return DbtOutputVertica.from_dict({"name": output, **output_def})
-        elif db_type == "duckdb":
+        elif db_type == "duckdb" and output_def["path"].startswith("md:"):
             return DbtOutputMotherDuck.from_dict({"name": output, **output_def})
+        elif db_type == "duckdb":
+            # No logging available here. Pass because GoodData cannot connect to DuckDB file.
+            return None
         else:
             raise Exception(f"Unsupported database type {output=} {db_type=}")
 
@@ -210,7 +247,8 @@ def profiles(self) -> List[DbtProfile]:
             for output, output_def in profile_def["outputs"].items():
                 self.inject_env_vars(output_def)
                 dbt_output = self.to_data_class(output, output_def)
-                outputs.append(dbt_output)
+                if dbt_output:
+                    outputs.append(dbt_output)
             profiles.append(DbtProfile(name=profile, outputs=outputs))
         return profiles
 
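
Note, not part of the diff: a minimal usage sketch of the new DbtOutputRedshift class, assuming gooddata-dbt (and its gooddata-sdk dependency) is importable. Only the field names and the to_gooddata() signature come from the change above; all connection values below are hypothetical placeholders.

from gooddata_dbt.dbt.profiles import DbtOutputRedshift

# A Redshift output as it would arrive from dbt's profiles.yml, already merged
# with its output name (mirrors the {"name": output, **output_def} pattern used
# by to_data_class above). All values here are made up for illustration.
redshift_output = DbtOutputRedshift(
    name="prod",
    title="Redshift warehouse",             # hypothetical title
    host="example.redshift.amazonaws.com",  # placeholder host
    port="5439",
    user="analytics",
    password="secret",                      # placeholder secret
    dbname="dev",
    schema="cicd_output_stage",
)

# `database` falls back to `dbname` through the attrs Factory default.
assert redshift_output.database == redshift_output.dbname

# Map the dbt output to a GoodData data source declaration; schema_name is the
# schema collected from the dbt manifest, data_source_id is chosen by the caller.
data_source = redshift_output.to_gooddata(
    data_source_id="redshift_ds",
    schema_name="cicd_output_stage",
)
print(data_source.id)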