Skip to content

Commit

Permalink
take project_id and dataset_id as input
Browse files Browse the repository at this point in the history
  • Loading branch information
pranshi06 committed Sep 17, 2024
1 parent 3c9b2f6 commit 97c313a
Show file tree
Hide file tree
Showing 9 changed files with 101 additions and 19 deletions.
22 changes: 17 additions & 5 deletions crates/cli/src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -110,11 +110,23 @@ async fn initialize(with_metadata: bool, context: Context<impl Environment>) ->
),
},
),
supported_environment_variables: vec![metadata::EnvironmentVariableDefinition {
name: "HASURA_BIGQUERY_SERVICE_KEY".to_string(),
description: "The BigQuery service key".to_string(),
default_value: None,
}],
supported_environment_variables: vec![
metadata::EnvironmentVariableDefinition {
name: "HASURA_BIGQUERY_SERVICE_KEY".to_string(),
description: "The BigQuery service key".to_string(),
default_value: None,
},
metadata::EnvironmentVariableDefinition {
name: "HASURA_BIGQUERY_PROJECT_ID".to_string(),
description: "The BigQuery project ID/name".to_string(),
default_value: None,
},
metadata::EnvironmentVariableDefinition {
name: "HASURA_BIGQUERY_DATASET_ID".to_string(),
description: "The BigQuery dataset ID/name".to_string(),
default_value: None,
}
],
commands: metadata::Commands {
update: Some("hasura-ndc-bigquery update".to_string()),
watch: None,
Expand Down
34 changes: 34 additions & 0 deletions crates/configuration/src/values/database_info.rs
Original file line number Diff line number Diff line change
@@ -0,0 +1,34 @@
use schemars::JsonSchema;
use serde::{Deserialize, Serialize};

use super::Secret;

/// The BigQuery project ID/name. Wrapped in a [`Secret`] so it can be given
/// either as a plain value or resolved from an environment variable.
#[derive(Debug, Clone, PartialEq, Eq, Deserialize, Serialize, JsonSchema)]
pub struct ProjectId(pub Secret);

impl From<String> for ProjectId {
    fn from(value: String) -> Self {
        ProjectId(value.into())
    }
}

impl From<&str> for ProjectId {
    fn from(value: &str) -> Self {
        // Delegate to the owned-string conversion above.
        value.to_string().into()
    }
}

/// The BigQuery dataset ID/name. Wrapped in a [`Secret`] so it can be given
/// either as a plain value or resolved from an environment variable.
#[derive(Debug, Clone, PartialEq, Eq, Deserialize, Serialize, JsonSchema)]
pub struct DatasetId(pub Secret);

impl From<String> for DatasetId {
    fn from(value: String) -> Self {
        DatasetId(value.into())
    }
}

impl From<&str> for DatasetId {
    fn from(value: &str) -> Self {
        // Delegate to the owned-string conversion above.
        value.to_string().into()
    }
}
2 changes: 2 additions & 0 deletions crates/configuration/src/values/mod.rs
Original file line number Diff line number Diff line change
@@ -1,7 +1,9 @@
// Value types used throughout the configuration, grouped alphabetically.
pub mod database_info;
mod pool_settings;
mod secret;
pub mod uri;

// Re-export the commonly used types at the crate's `values` root.
pub use database_info::{DatasetId, ProjectId};
pub use pool_settings::PoolSettings;
pub use secret::Secret;
pub use uri::ConnectionUri;
40 changes: 33 additions & 7 deletions crates/configuration/src/version1.rs
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@

use crate::environment::Environment;
use crate::error::WriteParsedConfigurationError;
use crate::values::{self, ConnectionUri, PoolSettings, Secret};
use crate::values::{self, ConnectionUri, PoolSettings, ProjectId, Secret, DatasetId};

use super::error::ParseConfigurationError;
use gcp_bigquery_client::model::job_configuration_query::JobConfigurationQuery;
Expand Down Expand Up @@ -33,7 +33,9 @@ use query_engine_metadata::metadata::{

const CURRENT_VERSION: u32 = 1;
pub const CONFIGURATION_FILENAME: &str = "configuration.json";
pub const DEFAULT_CONNECTION_URI_VARIABLE: &str = "HASURA_BIGQUERY_SERVICE_KEY";
pub const DEFAULT_SERVICE_KEY_VARIABLE: &str = "HASURA_BIGQUERY_SERVICE_KEY";
pub const DEFAULT_PROJECT_ID_VARIABLE: &str = "HASURA_BIGQUERY_PROJECT_ID";
pub const DEFAULT_DATASET_ID_VARIABLE: &str = "HASURA_BIGQUERY_DATASET_ID";
const CONFIGURATION_QUERY: &str = include_str!("config2.sql");
const CONFIGURATION_JSONSCHEMA_FILENAME: &str = "schema.json";

Expand Down Expand Up @@ -73,6 +75,8 @@ pub struct ParsedConfiguration {
pub version: u32,
// The BigQuery service account key used to authenticate with the database
pub service_key: ConnectionUri,
pub project_id: ProjectId,
pub dataset_id: DatasetId,
#[serde(skip_serializing_if = "PoolSettings::is_default")]
#[serde(default)]
pub pool_settings: PoolSettings,
Expand Down Expand Up @@ -181,7 +185,13 @@ impl ParsedConfiguration {
Self {
version: CURRENT_VERSION,
service_key: ConnectionUri(Secret::FromEnvironment {
variable: DEFAULT_CONNECTION_URI_VARIABLE.into(),
variable: DEFAULT_SERVICE_KEY_VARIABLE.into(),
}),
project_id: ProjectId(Secret::FromEnvironment {
variable: DEFAULT_PROJECT_ID_VARIABLE.into(),
}),
dataset_id: DatasetId(Secret::FromEnvironment {
variable: DEFAULT_DATASET_ID_VARIABLE.into(),
}),
pool_settings: PoolSettings::default(),
metadata: metadata::Metadata::default(),
Expand Down Expand Up @@ -367,17 +377,31 @@ pub async fn configure(
args: &ParsedConfiguration,
environment: impl Environment,
) -> anyhow::Result<ParsedConfiguration> {
let uri = match &args.service_key {
let service_key = match &args.service_key {
ConnectionUri(Secret::Plain(value)) => Cow::Borrowed(value),
ConnectionUri(Secret::FromEnvironment { variable }) => {
Cow::Owned(environment.read(variable)?)
}
};

let service_account_key = yup_oauth2::parse_service_account_key(uri.as_str()).unwrap();
let project_id_ = match &args.project_id {
ProjectId(Secret::Plain(value)) => Cow::Borrowed(value),
ProjectId(Secret::FromEnvironment { variable }) => {
Cow::Owned(environment.read(variable)?)
}
};

let dataset_id_ = match &args.dataset_id {
DatasetId(Secret::Plain(value)) => Cow::Borrowed(value),
DatasetId(Secret::FromEnvironment { variable }) => {
Cow::Owned(environment.read(variable)?)
}
};

let service_account_key = yup_oauth2::parse_service_account_key(service_key.as_str()).unwrap();

let project_id = "hasura-development";
let dataset_id = "chinook_sample";
let project_id = project_id_.as_str();
let dataset_id = dataset_id_.as_str();

let schema_name = format!("{}.{}", project_id, dataset_id);
let database_name = schema_name.clone();
Expand Down Expand Up @@ -477,6 +501,8 @@ pub async fn configure(
Ok(ParsedConfiguration {
version: 1,
service_key: args.service_key.clone(),
project_id: args.project_id.clone(),
dataset_id: args.dataset_id.clone(),
pool_settings: args.pool_settings.clone(),
metadata: metadata::Metadata {
tables: tables_info,
Expand Down
2 changes: 1 addition & 1 deletion crates/connectors/ndc-bigquery/src/query.rs
Original file line number Diff line number Diff line change
Expand Up @@ -83,7 +83,7 @@ async fn execute_query(
) -> Result<JsonResponse<models::QueryResponse>, query_engine_execution::error::Error> {
let timer = state.metrics.time_query_execution();
let result =
query_engine_execution::query::execute(&state.bigquery_client, &state.metrics, plan)
query_engine_execution::query::execute(&state.bigquery_client, &state.metrics, &state.project_id, plan)
.await
.map(JsonResponse::Serialized);
timer.complete_with(result)
Expand Down
7 changes: 7 additions & 0 deletions crates/connectors/ndc-bigquery/src/state.rs
Original file line number Diff line number Diff line change
Expand Up @@ -16,6 +16,8 @@ use query_engine_execution::metrics;
pub struct State {
pub metrics: metrics::Metrics,
pub bigquery_client: gcp_bigquery_client::Client,
pub project_id: String,
pub dataset_id: String,
}

/// Create a connection pool and wrap it inside a connector State.
Expand All @@ -41,10 +43,15 @@ pub async fn create_state(
gcp_bigquery_client::Client::from_service_account_key(service_account_key, false)
.await
.unwrap();

let project_id = std::env::var("HASURA_BIGQUERY_PROJECT_ID").unwrap();
let dataset_id = std::env::var("HASURA_BIGQUERY_DATASET_ID").unwrap();

Ok(State {
metrics,
bigquery_client,
project_id,
dataset_id,
})
}

Expand Down
2 changes: 1 addition & 1 deletion crates/connectors/ndc-bigquery/tests/common/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -18,7 +18,7 @@ pub async fn create_router_from_deployment(deployment_path: &str) -> axum::Route
let _ = env_logger::builder().is_test(true).try_init();

let environment = HashMap::from([(
ndc_bigquery_configuration::version1::DEFAULT_CONNECTION_URI_VARIABLE.into(),
ndc_bigquery_configuration::version1::DEFAULT_SERVICE_KEY_VARIABLE.into(),
POSTGRESQL_CONNECTION_STRING.to_string(),
)]);

Expand Down
6 changes: 3 additions & 3 deletions crates/connectors/ndc-bigquery/tests/configuration_tests.rs
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,7 @@ use std::{collections::HashMap, fs};

use similar_asserts::assert_eq;

use ndc_bigquery_configuration::{configuration, values::Secret, version1, ConnectionUri};
use ndc_bigquery_configuration::{configuration, values::Secret, version1::{self, DEFAULT_SERVICE_KEY_VARIABLE}, ConnectionUri};

use tests_common::deployment::helpers::get_path_from_project_root;

Expand All @@ -32,11 +32,11 @@ async fn test_configure() {
.expect("Unable to deserialize as RawConfiguration");

let environment = HashMap::from([(
version1::DEFAULT_CONNECTION_URI_VARIABLE.into(),
version1::DEFAULT_SERVICE_KEY_VARIABLE.into(),
POSTGRESQL_CONNECTION_STRING.into(),
)]);

args.service_key = ConnectionUri(Secret::Plain((POSTGRESQL_CONNECTION_STRING.to_string())));
args.service_key = ConnectionUri(Secret::Plain(DEFAULT_SERVICE_KEY_VARIABLE.to_string()));

let actual = version1::configure(&args, environment)
.await
Expand Down
5 changes: 3 additions & 2 deletions crates/query-engine/execution/src/query.rs
Original file line number Diff line number Diff line change
Expand Up @@ -21,6 +21,7 @@ use query_engine_sql::sql;
pub async fn execute(
bigquery_client: &gcp_bigquery_client::Client,
metrics: &metrics::Metrics,
project_id: &String,
plan: sql::execution_plan::ExecutionPlan<sql::execution_plan::Query>,
) -> Result<Bytes, Error> {
// let query_timer = metrics.time_query_execution();
Expand All @@ -41,7 +42,7 @@ pub async fn execute(
None => {
// TODO: need to parse this from service account key or allow user to provide it
// TODO(PY)
let project_id = "hasura-development";
// let project_id = "hasura-development";

// let mut inner_rows = vec![];

Expand Down Expand Up @@ -83,7 +84,7 @@ pub async fn execute(
// Query
let mut rs = bigquery_client
.job()
.query(project_id, query_request)
.query(project_id.as_str(), query_request)
.await
.unwrap();

Expand Down

0 comments on commit 97c313a

Please sign in to comment.