Skip to content

Add support for single record queries by primary key #590

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Open
wants to merge 3 commits into
base: master
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
205 changes: 205 additions & 0 deletions src/builder.rs
Original file line number Diff line number Diff line change
Expand Up @@ -991,6 +991,19 @@ pub struct NodeBuilder {
pub selections: Vec<NodeSelection>,
}

#[derive(Clone, Debug)]
/// Builder for a single-record lookup by primary key, e.g. the
/// `accountByPk(id: ...)` entrypoint. Collects the primary key argument
/// values and the selected output fields for one table.
pub struct NodeByPkBuilder {
    // args - map of column name to value
    /// Primary key column name -> JSON value supplied in the query arguments.
    /// Expected to contain one entry per primary key column.
    pub pk_values: HashMap<String, serde_json::Value>,

    /// Response alias for this field (underscore-prefixed: currently unused
    /// by consumers in this diff — TODO confirm against the executor).
    pub _alias: String,

    // metadata
    /// The table being queried; shared with other builders via `Arc`.
    pub table: Arc<Table>,

    /// The selected fields (columns, functions, nodeId, nested nodes/connections).
    pub selections: Vec<NodeSelection>,
}

#[derive(Clone, Debug)]
pub enum NodeSelection {
Connection(ConnectionBuilder),
Expand All @@ -1009,6 +1022,16 @@ pub struct NodeIdInstance {
pub values: Vec<serde_json::Value>,
}

impl NodeIdInstance {
    /// Check that this decoded `nodeId` actually refers to `table`.
    ///
    /// Returns `Ok(())` when both the schema and table names match;
    /// otherwise returns an error indicating the id belongs to a
    /// different collection.
    pub fn validate(&self, table: &Table) -> Result<(), String> {
        let schema_matches = self.schema_name == table.schema;
        let table_matches = self.table_name == table.name;

        if schema_matches && table_matches {
            Ok(())
        } else {
            Err("nodeId belongs to a different collection".to_string())
        }
    }
}

#[derive(Clone, Debug)]
pub struct NodeIdBuilder {
pub alias: String,
Expand Down Expand Up @@ -2028,6 +2051,188 @@ where
})
}

/// Build a [`NodeByPkBuilder`] from a GraphQL `…ByPk` entrypoint field.
///
/// Resolves the primary key argument values from the query (expanding
/// variables), validates that every primary key column was supplied, and
/// normalizes the selection set into `NodeSelection`s (columns, computed
/// functions, `nodeId`, nested nodes/connections, and `__typename`).
///
/// # Errors
/// Returns `Err` when the field's type is not a node type, the table has no
/// primary key, a primary key column is missing from the arguments, an
/// unknown field is selected, or argument/variable coercion fails.
pub fn to_node_by_pk_builder<'a, T>(
    field: &__Field,
    query_field: &graphql_parser::query::Field<'a, T>,
    fragment_definitions: &Vec<FragmentDefinition<'a, T>>,
    variables: &serde_json::Value,
    variable_definitions: &Vec<VariableDefinition<'a, T>>,
) -> Result<NodeByPkBuilder, String>
where
    T: Text<'a> + Eq + AsRef<str> + Clone,
    T::Value: Hash,
{
    let type_ = field.type_().unmodified_type();
    let alias = alias_or_name(query_field);

    match type_ {
        __Type::Node(xtype) => {
            let type_name = xtype
                .name()
                .ok_or("Encountered type without name in node_by_pk builder")?;

            let field_map = field_map(&__Type::Node(xtype.clone()));

            // Get primary key columns from the table
            let pkey = xtype
                .table
                .primary_key()
                .ok_or("Table has no primary key".to_string())?;

            // Create a map of expected field arguments based on the field's arg definitions.
            // Maps GraphQL argument name -> underlying SQL column name, so we can
            // translate camelCase arg names back to their columns.
            let mut pk_arg_map = HashMap::new();
            for arg in field.args() {
                if let Some(NodeSQLType::Column(col)) = &arg.sql_type {
                    pk_arg_map.insert(arg.name().to_string(), col.name.clone());
                }
            }

            let mut pk_values = HashMap::new();

            // Process each argument in the query.
            // Arguments that are not primary key columns are silently ignored here;
            // presumably GraphQL validation rejects unknown arguments upstream — TODO confirm.
            for arg in &query_field.arguments {
                let arg_name = arg.0.as_ref();

                // Find the corresponding column name from our argument map
                if let Some(col_name) = pk_arg_map.get(arg_name) {
                    // Expand variables into a gson value, then convert to JSON
                    // for storage on the builder.
                    let value = to_gson(&arg.1, variables, variable_definitions)?;
                    let json_value = gson::gson_to_json(&value)?;
                    pk_values.insert(col_name.clone(), json_value);
                }
            }

            // Need values for all primary key columns (composite keys require
            // every component to uniquely identify one row).
            if pk_values.len() != pkey.column_names.len() {
                return Err("All primary key columns must be provided".to_string());
            }

            let mut builder_fields = vec![];
            // Flatten fragments/inline fragments into a single list of fields.
            let selection_fields = normalize_selection_set(
                &query_field.selection_set,
                fragment_definitions,
                &type_name,
                variables,
            )?;

            for selection_field in selection_fields {
                match field_map.get(selection_field.name.as_ref()) {
                    None => {
                        return Err(format!(
                            "Unknown field '{}' on type '{}'",
                            selection_field.name.as_ref(),
                            &type_name
                        ))
                    }
                    Some(f) => {
                        let alias = alias_or_name(&selection_field);

                        // Dispatch on the field's backing SQL entity (if any).
                        let node_selection = match &f.sql_type {
                            Some(node_sql_type) => match node_sql_type {
                                // Plain table column selection.
                                NodeSQLType::Column(col) => NodeSelection::Column(ColumnBuilder {
                                    alias,
                                    column: Arc::clone(col),
                                }),
                                // Computed function: the GraphQL return type decides
                                // how the function result is selected.
                                NodeSQLType::Function(func) => {
                                    let function_selection = match &f.type_() {
                                        __Type::Scalar(_) => FunctionSelection::ScalarSelf,
                                        __Type::List(_) => FunctionSelection::Array,
                                        __Type::Node(_) => {
                                            let node_builder = to_node_builder(
                                                f,
                                                &selection_field,
                                                fragment_definitions,
                                                variables,
                                                &[],
                                                variable_definitions,
                                            )?;
                                            FunctionSelection::Node(node_builder)
                                        }
                                        __Type::Connection(_) => {
                                            let connection_builder = to_connection_builder(
                                                f,
                                                &selection_field,
                                                fragment_definitions,
                                                variables,
                                                &[],
                                                variable_definitions,
                                            )?;
                                            FunctionSelection::Connection(connection_builder)
                                        }
                                        _ => {
                                            return Err(
                                                "invalid return type from function".to_string()
                                            )
                                        }
                                    };
                                    NodeSelection::Function(FunctionBuilder {
                                        alias,
                                        function: Arc::clone(func),
                                        table: Arc::clone(&xtype.table),
                                        selection: function_selection,
                                    })
                                }
                                // Opaque globally-unique `nodeId` built from the pk columns.
                                NodeSQLType::NodeId(pkey_columns) => {
                                    NodeSelection::NodeId(NodeIdBuilder {
                                        alias,
                                        columns: pkey_columns.clone(),
                                        table_name: xtype.table.name.clone(),
                                        schema_name: xtype.table.schema.clone(),
                                    })
                                }
                            },
                            // No backing SQL entity: introspection or relationship fields.
                            _ => match f.name().as_ref() {
                                "__typename" => NodeSelection::Typename {
                                    alias: alias_or_name(&selection_field),
                                    typename: xtype.name().expect("node type should have a name"),
                                },
                                _ => match f.type_().unmodified_type() {
                                    // to-many relationship
                                    __Type::Connection(_) => {
                                        let con_builder = to_connection_builder(
                                            f,
                                            &selection_field,
                                            fragment_definitions,
                                            variables,
                                            &[],
                                            variable_definitions,
                                        );
                                        NodeSelection::Connection(con_builder?)
                                    }
                                    // to-one relationship
                                    __Type::Node(_) => {
                                        let node_builder = to_node_builder(
                                            f,
                                            &selection_field,
                                            fragment_definitions,
                                            variables,
                                            &[],
                                            variable_definitions,
                                        );
                                        NodeSelection::Node(node_builder?)
                                    }
                                    _ => {
                                        return Err(format!(
                                            "unexpected field type on node {}",
                                            f.name()
                                        ));
                                    }
                                },
                            },
                        };
                        builder_fields.push(node_selection);
                    }
                }
            }

            Ok(NodeByPkBuilder {
                pk_values,
                _alias: alias,
                table: Arc::clone(&xtype.table),
                selections: builder_fields,
            })
        }
        _ => Err("cannot build query for non-node type".to_string()),
    }
}

// Introspection

#[allow(clippy::large_enum_variant)]
Expand Down
57 changes: 56 additions & 1 deletion src/graphql.rs
Original file line number Diff line number Diff line change
Expand Up @@ -1247,6 +1247,61 @@ impl ___Type for QueryType {
};

f.push(collection_entrypoint);

// Add single record query by primary key if the table has a primary key
// and the primary key types are supported (int, bigint, uuid, string)
if let Some(primary_key) = table.primary_key() {
if table.has_supported_pk_types_for_by_pk() {
let node_type = NodeType {
table: Arc::clone(table),
fkey: None,
reverse_reference: None,
schema: Arc::clone(&self.schema),
};

// Create arguments for each primary key column
let mut pk_args = Vec::new();
for col_name in &primary_key.column_names {
if let Some(col) = table.columns.iter().find(|c| &c.name == col_name) {
let col_type = sql_column_to_graphql_type(col, &self.schema)
.ok_or_else(|| {
format!(
"Could not determine GraphQL type for column {}",
col_name
)
})
.unwrap_or_else(|_| __Type::Scalar(Scalar::String(None)));

// Use graphql_column_field_name to convert snake_case to camelCase if needed
let arg_name = self.schema.graphql_column_field_name(col);

pk_args.push(__InputValue {
name_: arg_name,
type_: __Type::NonNull(NonNullType {
type_: Box::new(col_type),
}),
description: Some(format!("The record's `{}` value", col_name)),
default_value: None,
sql_type: Some(NodeSQLType::Column(Arc::clone(col))),
});
}
}

let pk_entrypoint = __Field {
name_: format!("{}ByPk", lowercase_first_letter(table_base_type_name)),
type_: __Type::Node(node_type),
args: pk_args,
description: Some(format!(
"Retrieve a record of type `{}` by its primary key",
table_base_type_name
)),
deprecation_reason: None,
sql_type: None,
};

f.push(pk_entrypoint);
}
}
}
}

Expand Down Expand Up @@ -3433,7 +3488,7 @@ impl FromStr for FilterOp {
"contains" => Ok(Self::Contains),
"containedBy" => Ok(Self::ContainedBy),
"overlaps" => Ok(Self::Overlap),
_ => Err("Invalid filter operation".to_string()),
other => Err(format!("Invalid filter operation: {}", other)),
}
}
}
Expand Down
52 changes: 52 additions & 0 deletions src/resolve.rs
Original file line number Diff line number Diff line change
Expand Up @@ -245,6 +245,58 @@ where
Err(msg) => res_errors.push(ErrorMessage { message: msg }),
}
}
__Type::Node(_) => {
// Determine if this is a primary key query field
let has_pk_args = !field_def.args().is_empty()
&& field_def.args().iter().all(|arg| {
// All PK field args have a SQL Column type
arg.sql_type.is_some()
&& matches!(
arg.sql_type.as_ref().unwrap(),
NodeSQLType::Column(_)
)
});

if has_pk_args {
let node_by_pk_builder = to_node_by_pk_builder(
field_def,
selection,
&fragment_definitions,
variables,
variable_definitions,
);

match node_by_pk_builder {
Ok(builder) => match builder.execute() {
Ok(d) => {
res_data[alias_or_name(selection)] = d;
}
Err(msg) => res_errors.push(ErrorMessage { message: msg }),
},
Err(msg) => res_errors.push(ErrorMessage { message: msg }),
}
} else {
// Regular node access
let node_builder = to_node_builder(
field_def,
selection,
&fragment_definitions,
variables,
&[],
variable_definitions,
);

match node_builder {
Ok(builder) => match builder.execute() {
Ok(d) => {
res_data[alias_or_name(selection)] = d;
}
Err(msg) => res_errors.push(ErrorMessage { message: msg }),
},
Err(msg) => res_errors.push(ErrorMessage { message: msg }),
}
}
}
__Type::__Type(_) => {
let __type_builder = schema_type.to_type_builder(
field_def,
Expand Down
Loading