Commit
Merge branch 'master' into remote-relationships-permissions
codingkarthik committed Nov 5, 2020
2 parents 7fde85b + 81fd1ec commit 545acb4
Showing 20 changed files with 385 additions and 494 deletions.
2 changes: 2 additions & 0 deletions server/graphql-engine.cabal
@@ -457,9 +457,11 @@ library
, Hasura.GraphQL.Context
, Hasura.GraphQL.Parser
, Hasura.GraphQL.Parser.Class
+ , Hasura.GraphQL.Parser.Class.Parse
, Hasura.GraphQL.Parser.Collect
, Hasura.GraphQL.Parser.Column
, Hasura.GraphQL.Parser.Internal.Parser
+ , Hasura.GraphQL.Parser.Internal.Types
, Hasura.GraphQL.Parser.Monad
, Hasura.GraphQL.Parser.Schema
, Hasura.GraphQL.Schema
8 changes: 4 additions & 4 deletions server/src-lib/Hasura/Backends/Postgres/Translate/BoolExp.hs
@@ -258,14 +258,14 @@ parseOperationsExpression rhsParser fim columnInfo =

-- This convoluted expression instead of col = val
-- to handle the case of col : null
- equalsBoolExpBuilder :: S.SQLExp -> S.SQLExp -> S.BoolExp
+ equalsBoolExpBuilder :: SQLExp 'Postgres -> SQLExp 'Postgres -> S.BoolExp
equalsBoolExpBuilder qualColExp rhsExp =
S.BEBin S.OrOp (S.BECompare S.SEQ qualColExp rhsExp)
(S.BEBin S.AndOp
(S.BENull qualColExp)
(S.BENull rhsExp))

- notEqualsBoolExpBuilder :: S.SQLExp -> S.SQLExp -> S.BoolExp
+ notEqualsBoolExpBuilder :: SQLExp 'Postgres -> SQLExp 'Postgres -> S.BoolExp
notEqualsBoolExpBuilder qualColExp rhsExp =
S.BEBin S.OrOp (S.BECompare S.SNE qualColExp rhsExp)
(S.BEBin S.AndOp
@@ -377,13 +377,13 @@ foldBoolExp f = \case
BoolFld ce -> f ce

mkFieldCompExp
- :: S.Qual -> FieldName -> OpExpG 'Postgres S.SQLExp -> S.BoolExp
+ :: S.Qual -> FieldName -> OpExpG 'Postgres (SQLExp 'Postgres) -> S.BoolExp
mkFieldCompExp qual lhsField = mkCompExp (mkQField lhsField)
where
mkQCol = S.SEQIdentifier . S.QIdentifier qual . toIdentifier
mkQField = S.SEQIdentifier . S.QIdentifier qual . Identifier . getFieldNameTxt

- mkCompExp :: S.SQLExp -> OpExpG 'Postgres S.SQLExp -> S.BoolExp
+ mkCompExp :: SQLExp 'Postgres -> OpExpG 'Postgres (SQLExp 'Postgres) -> S.BoolExp
mkCompExp lhs = \case
ACast casts -> mkCastsExp casts
AEQ False val -> equalsBoolExpBuilder lhs val
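
The changes in this file only thread the backend-indexed SQLExp type through the boolean-expression builders; the builders' logic is untouched. For readers unfamiliar with the null-handling comment above, here is a minimal, self-contained sketch (invented constructor names, not the engine's real AST) of the shape of expression equalsBoolExpBuilder produces:

{-# LANGUAGE OverloadedStrings #-}
module EqualsNullSketch where

import Data.Text (Text)

-- Toy stand-ins for the engine's SQL AST; the real constructors differ.
data SQLExp  = SECol Text | SELit Text | SENull
  deriving Show

data BoolExp
  = BECompare Text SQLExp SQLExp   -- comparison operator, e.g. "=" or "<>"
  | BEBin Text BoolExp BoolExp     -- boolean operator, e.g. "OR" or "AND"
  | BENull SQLExp                  -- IS NULL check
  deriving Show

-- Plain `col = val` evaluates to NULL (treated as false) when either side is
-- NULL, so the builder ORs in an explicit both-sides-NULL check:
--   (col = val) OR (col IS NULL AND val IS NULL)
equalsBoolExpBuilder :: SQLExp -> SQLExp -> BoolExp
equalsBoolExpBuilder col val =
  BEBin "OR" (BECompare "=" col val)
             (BEBin "AND" (BENull col) (BENull val))
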
38 changes: 19 additions & 19 deletions server/src-lib/Hasura/Backends/Postgres/Translate/Select.hs
@@ -71,7 +71,7 @@ selectFromToFromItem pfx = \case
-- from the FromItem generated with selectFromToFromItem
-- however given from S.FromItem is modelled, it is not
-- possible currently
- selectFromToQual :: SelectFrom backend -> S.Qual
+ selectFromToQual :: SelectFrom 'Postgres -> S.Qual
selectFromToQual = \case
FromTable tn -> S.QualTable tn
FromIdentifier i -> S.QualifiedIdentifier i Nothing
@@ -340,22 +340,22 @@ mkSimilarArrayFields annFields maybeOrderBys =
Just (riName ri, mkOrderByFieldName $ riName ri)
fetchAggOrderByRels _ = Nothing

- getArrayRelNameAndSelectArgs :: ArraySelectG backend v -> (RelName, SelectArgsG backend v)
+ getArrayRelNameAndSelectArgs :: ArraySelectG 'Postgres v -> (RelName, SelectArgsG 'Postgres v)
getArrayRelNameAndSelectArgs = \case
ASSimple r -> (aarRelationshipName r, _asnArgs $ aarAnnSelect r)
ASAggregate r -> (aarRelationshipName r, _asnArgs $ aarAnnSelect r)
ASConnection r -> (aarRelationshipName r, _asnArgs $ _csSelect $ aarAnnSelect r)

- getAnnArr :: (a, AnnFieldG backend v) -> Maybe (a, ArraySelectG backend v)
+ getAnnArr :: (a, AnnFieldG 'Postgres v) -> Maybe (a, ArraySelectG 'Postgres v)
getAnnArr (f, annFld) = case annFld of
AFArrayRelation (ASConnection _) -> Nothing
AFArrayRelation ar -> Just (f, ar)
_ -> Nothing


withWriteJoinTree
- :: (MonadWriter (JoinTree backend) m)
- => (JoinTree backend -> b -> JoinTree backend)
+ :: (MonadWriter (JoinTree 'Postgres) m)
+ => (JoinTree 'Postgres -> b -> JoinTree 'Postgres)
-> m (a, b)
-> m a
withWriteJoinTree joinTreeUpdater action =
@@ -366,8 +366,8 @@ withWriteJoinTree joinTreeUpdater action =
pure (out, fromJoinTree)

withWriteObjectRelation
- :: (MonadWriter (JoinTree backend) m, Hashable (ObjectRelationSource backend))
- => m ( ObjectRelationSource backend
+ :: (MonadWriter (JoinTree 'Postgres) m)
+ => m ( ObjectRelationSource 'Postgres
, HM.HashMap S.Alias S.SQLExp
, a
)
@@ -418,8 +418,8 @@ withWriteArrayConnection action =
in mempty{_jtArrayConnections = HM.singleton source arraySelectNode}

withWriteComputedFieldTableSet
- :: (MonadWriter (JoinTree backend) m)
- => m ( ComputedFieldTableSetSource
+ :: (MonadWriter (JoinTree 'Postgres) m)
+ => m ( ComputedFieldTableSetSource 'Postgres
, HM.HashMap S.Alias S.SQLExp
, a
)
@@ -442,7 +442,7 @@ processAnnSimpleSelect
-> FieldName
-> PermissionLimitSubQuery
-> AnnSimpleSel 'Postgres
- -> m ( SelectSource
+ -> m ( SelectSource 'Postgres
, HM.HashMap S.Alias S.SQLExp
)
processAnnSimpleSelect sourcePrefixes fieldAlias permLimitSubQuery annSimpleSel = do
@@ -464,7 +464,7 @@ processAnnAggregateSelect
=> SourcePrefixes
-> FieldName
-> AnnAggregateSelect 'Postgres
- -> m ( SelectSource
+ -> m ( SelectSource 'Postgres
, HM.HashMap S.Alias S.SQLExp
, S.Extractor
)
@@ -513,8 +513,8 @@ processAnnAggregateSelect sourcePrefixes fieldAlias annAggSel = do

mkPermissionLimitSubQuery
:: Maybe Int
- -> TableAggregateFields backend
- -> Maybe (NE.NonEmpty (AnnOrderByItem backend))
+ -> TableAggregateFields 'Postgres
+ -> Maybe (NE.NonEmpty (AnnOrderByItem 'Postgres))
-> PermissionLimitSubQuery
mkPermissionLimitSubQuery permLimit aggFields orderBys =
case permLimit of
@@ -589,7 +589,7 @@ processSelectParams
-> PermissionLimitSubQuery
-> TablePerm 'Postgres
-> SelectArgs 'Postgres
- -> m ( SelectSource
+ -> m ( SelectSource 'Postgres
, [(S.Alias, S.SQLExp)]
, Maybe S.SQLExp -- Order by cursor
)
@@ -823,15 +823,15 @@ processAnnFields sourcePrefix fieldAlias similarArrFields annFields = do
pure $ toJSONableExp strfyNum (pgiType col) asText $ withColumnOp colOpM $
S.mkQIdenExp (mkBaseTableAlias sourcePrefix) $ pgiColumn col

- fromScalarComputedField :: ComputedFieldScalarSelect S.SQLExp -> m S.SQLExp
+ fromScalarComputedField :: ComputedFieldScalarSelect 'Postgres S.SQLExp -> m S.SQLExp
fromScalarComputedField computedFieldScalar = do
strfyNum <- ask
pure $ toJSONableExp strfyNum (PGColumnScalar ty) False $ withColumnOp colOpM $
S.SEFunction $ S.FunctionExp fn (fromTableRowArgs sourcePrefix args) Nothing
where
ComputedFieldScalarSelect fn args ty colOpM = computedFieldScalar

- withColumnOp :: Maybe ColumnOp -> S.SQLExp -> S.SQLExp
+ withColumnOp :: Maybe (ColumnOp 'Postgres) -> S.SQLExp -> S.SQLExp
withColumnOp colOpM sqlExp = case colOpM of
Nothing -> sqlExp
Just (ColumnOp opText cExp) -> S.mkSQLOpExp opText sqlExp cExp
@@ -862,7 +862,7 @@ mkJoinCond baseTablepfx colMapn =

generateSQLSelect
:: S.BoolExp -- ^ Pre join condition
- -> SelectSource
+ -> SelectSource 'Postgres
-> SelectNode 'Postgres
-> S.Select
generateSQLSelect joinCondition selectSource selectNode =
@@ -928,7 +928,7 @@ generateSQLSelect joinCondition selectSource selectNode =
in S.FISelectWith (S.Lateral True) selectWith alias

computedFieldToFromItem
- :: (ComputedFieldTableSetSource, SelectNode 'Postgres) -> S.FromItem
+ :: (ComputedFieldTableSetSource 'Postgres, SelectNode 'Postgres) -> S.FromItem
computedFieldToFromItem (computedFieldTableSource, node) =
let ComputedFieldTableSetSource fieldName selectTy source = computedFieldTableSource
internalSelect = generateSQLSelect (S.BELit True) source node
@@ -942,7 +942,7 @@ generateSQLSelect joinCondition selectSource selectNode =
in S.mkLateralFromItem select alias

generateSQLSelectFromArrayNode
- :: SelectSource
+ :: SelectSource 'Postgres
-> ArraySelectNode 'Postgres
-> S.BoolExp
-> S.Select
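
Every change shown in this file pins a previously free `backend` type variable to the promoted 'Postgres tag (and drops a Hashable constraint that is presumably no longer needed once the type is concrete); no runtime behavior changes. A rough, self-contained sketch of the backend-indexing pattern (simplified, invented names, not the engine's actual definitions):

{-# LANGUAGE DataKinds    #-}
{-# LANGUAGE TypeFamilies #-}
module BackendIndexSketch where

-- A promoted tag per supported backend; DataKinds turns the constructors
-- into types such as 'Postgres.
data BackendType = Postgres | MSSQL

-- Each backend chooses its own concrete SQL expression type.
type family SQLExp (b :: BackendType)

data PgSQLExp = PgSELit String | PgSENull
type instance SQLExp 'Postgres = PgSQLExp

-- A type that used to take a free `backend` variable is now indexed, so a
-- signature such as `SelectSource 'Postgres` fixes every component to Postgres.
data SelectSource (b :: BackendType) = SelectSource
  { ssPrefix    :: String
  , ssCondition :: SQLExp b
  }
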
51 changes: 9 additions & 42 deletions server/src-lib/Hasura/GraphQL/Parser/Class.hs
@@ -1,5 +1,10 @@
-- | Classes for monads used during schema construction and query parsing.
- module Hasura.GraphQL.Parser.Class where
+ module Hasura.GraphQL.Parser.Class
+ ( MonadParse (..)
+ , parseError
+ , QueryReusability (..)
+ , module Hasura.GraphQL.Parser.Class
+ ) where

import Hasura.Prelude

@@ -8,14 +13,14 @@ import qualified Language.Haskell.TH as TH
import qualified Language.GraphQL.Draft.Syntax as G

import Data.Has
- import Data.Parser.JSONPath
import Data.Text.Extended
import Data.Tuple.Extended
import GHC.Stack (HasCallStack)
import Type.Reflection (Typeable)

import Hasura.Backends.Postgres.SQL.Types
- import {-# SOURCE #-} Hasura.GraphQL.Parser.Internal.Parser
+ import Hasura.GraphQL.Parser.Class.Parse
+ import Hasura.GraphQL.Parser.Internal.Types
import Hasura.RQL.Types.Error
import Hasura.RQL.Types.Table
import Hasura.RQL.Types.RemoteSchema
@@ -156,7 +161,7 @@ getTableGQLName
getTableGQLName table = do
tableInfo <- askTableInfo table
let tableCustomName = _tcCustomName . _tciCustomConfig . _tiCoreInfo $ tableInfo
- maybe (qualifiedObjectToName table) pure tableCustomName
+ tableCustomName `onNothing` qualifiedObjectToName table

-- | A wrapper around 'memoizeOn' that memoizes a function by using its argument
-- as the key.
@@ -189,41 +194,3 @@ memoize4
-> (a -> b -> c -> d -> m (Parser k n e))
-> (a -> b -> c -> d -> m (Parser k n e))
memoize4 name = curry4 . memoize name . uncurry4

- -- | A class that provides functionality for parsing GraphQL queries, i.e.
- -- running a fully-constructed 'Parser'.
- class Monad m => MonadParse m where
- withPath :: (JSONPath -> JSONPath) -> m a -> m a
- -- | Not the full power of 'MonadError' because parse errors cannot be
- -- caught.
- parseErrorWith :: Code -> Text -> m a
- -- | See 'QueryReusability'.
- markNotReusable :: m ()
-
- parseError :: MonadParse m => Text -> m a
- parseError = parseErrorWith ValidationFailed
-
- -- | Tracks whether or not a query is /reusable/. Reusable queries are nice,
- -- since we can cache their resolved ASTs and avoid re-resolving them if we
- -- receive an identical query. However, we can’t always safely reuse queries if
- -- they have variables, since some variable values can affect the generated SQL.
- -- For example, consider the following query:
- --
- -- > query users_where($condition: users_bool_exp!) {
- -- > users(where: $condition) {
- -- > id
- -- > }
- -- > }
- --
- -- Different values for @$condition@ will produce completely different queries,
- -- so we can’t reuse its plan (unless the variable values were also all
- -- identical, of course, but we don’t bother caching those).
- data QueryReusability = Reusable | NotReusable
-
- instance Semigroup QueryReusability where
- NotReusable <> _ = NotReusable
- _ <> NotReusable = NotReusable
- Reusable <> Reusable = Reusable
-
- instance Monoid QueryReusability where
- mempty = Reusable
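
The net effect on this module: MonadParse, parseError, and QueryReusability move out to Hasura.GraphQL.Parser.Class.Parse (added below), and Class re-exports them alongside its own definitions via the self-referencing export list, presumably so existing importers keep working unchanged. A small generic sketch of that re-export idiom (illustrative module names only, not this repo's code):

-- Inner.hs: the leaf module; it must not import anything that imports it back.
module Inner where

newtype Token = Token String

-- Outer.hs: re-exports Inner's names alongside everything defined locally,
-- mirroring the export list added to Hasura.GraphQL.Parser.Class above.
module Outer
  ( Token (..)     -- re-exported from Inner
  , module Outer   -- every definition local to Outer
  ) where

import Inner

describe :: Token -> String
describe (Token t) = "token: " ++ t
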
5 changes: 0 additions & 5 deletions server/src-lib/Hasura/GraphQL/Parser/Class.hs-boot

This file was deleted.
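
Deleting Class.hs-boot removes the forward-declaration half of the old import cycle: with MonadParse split into its own leaf module, nothing needs a {-# SOURCE #-} view of Class any more. For background, this is the generic mechanism an .hs-boot file provides (toy modules, unrelated to this repo):

-- A.hs-boot: a minimal forward declaration of A, enough for GHC to type-check B.
module A where
data Thing

-- B.hs: imports the boot declaration instead of the real A, cutting the cycle.
module B where
import {-# SOURCE #-} A (Thing)
wrap :: Thing -> [Thing]
wrap t = [t]

-- A.hs: the real module is then free to import B.
module A where
import B (wrap)
data Thing = Thing
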

46 changes: 46 additions & 0 deletions server/src-lib/Hasura/GraphQL/Parser/Class/Parse.hs
@@ -0,0 +1,46 @@
-- | Classes for monads used during schema construction and query parsing.
module Hasura.GraphQL.Parser.Class.Parse where

import Hasura.Prelude

import Data.Parser.JSONPath

import Hasura.RQL.Types.Error

-- | A class that provides functionality for parsing GraphQL queries, i.e.
-- running a fully-constructed 'Parser'.
class Monad m => MonadParse m where
withPath :: (JSONPath -> JSONPath) -> m a -> m a
-- | Not the full power of 'MonadError' because parse errors cannot be
-- caught.
parseErrorWith :: Code -> Text -> m a
-- | See 'QueryReusability'.
markNotReusable :: m ()

parseError :: MonadParse m => Text -> m a
parseError = parseErrorWith ValidationFailed

-- | Tracks whether or not a query is /reusable/. Reusable queries are nice,
-- since we can cache their resolved ASTs and avoid re-resolving them if we
-- receive an identical query. However, we can’t always safely reuse queries if
-- they have variables, since some variable values can affect the generated SQL.
-- For example, consider the following query:
--
-- > query users_where($condition: users_bool_exp!) {
-- > users(where: $condition) {
-- > id
-- > }
-- > }
--
-- Different values for @$condition@ will produce completely different queries,
-- so we can’t reuse its plan (unless the variable values were also all
-- identical, of course, but we don’t bother caching those).
data QueryReusability = Reusable | NotReusable

instance Semigroup QueryReusability where
NotReusable <> _ = NotReusable
_ <> NotReusable = NotReusable
Reusable <> Reusable = Reusable

instance Monoid QueryReusability where
mempty = Reusable
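
The new module is a verbatim extraction of the MonadParse class and QueryReusability machinery deleted from Class above; placing them in a leaf module lets both Class and the parser internals import them without a cycle. One consequence of the Monoid instance, in an assumed usage sketch (binding names invented): folding the reusability of sub-parsers yields NotReusable as soon as any one of them is.

import Hasura.GraphQL.Parser.Class.Parse (QueryReusability (..))

subParserReusability :: [QueryReusability]
subParserReusability = [Reusable, NotReusable, Reusable]

-- NotReusable is absorbing, so a single non-reusable sub-parser marks the whole
-- query as not reusable; mempty (Reusable) is the identity for empty folds.
overallReusability :: QueryReusability
overallReusability = mconcat subParserReusability   -- evaluates to NotReusable
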
33 changes: 18 additions & 15 deletions server/src-lib/Hasura/GraphQL/Parser/Collect.hs
@@ -12,19 +12,18 @@ module Hasura.GraphQL.Parser.Collect
( collectFields
) where

- import Hasura.Prelude
+ import Hasura.Prelude

- import qualified Data.HashMap.Strict.Extended as Map
- import qualified Data.HashMap.Strict.InsOrd as OMap
+ import qualified Data.HashMap.Strict.Extended as Map
+ import qualified Data.HashMap.Strict.InsOrd as OMap

- import Data.List.Extended (duplicates)
- import Language.GraphQL.Draft.Syntax
+ import Data.List.Extended (duplicates)
+ import Language.GraphQL.Draft.Syntax

- import Data.Text.Extended
- import Hasura.GraphQL.Parser.Class
- import {-# SOURCE #-} Hasura.GraphQL.Parser.Internal.Parser (boolean, runParser)
- import Hasura.GraphQL.Parser.Schema
- import Hasura.GraphQL.Utils (showNames)
+ import Data.Text.Extended
+ import Hasura.GraphQL.Parser.Class
+ import Hasura.GraphQL.Parser.Schema
+ import Hasura.GraphQL.Utils (showNames)

-- | Collects the effective set of fields queried by a selection set by
-- flattening fragments and merging duplicate fields.
@@ -33,10 +32,12 @@ collectFields
=> t Name
-- ^ The names of the object types and interface types the 'SelectionSet' is
-- selecting against.
+ -> (InputValue Variable -> m Bool)
+ -- ^ Please pass 'runParser boolean' here (passed explicitly to avoid cyclic imports)
-> SelectionSet NoFragments Variable
-> m (InsOrdHashMap Name (Field NoFragments Variable))
- collectFields objectTypeNames selectionSet =
- mergeFields =<< flattenSelectionSet objectTypeNames selectionSet
+ collectFields objectTypeNames boolParser selectionSet =
+ mergeFields =<< flattenSelectionSet objectTypeNames boolParser selectionSet

-- | Flattens inline fragments in a selection set. For example,
--
@@ -92,9 +93,11 @@ flattenSelectionSet
:: (MonadParse m, Foldable t)
=> t Name
-- ^ The name of the object type the 'SelectionSet' is selecting against.
+ -> (InputValue Variable -> m Bool)
+ -- ^ Please pass 'runParser boolean' here (passed explicitly to avoid cyclic imports)
-> SelectionSet NoFragments Variable
-> m [Field NoFragments Variable]
- flattenSelectionSet objectTypeNames = fmap concat . traverse flattenSelection
+ flattenSelectionSet objectTypeNames boolParser = fmap concat . traverse flattenSelection
where
-- The easy case: just a single field.
flattenSelection (SelectionField field) = do
@@ -130,7 +133,7 @@ flattenSelectionSet objectTypeNames = fmap concat . traverse flattenSelection

flattenInlineFragment InlineFragment{ _ifDirectives, _ifSelectionSet } = do
validateDirectives _ifDirectives
- flattenSelectionSet objectTypeNames _ifSelectionSet
+ flattenSelectionSet objectTypeNames boolParser _ifSelectionSet

applyInclusionDirectives directives continue
| Just directive <- find ((== $$(litName "include")) . _dName) directives
@@ -142,7 +145,7 @@ flattenSelectionSet objectTypeNames = fmap concat . traverse flattenSelection
applyInclusionDirective adjust Directive{ _dName, _dArguments } continue = do
ifArgument <- Map.lookup $$(litName "if") _dArguments `onNothing`
parseError ("missing \"if\" argument for " <> _dName <<> " directive")
- value <- runParser boolean $ GraphQLValue ifArgument
+ value <- boolParser $ GraphQLValue ifArgument
if adjust value then continue else pure []

validateDirectives directives =
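
The remaining edits in this file are the same dependency inversion visible in the signatures above: Collect no longer SOURCE-imports the parser internals for `boolean` and `runParser`; callers now hand the boolean parser in as an argument. A hypothetical call site, assuming the caller lives in a module that can already import the internals (only collectFields, boolean, and runParser appear in this diff; the wrapper name is invented):

import Hasura.GraphQL.Parser.Collect (collectFields)
import Hasura.GraphQL.Parser.Internal.Parser (boolean, runParser)

collectObjectFields objectTypeNames selectionSet =
  collectFields objectTypeNames (runParser boolean) selectionSet
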