
Commit

Code cleanup and correction of typos
Andy2003 committed Nov 16, 2021
1 parent 7ac1c04 commit 086f6e7
Showing 36 changed files with 62 additions and 69 deletions.
4 changes: 2 additions & 2 deletions core/src/main/kotlin/org/neo4j/graphql/AugmentationHandler.kt
@@ -162,7 +162,7 @@ abstract class AugmentationHandler(
} else {
FieldOperator.forType(typeDefinition, field.type.inner().isNeo4jType())
.forEach { op -> builder.addFilterField(op.fieldName(field.name), op.list, filterType, field.description) }
if (typeDefinition.isNeo4jSpatialType() == true) {
if (typeDefinition.isNeo4jSpatialType()) {
val distanceFilterType = getSpatialDistanceFilter(neo4jTypeDefinitionRegistry.getUnwrappedType(filterType) as TypeDefinition<*>)
FieldOperator.forType(distanceFilterType, true)
.forEach { op -> builder.addFilterField(op.fieldName(field.name + NEO4j_POINT_DISTANCE_FILTER_SUFFIX), op.list, NEO4j_POINT_DISTANCE_FILTER) }
@@ -301,7 +301,7 @@ abstract class AugmentationHandler(

fun Type<*>.resolve(): TypeDefinition<*>? = getTypeFromAnyRegistry(name())
fun Type<*>.isScalar(): Boolean = resolve() is ScalarTypeDefinition
fun Type<*>.isNeo4jType(): Boolean = name()
private fun Type<*>.isNeo4jType(): Boolean = name()
?.takeIf {
!ScalarInfo.GRAPHQL_SPECIFICATION_SCALARS_DEFINITIONS.containsKey(it)
&& it.startsWith("_Neo4j") // TODO remove this check by refactoring neo4j input types
@@ -32,7 +32,7 @@ object DynamicProperties {
return when (input) {
!is Value<*> -> throw CoercingParseLiteralException("Expected AST type 'StringValue' but was '${input::class.java.simpleName}'.")
is NullValue -> null
is ObjectValue -> input.objectFields.map { it.name to parseNested(it.value, variables) }.toMap()
is ObjectValue -> input.objectFields.associate { it.name to parseNested(it.value, variables) }
else -> Assert.assertShouldNeverHappen("Only maps structures are expected")
}
}
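
The `map { … }.toMap()` to `associate { … }` swap above is behaviour-preserving; a small self-contained sketch of the equivalence (the data class is an illustrative stand-in, not graphql-java's ObjectField):

[source,kotlin]
----
data class ObjectFieldSketch(val name: String, val value: Any?)

fun main() {
    val fields = listOf(ObjectFieldSketch("lat", 12.5), ObjectFieldSketch("lng", 42.0))

    // Previous idiom: build a list of pairs, then copy it into a map.
    val viaToMap = fields.map { it.name to it.value }.toMap()

    // New idiom: associate builds the map directly, without the intermediate list.
    val viaAssociate = fields.associate { it.name to it.value }

    println(viaToMap == viaAssociate) // true
}
----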
@@ -34,3 +34,4 @@ fun String.asDescription() = Description(this, null, this.contains("\n"))
fun String.capitalize(): String = replaceFirstChar { if (it.isLowerCase()) it.titlecase(Locale.getDefault()) else it.toString() }
fun String.decapitalize(): String = replaceFirstChar { it.lowercase(Locale.getDefault()) }
fun String.toUpperCase(): String = uppercase(Locale.getDefault())
fun String.toLowerCase(): String = lowercase(Locale.getDefault())
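
These locale-explicit wrappers replace the deprecated stdlib helpers; a standalone sketch of the same behaviour (extension names altered slightly so the snippet compiles on its own without clashing with the deprecated members):

[source,kotlin]
----
import java.util.Locale

fun String.capitalizeFirst(): String =
    replaceFirstChar { if (it.isLowerCase()) it.titlecase(Locale.getDefault()) else it.toString() }

fun String.decapitalizeFirst(): String = replaceFirstChar { it.lowercase(Locale.getDefault()) }

fun main() {
    println("movie".capitalizeFirst())               // Movie
    println("Movie".decapitalizeFirst())             // movie
    println("movie".uppercase(Locale.getDefault()))  // MOVIE
    println("MOVIE".lowercase(Locale.getDefault()))  // movie
}
----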
11 changes: 3 additions & 8 deletions core/src/main/kotlin/org/neo4j/graphql/Neo4jTypes.kt
@@ -73,8 +73,7 @@ open class Neo4jConverter(
name: String,
val prefixedName: String = "_Neo4j$name",
val typeDefinition: TypeDefinition = TypeDefinition(name, prefixedName)
) : Neo4jSimpleConverter(name) {
}
) : Neo4jSimpleConverter(name)

open class Neo4jSimpleConverter(val name: String) {
protected fun toExpression(parameter: Expression): Expression {
@@ -115,19 +114,15 @@ private val neo4jConverter = listOf(
Neo4jTimeConverter("Time"),
Neo4jTimeConverter("LocalDateTime"),
Neo4jPointConverter("Point"),
)
.map { it.prefixedName to it }
.toMap()
).associateBy { it.prefixedName }

private val neo4jScalarConverter = listOf(
Neo4jTemporalConverter("LocalTime"),
Neo4jTemporalConverter("Date"),
Neo4jTemporalConverter("DateTime"),
Neo4jTemporalConverter("Time"),
Neo4jTemporalConverter("LocalDateTime")
)
.map { it.name to it }
.toMap()
).associateBy { it.name }

val NEO4j_TEMPORAL_TYPES = neo4jScalarConverter.keys

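The `associateBy` refactor above is equivalent to the removed `.map { it.prefixedName to it }.toMap()` chain; a minimal sketch with a stand-in converter type:

[source,kotlin]
----
data class ConverterSketch(val name: String, val prefixedName: String = "_Neo4j$name")

fun main() {
    // associateBy keys each element by the selector and builds the map directly.
    val byPrefixedName = listOf(ConverterSketch("Date"), ConverterSketch("Time"))
        .associateBy { it.prefixedName }

    println(byPrefixedName.keys)                // [_Neo4jDate, _Neo4jTime]
    println(byPrefixedName["_Neo4jDate"]?.name) // Date
}
----
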
2 changes: 1 addition & 1 deletion core/src/main/kotlin/org/neo4j/graphql/SchemaBuilder.kt
@@ -230,7 +230,7 @@ class SchemaBuilder(
enhancedRegistry.add(ObjectTypeDefinition.newObjectTypeDefinition().name("Query").build())

if (schemaDefinition != null) {
// otherwise adding a transform schema would fail
// otherwise, adding a transform schema would fail
enhancedRegistry.remove(schemaDefinition)
} else {
schemaDefinition = SchemaDefinition.newSchemaDefinition().build()
2 changes: 1 addition & 1 deletion core/src/main/kotlin/org/neo4j/graphql/SchemaConfig.kt
@@ -21,7 +21,7 @@ data class SchemaConfig @JvmOverloads constructor(

/**
* if enabled the `filter` argument will be named `where` and the input type will be named `<typeName>Where`.
* additionally the separated filter arguments will no longer be generated.
* additionally, the separated filter arguments will no longer be generated.
*/
val useWhereFilter: Boolean = false,

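A hedged sketch of what this flag controls, using a stand-in type (the real SchemaConfig carries many more options and is only partially visible in this hunk):

[source,kotlin]
----
// Stand-in for the flag documented above.
data class SchemaConfigSketch(val useWhereFilter: Boolean = false)

fun main() {
    val config = SchemaConfigSketch(useWhereFilter = true)
    // With the flag enabled, a type `Movie` is filtered through a single
    // `where: MovieWhere` argument instead of the `filter` argument plus the
    // separately generated per-field filter arguments.
    println(config) // SchemaConfigSketch(useWhereFilter=true)
}
----
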
@@ -9,10 +9,7 @@ import graphql.schema.GraphQLType
import org.neo4j.cypherdsl.core.Statement
import org.neo4j.cypherdsl.core.renderer.Configuration
import org.neo4j.cypherdsl.core.renderer.Renderer
import org.neo4j.graphql.Cypher
import org.neo4j.graphql.SchemaConfig
import org.neo4j.graphql.Translator
import org.neo4j.graphql.aliasOrName
import org.neo4j.graphql.*
import org.neo4j.graphql.handler.projection.ProjectionBase

/**
@@ -120,7 +120,7 @@ abstract class BaseDataFetcherForContainer(schemaConfig: SchemaConfig) : BaseDat

/**
* @param propertyName the name used in neo4j
* @param accessorFactory an factory for crating an expression to access the property
* @param accessorFactory a factory for crating an expression to access the property
*/
class PropertyAccessor(
val propertyName: String,
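
The class body is collapsed in this view, so here is a simplified sketch of the accessor-factory idea the KDoc describes (string-based stand-ins replace the cypher-dsl Expression types used by the real class):

[source,kotlin]
----
class PropertyAccessorSketch(
    val propertyName: String,
    private val accessorFactory: (variable: String) -> String
) {
    // The stored factory turns a variable name into the expression reading the property.
    fun toExpression(variable: String): String = accessorFactory(variable)
}

fun main() {
    val accessor = PropertyAccessorSketch("releaseDate") { variable -> "$variable.releaseDate" }
    println(accessor.toExpression("movie")) // movie.releaseDate
}
----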
@@ -21,7 +21,7 @@ typealias WhereClauseFactory = (
typealias ConditionBuilder = (ExposesWith) -> OrderableOngoingReadingAndWithWithoutWhere

/**
* This its a specialized handler that uses an alternative approach for filtering. By using multiple MATCH clauses,
* This is a specialized handler that uses an alternative approach for filtering. By using multiple MATCH clauses,
* this can facilitate the use of optimizations within the neo4j database, which can lead to significant performance
* improvements for large data sets.
*
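
To make the doc comment concrete, a hedged illustration of "filter via WHERE" versus "filter via additional MATCH clauses" (hand-written Cypher strings for contrast only, not the handler's actual output):

[source,kotlin]
----
fun main() {
    // Filtering expressed as a WHERE clause on a single pattern.
    val filterInWhere = """
        MATCH (movie:Movie)<-[:ACTED_IN]-(actor:Actor)
        WHERE actor.name = ${'$'}name
        RETURN movie
    """.trimIndent()

    // The same filter expressed as a separate MATCH clause, the shape this handler favours.
    val filterAsSeparateMatch = """
        MATCH (actor:Actor { name: ${'$'}name })
        MATCH (movie:Movie)<-[:ACTED_IN]-(actor)
        RETURN movie
    """.trimIndent()

    println(filterInWhere)
    println(filterAsSeparateMatch)
}
----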
@@ -14,7 +14,7 @@ import org.neo4j.graphql.handler.BaseDataFetcherForContainer
/**
* This is a base class for all handler acting on relations / edges
*/
abstract class BaseRelationHandler(val prefix: String, schemaConfig: SchemaConfig) : BaseDataFetcherForContainer(schemaConfig) {
abstract class BaseRelationHandler(private val prefix: String, schemaConfig: SchemaConfig) : BaseDataFetcherForContainer(schemaConfig) {

lateinit var relation: RelationshipInfo<GraphQLFieldsContainer>
lateinit var startId: RelatedField
Expand All @@ -27,7 +27,7 @@ abstract class BaseRelationHandler(val prefix: String, schemaConfig: SchemaConfi
)

abstract class BaseRelationFactory(
val prefix: String,
private val prefix: String,
schemaConfig: SchemaConfig,
typeDefinitionRegistry: TypeDefinitionRegistry,
neo4jTypeDefinitionRegistry: TypeDefinitionRegistry
6 changes: 3 additions & 3 deletions core/src/main/kotlin/org/neo4j/graphql/parser/QueryParser.kt
@@ -11,7 +11,7 @@ import org.neo4j.graphql.handler.projection.ProjectionBase
typealias CypherDSL = org.neo4j.cypherdsl.core.Cypher

/**
* An internal representation of all the filtering passed to an graphql field
* An internal representation of all the filtering passed to a graphql field
*/
class ParsedQuery(
val fieldPredicates: List<FieldPredicate>,
@@ -107,7 +107,7 @@ object QueryParser {
}

/**
* This parser takes all non-filter arguments of a graphql-field an transform it to the internal [ParsedQuery]-representation
* This parser takes all non-filter arguments of a graphql-field and transform it to the internal [ParsedQuery]-representation
*/
fun parseArguments(arguments: Map<String, Any>, fieldDefinition: GraphQLFieldDefinition, type: GraphQLFieldsContainer): ParsedQuery {
// Map of all queried fields
@@ -155,7 +155,7 @@ object QueryParser {
.mapNotNull { (predicate, queryFieldName) ->
queriedFields[queryFieldName]?.let { (index, objectField) ->
if (predicate.requireParam xor (objectField != null)) {
// if we got a value but the predicate requires none
// if we got a value but the predicate requires none,
// or we got a no value but the predicate requires one
// we skip this operator
null
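
The corrected comment describes an exclusive-or; a tiny truth-table sketch of the same skip rule:

[source,kotlin]
----
fun main() {
    // The operator is kept only when "requires a parameter" and "a value was supplied" agree.
    fun skip(requireParam: Boolean, valueSupplied: Boolean) = requireParam xor valueSupplied

    println(skip(requireParam = true, valueSupplied = true))   // false -> keep
    println(skip(requireParam = true, valueSupplied = false))  // true  -> skip
    println(skip(requireParam = false, valueSupplied = true))  // true  -> skip
    println(skip(requireParam = false, valueSupplied = false)) // false -> keep
}
----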
@@ -52,9 +52,9 @@ class CypherTestSuite(fileName: String, val neo4j: Neo4j? = null) : AsciiDocTest
if (neo4j != null) {
val testData = globalBlocks[TEST_DATA_MARKER]
var response = codeBlocks[GRAPHQL_RESPONSE_IGNORE_ORDER_MARKER]
var ignoreOrder = false;
var ignoreOrder = false
if (response != null) {
ignoreOrder = true;
ignoreOrder = true
} else {
response = getOrCreateBlock(codeBlocks, GRAPHQL_RESPONSE_MARKER, "GraphQL-Response")
}
@@ -1,7 +1,6 @@
package org.neo4j.graphql.utils

import graphql.language.InterfaceTypeDefinition
import graphql.language.ScalarTypeDefinition
import graphql.schema.GraphQLScalarType
import graphql.schema.GraphQLSchema
import graphql.schema.GraphQLType
Expand All @@ -14,7 +13,10 @@ import org.junit.jupiter.api.Assertions
import org.junit.jupiter.api.Assumptions
import org.junit.jupiter.api.DynamicNode
import org.junit.jupiter.api.DynamicTest
import org.neo4j.graphql.*
import org.neo4j.graphql.NoOpCoercing
import org.neo4j.graphql.SchemaBuilder
import org.neo4j.graphql.SchemaConfig
import org.neo4j.graphql.requiredName
import org.opentest4j.AssertionFailedError
import java.util.*
import java.util.regex.Pattern
20 changes: 10 additions & 10 deletions core/src/test/resources/cypher-tests.adoc
@@ -4421,7 +4421,7 @@ query {
MATCH (`temporalNode`:`TemporalNode`) WITH `temporalNode` ORDER BY temporalNode.datetime DESC RETURN `temporalNode` {_id: ID(`temporalNode`),datetime: { year: `temporalNode`.datetime.year , month: `temporalNode`.datetime.month , day: `temporalNode`.datetime.day , hour: `temporalNode`.datetime.hour , minute: `temporalNode`.datetime.minute , second: `temporalNode`.datetime.second , millisecond: `temporalNode`.datetime.millisecond , microsecond: `temporalNode`.datetime.microsecond , nanosecond: `temporalNode`.datetime.nanosecond , timezone: `temporalNode`.datetime.timezone , formatted: toString(`temporalNode`.datetime) },temporalNodes: [sortedElement IN apoc.coll.sortMulti([(`temporalNode`)-[:`TEMPORAL`]->(`temporalNode_temporalNodes`:`TemporalNode`) | temporalNode_temporalNodes {_id: ID(`temporalNode_temporalNodes`),datetime: `temporalNode_temporalNodes`.datetime,time: `temporalNode_temporalNodes`.time,temporalNodes: [sortedElement IN apoc.coll.sortMulti([(`temporalNode_temporalNodes`)-[:`TEMPORAL`]->(`temporalNode_temporalNodes_temporalNodes`:`TemporalNode`) | temporalNode_temporalNodes_temporalNodes {_id: ID(`temporalNode_temporalNodes_temporalNodes`),datetime: `temporalNode_temporalNodes_temporalNodes`.datetime,time: `temporalNode_temporalNodes_temporalNodes`.time}], ['datetime','time']) | sortedElement { .*, datetime: {year: sortedElement.datetime.year,formatted: toString(sortedElement.datetime)},time: {hour: sortedElement.time.hour}}][1..3] }], ['^datetime']) | sortedElement { .*, datetime: {year: sortedElement.datetime.year,month: sortedElement.datetime.month,day: sortedElement.datetime.day,hour: sortedElement.datetime.hour,minute: sortedElement.datetime.minute,second: sortedElement.datetime.second,millisecond: sortedElement.datetime.millisecond,microsecond: sortedElement.datetime.microsecond,nanosecond: sortedElement.datetime.nanosecond,timezone: sortedElement.datetime.timezone,formatted: toString(sortedElement.datetime)},time: {hour: sortedElement.time.hour}}] } AS `temporalNode`
----

=== Handle @cypher field with String payload using cypherParams
=== Handle the @cypher field with String payload using cypherParams

.GraphQL-Query
[source,graphql]
@@ -4541,7 +4541,7 @@ query {
WITH apoc.cypher.runFirstColumn("RETURN { userId: $cypherParams.currentUserId }", {offset:$offset, first:$first, cypherParams: $cypherParams}, True) AS x UNWIND x AS `currentUserId` RETURN `currentUserId` { .userId } AS `currentUserId`
----
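
For readers skimming the renamed test headings, a hedged sketch of the kind of @cypher field these sections exercise (the SDL is illustrative only, the concrete test schemas are collapsed in this view, and it is embedded in Kotlin to keep a single example language):

[source,kotlin]
----
fun main() {
    // General shape of a @cypher field that reads from cypherParams, as in the test above.
    val sdl = """
        type Query {
            currentUserId: String
              @cypher(statement: "RETURN ${'$'}cypherParams.currentUserId")
        }
    """.trimIndent()
    println(sdl)
}
----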

=== Handle @cypher query with Boolean payload
=== Handle the @cypher query with Boolean payload

.GraphQL-Query
[source,graphql]
Expand All @@ -4563,7 +4563,7 @@ query {
WITH apoc.cypher.runFirstColumn("RETURN true", {offset:$offset, first:$first, cypherParams: $cypherParams}, True) AS x UNWIND x AS `boolean` RETURN `boolean`
----

=== Handle @cypher query with Int payload
=== Handle the @cypher query with Int payload

.GraphQL-Query
[source,graphql]
Expand All @@ -4585,7 +4585,7 @@ query {
WITH apoc.cypher.runFirstColumn("RETURN 1", {offset:$offset, first:$first, cypherParams: $cypherParams}, True) AS x UNWIND x AS `int` RETURN `int`
----

=== Handle @cypher query with Float payload
=== Handle the @cypher query with Float payload

.GraphQL-Query
[source,graphql]
Expand All @@ -4607,7 +4607,7 @@ query {
WITH apoc.cypher.runFirstColumn("RETURN 3.14", {offset:$offset, first:$first, cypherParams: $cypherParams}, True) AS x UNWIND x AS `float` RETURN `float`
----

=== Handle @cypher query with String list payload
=== Handle the @cypher query with String list payload

.GraphQL-Query
[source,graphql]
Expand All @@ -4629,7 +4629,7 @@ query {
WITH apoc.cypher.runFirstColumn("UNWIND ['hello', 'world'] AS stringList RETURN stringList", {offset:$offset, first:$first, cypherParams: $cypherParams}, True) AS x UNWIND x AS `string` RETURN `string`
----

=== Handle @cypher query with Int list payload
=== Handle the @cypher query with Int list payload

.GraphQL-Query
[source,graphql]
Expand All @@ -4651,7 +4651,7 @@ query {
WITH apoc.cypher.runFirstColumn("UNWIND [1, 2, 3] AS intList RETURN intList", {offset:$offset, first:$first, cypherParams: $cypherParams}, True) AS x UNWIND x AS `int` RETURN `int`
----

=== Handle @cypher query with Temporal payload
=== Handle the @cypher query with Temporal payload

.GraphQL-Query
[source,graphql]
@@ -4856,7 +4856,7 @@ CALL apoc.cypher.doIt("RETURN $strInputArg.strArg", {strArg:$strArg, strInputArg
RETURN `string`
----

=== Handle @cypher query with parameterized input type argument
=== Handle the @cypher query with parameterized input type argument

.GraphQL-Query
[source,graphql]
Expand All @@ -4878,7 +4878,7 @@ query someQuery ($strArg: String, $strInputArg: strInput) {
WITH apoc.cypher.runFirstColumn("RETURN $strInputArg.strArg", {offset:$offset, first:$first, strArg:$strArg, strInputArg:$strInputArg, cypherParams: $cypherParams}, True) AS x UNWIND x AS `string` RETURN `string`
----

=== Handle @cypher field on root query type with scalar payload, no args
=== Handle the @cypher field on root query type with scalar payload, no args

.GraphQL-Query
[source,graphql]
Expand All @@ -4902,7 +4902,7 @@ query {
MATCH (`temporalNode`:`TemporalNode`) RETURN `temporalNode` {computedTimestamp: apoc.cypher.runFirstColumn("RETURN toString(datetime())", {this: temporalNode}, false)} AS `temporalNode`
----

=== Handle @cypher field with parameterized value for field of input type argument
=== Handle the @cypher field with parameterized value for field of input type argument

.GraphQL-Query
[source,graphql]
2 changes: 1 addition & 1 deletion core/src/test/resources/issues/gh-112.adoc
@@ -1,6 +1,6 @@
:toc:

= Github Issue #112: deleting bidirectional relationships
= GitHub Issue #112: deleting bidirectional relationships

== Schema

2 changes: 1 addition & 1 deletion core/src/test/resources/issues/gh-147.adoc
@@ -1,6 +1,6 @@
:toc:

= Github Issue #147: Filtering on Directive in Combination with Filtering on Type Field
= GitHub Issue #147: Filtering on Directive in Combination with Filtering on Type Field

== Schema

2 changes: 1 addition & 1 deletion core/src/test/resources/issues/gh-149.adoc
@@ -1,6 +1,6 @@
:toc:

= Github Issue #149: OptimizationStrategy FILTER_AS_MATCH breaks offset/first input attributes for queries
= GitHub Issue #149: OptimizationStrategy FILTER_AS_MATCH breaks offset/first input attributes for queries

== Schema

2 changes: 1 addition & 1 deletion core/src/test/resources/issues/gh-160.adoc
@@ -1,6 +1,6 @@
:toc:

= Github Issue #160: Incorrect query translation for @property
= GitHub Issue #160: Incorrect query translation for @property

== Schema

2 changes: 1 addition & 1 deletion core/src/test/resources/issues/gh-163.adoc
@@ -1,6 +1,6 @@
:toc:

= Github Issue #163: problem with optimized FILTER_AS_MATCH strategy
= GitHub Issue #163: problem with optimized FILTER_AS_MATCH strategy

== Schema

2 changes: 1 addition & 1 deletion core/src/test/resources/issues/gh-169.adoc
@@ -1,6 +1,6 @@
:toc:

= Github Issue #169: Default values for sorting and paging on fields should be respected
= GitHub Issue #169: Default values for sorting and paging on fields should be respected

== Schema

2 changes: 1 addition & 1 deletion core/src/test/resources/issues/gh-170.adoc
@@ -1,6 +1,6 @@
:toc:

= Github Issue #170: wrong mapping starting query from rich relationship
= GitHub Issue #170: wrong mapping starting query from rich relationship

== Schema

@@ -1,6 +1,6 @@
:toc:

= Github Issue #190: cypher directive with passThrough
= GitHub Issue #190: cypher directive with passThrough

== Schema

2 changes: 1 addition & 1 deletion core/src/test/resources/issues/gh-210.adoc
@@ -1,6 +1,6 @@
:toc:

= Github Issue #210: Nested filters are not working
= GitHub Issue #210: Nested filters are not working

== Schema

2 changes: 1 addition & 1 deletion core/src/test/resources/issues/gh-27.adoc
@@ -1,6 +1,6 @@
:toc:

= Github Issue #27: Filtering on multi-relationship existence
= GitHub Issue #27: Filtering on multi-relationship existence

== Schema

2 changes: 1 addition & 1 deletion core/src/test/resources/issues/gh-45.adoc
@@ -1,6 +1,6 @@
:toc:

= Github Issue #45: A schema for tree-like structure fails with duplicate fields
= GitHub Issue #45: A schema for tree-like structure fails with duplicate fields

== Schema

2 changes: 1 addition & 1 deletion core/src/test/resources/issues/gh-47.adoc
@@ -1,6 +1,6 @@
:toc:

= Github Issue #47: lack of filtering for null values
= GitHub Issue #47: lack of filtering for null values

== Schema

2 changes: 1 addition & 1 deletion core/src/test/resources/issues/gh-65.adoc
@@ -1,6 +1,6 @@
:toc:

= Github Issue #65: Filtering on children through parent fields generates logically invalid cypher
= GitHub Issue #65: Filtering on children through parent fields generates logically invalid cypher

== Schema

2 changes: 1 addition & 1 deletion core/src/test/resources/issues/gh-85.adoc
@@ -1,6 +1,6 @@
:toc:

= Github Issue #85: not-supported cypher directive with scalar result
= GitHub Issue #85: not-supported cypher directive with scalar result

== Schema

2 changes: 1 addition & 1 deletion core/src/test/resources/logback-test.xml
@@ -2,7 +2,7 @@

<appender name="STDOUT" class="ch.qos.logback.core.ConsoleAppender">
<encoder>
<pattern>%-4relative [%thread] %-5level %logger{35} - %msg %n</pattern>
<pattern>%-4relative [%thread] %-5level %logger{350} - %msg %n</pattern>
</encoder>
</appender>
