diff --git a/README.md b/README.md index c6690a1..e3846d5 100644 --- a/README.md +++ b/README.md @@ -73,10 +73,12 @@ Your build can contain multiple schemas. They are stored in the `graphqlSchemas` This allows to compare arbitrary schemas, write schema.json files for each of them and validate your queries against them. -There are already two schemas predefined. The `build` schema and the `prod` schema. +There is already one schema predefined. The `build` schema is defined by the `graphqlSchemaGen` task. +You can configure the `graphqlSchemas` label with -* The `build` schema is defined by the `graphqlSchemaGen` task. -* The `prod` schema is defined by the `graphqlProductionSchema`. +```sbt +name in graphqlSchemaGen := "local-build" +``` ### Add a schema @@ -86,16 +88,6 @@ Schemas are defined via a `GraphQLSchema` case class. You need to define * a `description`. Explain where this schema comes from and what it represents * a `schemaTask`. A sbt task that generates the schema -This is how the `prod` schema is defined. - -```scala -graphqlSchemas += GraphQLSchema( - GraphQLSchemaLabels.PROD, - "schema generated by the graphqlProductionSchema task", - graphqlProductionSchema.taskValue -) -``` - You can also define a schema from a `SchemaLoader`. This requires defining an anonymous sbt task. 
```scala diff --git a/src/main/scala/rocks/muki/graphql/GraphQLCodegenPlugin.scala b/src/main/scala/rocks/muki/graphql/GraphQLCodegenPlugin.scala index 3e774fc..1ad9b41 100644 --- a/src/main/scala/rocks/muki/graphql/GraphQLCodegenPlugin.scala +++ b/src/main/scala/rocks/muki/graphql/GraphQLCodegenPlugin.scala @@ -27,8 +27,8 @@ object GraphQLCodegenPlugin extends AutoPlugin { excludeFilter in graphqlCodegen := HiddenFileFilter, graphqlCodegenQueries := Defaults .collectFiles(resourceDirectories in graphqlCodegen, - includeFilter in graphqlCodegen, - excludeFilter in graphqlCodegen) + includeFilter in graphqlCodegen, + excludeFilter in graphqlCodegen) .value, sourceGenerators in Compile += Def.task { Seq(graphqlCodegen.value) }, graphqlCodegenPackage := "graphql.codegen", @@ -44,23 +44,23 @@ object GraphQLCodegenPlugin extends AutoPlugin { log.info(s"Use schema $schema for query validation") val builder = - if (schema.getName.endsWith(".json")) - Builder(SchemaLoader.fromFile(schema).loadSchema()) - else - Builder(schema) + if (schema.getName.endsWith(".json")) + Builder(SchemaLoader.fromFile(schema).loadSchema()) + else + Builder(schema) val result = builder - .withQuery(queries: _*) - .generate(generator) - .map { code => - IO.createDirectory(output.getParentFile) - IO.writeLines(output, - List(s"package $packageName", code.show[Syntax])) - } + .withQuery(queries: _*) + .generate(generator) + .map { code => + IO.createDirectory(output.getParentFile) + IO.writeLines(output, + List(s"package $packageName", code.show[Syntax])) + } result match { - case Left(error) => sys.error(s"Failed to generate code: $error") - case Right(()) => output + case Left(error) => sys.error(s"Failed to generate code: $error") + case Right(()) => output } } ) diff --git a/src/main/scala/rocks/muki/graphql/GraphQLPlugin.scala b/src/main/scala/rocks/muki/graphql/GraphQLPlugin.scala new file mode 100644 index 0000000..49b8e37 --- /dev/null +++ 
b/src/main/scala/rocks/muki/graphql/GraphQLPlugin.scala @@ -0,0 +1,75 @@ +package rocks.muki.graphql + +import sbt.Keys._ +import sbt._ +import rocks.muki.graphql.schema.{GraphQLSchemas, SchemaLoader} + +/** + * == GraphQL Plugin == + * + * Root plugin for all other graphql plugins. Provides a schema registry that can be used for + * + * - validating queries against a specific schema + * - comparing schemas + * - code generation based on a specific schema + * + */ +object GraphQLPlugin extends AutoPlugin { + + object autoImport { + + /** + * Helper to load schemas from different places + */ + val GraphQLSchemaLoader: SchemaLoader.type = + rocks.muki.graphql.schema.SchemaLoader + + val GraphQLSchema: rocks.muki.graphql.schema.GraphQLSchema.type = + rocks.muki.graphql.schema.GraphQLSchema + + /** + * Contains all schemas available in this build. + * + * @example Adding a new schema + * {{{ + * graphqlSchemas += GraphQLSchema( + * "temporary", + * "schema loaded from schema.json in the base directory", + * SchemaLoader.fromFile(baseDirectory.value / "schema.json")), + * }}} + * + */ + val graphqlSchemas: SettingKey[GraphQLSchemas] = + settingKey[GraphQLSchemas]("all schemas available in this build") + + /** + * Renders the given schema into a graphql file. + * The input is the label in the graphqlSchemas setting. 
+ */ + val graphqlRenderSchema: InputKey[File] = + inputKey[File]("renders the given schema to a graphql file") + + } + import autoImport._ + + override def projectSettings: Seq[Setting[_]] = Seq( + graphqlSchemas := GraphQLSchemas(), + // schema rendering + target in graphqlRenderSchema := (target in Compile).value / "graphql", + graphqlRenderSchema := graphqlRenderSchemaTask.evaluated + ) + + private val graphqlRenderSchemaTask = Def.inputTaskDyn[File] { + val log = streams.value.log + val schemaDefinition = singleGraphQLSchemaParser.parsed + val file = (target in graphqlRenderSchema).value / s"${schemaDefinition.label}.graphql" + log.info(s"Rendering schema to: ${file.getPath}") + + Def.task { + val schema = schemaDefinition.schemaTask.value + IO.write(file, schema.renderPretty) + file + } + } + +} diff --git a/src/main/scala/rocks/muki/graphql/GraphQLQueryPlugin.scala b/src/main/scala/rocks/muki/graphql/GraphQLQueryPlugin.scala index 7d55d58..cce3b73 100644 --- a/src/main/scala/rocks/muki/graphql/GraphQLQueryPlugin.scala +++ b/src/main/scala/rocks/muki/graphql/GraphQLQueryPlugin.scala @@ -17,58 +17,67 @@ object GraphQLQueryPlugin extends AutoPlugin { */ val graphqlValidateQueries: TaskKey[Unit] = taskKey[Unit]("validate all queries in the graphql source directory") + + val graphqlQueryDirectory: SettingKey[File] = + settingKey[File]("graphql files") } import autoImport._ import GraphQLSchemaPlugin.autoImport._ - override def projectSettings: Seq[Setting[_]] = Seq( - sourceDirectory in (Compile, graphqlValidateQueries) := (sourceDirectory in Compile).value / "graphql", - graphqlValidateQueries := { - val log = streams.value.log - val schemaFile = IO.read(graphqlSchemaGen.value) - val schemaDocument = QueryParser - .parse(schemaFile) - .getOrElse( - sys.error( - "Invalid graphql schema generated by `graphqlSchemaGen` task") - ) - val schema = Schema.buildFromAst(schemaDocument) + // TODO separate these into two auto plugins + override def projectSettings: 
Seq[Setting[_]] = + pluginSettings(Compile) ++ pluginSettings(IntegrationTest) - val src = (sourceDirectory in (Compile, graphqlValidateQueries)).value - val graphqlFiles = (src ** "*.graphql").get - val violations = graphqlFiles.flatMap { - file => - log.info(s"Validate ${file.getPath}") - val query = IO.read(file) - val violations = QueryParser - .parse(query) - .fold( - error => Vector(InvalidQueryValidation(error)), - query => QueryValidator.default.validateQuery(schema, query) + private def pluginSettings(config: Configuration): Seq[Setting[_]] = + inConfig(config)( + Seq( + graphqlQueryDirectory := (sourceDirectory in Compile).value / "graphql", + graphqlValidateQueries := { + val log = streams.value.log + val schemaFile = IO.read(graphqlSchemaGen.value) + val schemaDocument = QueryParser + .parse(schemaFile) + .getOrElse( + sys.error( + "Invalid graphql schema generated by `graphqlSchemaGen` task") ) - if (violations.nonEmpty) { - log.error(s"File: ${file.getAbsolutePath}") - log.error("## Query ##") - log.error(query) - log.error("## Violations ##") - violations.foreach(v => log.error(v.errorMessage)) - List(QueryViolations(file, query, violations)) - } else { - Nil + val schema = Schema.buildFromAst(schemaDocument) + + log.info(s"Checking graphl files in ${graphqlQueryDirectory.value}") + val graphqlFiles = (graphqlQueryDirectory.value ** "*.graphql").get + val violations = graphqlFiles.flatMap { + file => + log.info(s"Validate ${file.getPath}") + val query = IO.read(file) + val violations = QueryParser + .parse(query) + .fold( + error => Vector(InvalidQueryValidation(error)), + query => QueryValidator.default.validateQuery(schema, query) + ) + if (violations.nonEmpty) { + log.error(s"File: ${file.getAbsolutePath}") + log.error("## Query ##") + log.error(query) + log.error("## Violations ##") + violations.foreach(v => log.error(v.errorMessage)) + List(QueryViolations(file, query, violations)) + } else { + Nil + } } - } - if (violations.nonEmpty) { - 
log.error("Validation errors in") - violations.foreach { queryViolation => - log.error(s"File: ${queryViolation.file.getAbsolutePath}") + if (violations.nonEmpty) { + log.error("Validation errors in") + violations.foreach { queryViolation => + log.error(s"File: ${queryViolation.file.getAbsolutePath}") + } + quietError("Some queries contain validation violations") + } + log.success(s"All ${graphqlFiles.size} graphql files are valid") } - quietError("Some queries contain validation violations") - } - log.success(s"All ${graphqlFiles.size} graphql files are valid") - } - ) + )) /** * Aggregates violations for a single file diff --git a/src/main/scala/rocks/muki/graphql/GraphQLSchemaPlugin.scala b/src/main/scala/rocks/muki/graphql/GraphQLSchemaPlugin.scala index 6b62d90..e60c010 100644 --- a/src/main/scala/rocks/muki/graphql/GraphQLSchemaPlugin.scala +++ b/src/main/scala/rocks/muki/graphql/GraphQLSchemaPlugin.scala @@ -1,16 +1,14 @@ package rocks.muki.graphql -import sangria.ast.Document +import rocks.muki.graphql.schema.SchemaLoader import sangria.schema._ import sbt._ import sbt.Keys._ -import complete.{FixedSetExamples, Parser} -import complete.DefaultParsers._ -import rocks.muki.graphql.releasenotes.MarkdownReleaseNotes -import rocks.muki.graphql.schema.{GraphQLSchemas, SchemaLoader} object GraphQLSchemaPlugin extends AutoPlugin { + override val requires: Plugins = GraphQLPlugin + // The main class for the schema generator class private val mainClass = "SchemaGen" // The package for the schema generated class @@ -18,43 +16,6 @@ object GraphQLSchemaPlugin extends AutoPlugin { object autoImport { - /** - * Helper to load schemas from different places - */ - val GraphQLSchemaLoader: SchemaLoader.type = - rocks.muki.graphql.schema.SchemaLoader - - val GraphQLSchema: rocks.muki.graphql.schema.GraphQLSchema.type = - rocks.muki.graphql.schema.GraphQLSchema - - object GraphQLSchemaLabels { - - /** - * Label for the schema generated by the project build - */ - val BUILD: 
String = "build" - - /** - * Label for the production schema - */ - val PROD: String = "prod" - } - - /** - * Contains all schemas available in this build. - * - * @example Adding a new schema - * {{{ - * graphqlSchemas += GraphQLSchema( - * "temporary", - * "schema loaded from schema.json in the base directory", - * SchemaLoader.fromFile(baseDirectory.value / "schema.json")), - * }}} - * - */ - val graphqlSchemas: SettingKey[GraphQLSchemas] = - settingKey[GraphQLSchemas]("all schemas available in this build") - /** * A scala snippet that returns the [[sangria.schema.Schema]] for your graphql application. * @@ -72,13 +33,6 @@ object GraphQLSchemaPlugin extends AutoPlugin { val graphqlSchemaGen: TaskKey[File] = taskKey[File]("generates a graphql schema file") - /** - * Renders the given schema into a graphql file. - * The input is the label in the graphqlSchemas setting. - */ - val graphqlRenderSchema: InputKey[File] = - inputKey[File]("renders the given schema to a graphql file") - /** * Returns the changes between the two schemas defined as parameters. * @@ -94,12 +48,6 @@ object GraphQLSchemaPlugin extends AutoPlugin { val graphqlSchemaChanges: InputKey[Vector[SchemaChange]] = inputKey[Vector[SchemaChange]]("compares two schemas") - /** - * The currently active / deployed graphql schema. 
- */ - val graphqlProductionSchema: TaskKey[Schema[Any, Any]] = - taskKey[Schema[Any, Any]]("Graphql schema from your production system") - /** * Validates the new schema against existing queries and the production schema */ @@ -114,10 +62,10 @@ object GraphQLSchemaPlugin extends AutoPlugin { } import autoImport._ + import GraphQLPlugin.autoImport._ override def projectSettings: Seq[Setting[_]] = Seq( graphqlSchemaSnippet := """sys.error("Configure the `graphqlSchemaSnippet` setting with the correct scala code snippet to access your sangria schema")""", - graphqlProductionSchema := Schema.buildFromAst(Document.emptyStub), graphqlSchemaChanges := graphqlSchemaChangesTask.evaluated, target in graphqlSchemaGen := (target in Compile).value / "sbt-graphql", graphqlSchemaGen := { @@ -131,36 +79,18 @@ object GraphQLSchemaPlugin extends AutoPlugin { streams.value.log.info(s"Generating schema in $schemaFile") schemaFile }, - graphqlSchemas := GraphQLSchemas(), - graphqlSchemas += GraphQLSchema( - GraphQLSchemaLabels.BUILD, + // add the schema produced by this build to the graphqlSchemas + (name in graphqlSchemaGen) := "build", + graphqlSchemas += schema.GraphQLSchema( + (name in graphqlSchemaGen).value, "schema generated by this build (graphqlSchemaGen task)", graphqlSchemaGen.map(SchemaLoader.fromFile(_).loadSchema()).taskValue ), - graphqlSchemas += GraphQLSchema( - GraphQLSchemaLabels.PROD, - "schema generated by the graphqlProductionSchema task", - graphqlProductionSchema.taskValue), graphqlValidateSchema := graphqlValidateSchemaTask.evaluated, graphqlReleaseNotes := (new MarkdownReleaseNotes) .generateReleaseNotes(graphqlSchemaChanges.evaluated), // Generates a small snippet that generates a graphql schema - sourceGenerators in Compile += generateSchemaGeneratorClass(), - graphqlSchemas += GraphQLSchema( - "staging", - "staging schema at staging.your-graphql.net/graphql", - Def - .task( - GraphQLSchemaLoader - 
.fromIntrospection("http://staging.your-graphql.net/graphql", - streams.value.log) - .loadSchema() - ) - .taskValue - ), - // schema rendering - target in graphqlRenderSchema := (target in Compile).value / "graphql", - graphqlRenderSchema := graphqlRenderSchemaTask.evaluated + sourceGenerators in Compile += generateSchemaGeneratorClass() ) /** @@ -196,44 +126,16 @@ object GraphQLSchemaPlugin extends AutoPlugin { Seq(file) } - /** - * @param labels list of available schemas by label - * @return a parser for the given labels - */ - private def schemaLabelParser(labels: Iterable[String]): Parser[String] = { - val schemaParser = StringBasic.examples(FixedSetExamples(labels)) - token(Space ~> schemaParser) - } - - private val singleSchemaLabelParser: Def.Initialize[Parser[String]] = - Def.setting { - val labels = graphqlSchemas.value.schemas.map(_.label) - // create a dependent parser. A label can only be selected once - schemaLabelParser(labels) - } - - /** - * Parses two schema labels - */ - private val graphqlSchemaChangesParser - : Def.Initialize[Parser[(String, String)]] = Def.setting { - val labels = graphqlSchemas.value.schemas.map(_.label) - // create a depened parser. 
A label can only be selected once - schemaLabelParser(labels).flatMap { selectedLabel => - success(selectedLabel) ~ schemaLabelParser( - labels.filterNot(_ == selectedLabel)) - } - } - private val graphqlSchemaChangesTask = Def.inputTaskDyn { val log = streams.value.log - val (oldSchemaLabel, newSchemaLabel) = graphqlSchemaChangesParser.parsed + val (oldSchemaDefinition, newSchemaDefinition) = + tupleGraphQLSchemaParser.parsed - val schemas = graphqlSchemas.value.schemaByLabel Def.task { - val newSchema = schemas(newSchemaLabel).schemaTask.value - val oldSchema = schemas(oldSchemaLabel).schemaTask.value - log.info(s"Comparing $oldSchemaLabel with $newSchemaLabel schema") + val newSchema = newSchemaDefinition.schemaTask.value + val oldSchema = oldSchemaDefinition.schemaTask.value + log.info( + s"Comparing ${oldSchemaDefinition.label} with ${newSchemaDefinition.label} schema") oldSchema compare newSchema } } @@ -248,22 +150,4 @@ object GraphQLSchemaPlugin extends AutoPlugin { } } - private val graphqlRenderSchemaTask = Def.inputTaskDyn[File] { - val log = streams.value.log - val label = singleSchemaLabelParser.parsed - val file = (target in graphqlRenderSchema).value / s"$label.graphql" - val schemaDefinition = graphqlSchemas.value.schemaByLabel.getOrElse( - label, - sys.error(s"The schema '$label' is not defined in graphqlSchemas") - ) - log.info(s"Rendering schema to: ${file.getPath}") - - Def.task { - val schema = schemaDefinition.schemaTask.value - IO.write(file, schema.renderPretty) - file - } - - } - } diff --git a/src/main/scala/rocks/muki/graphql/codegen/Builder.scala b/src/main/scala/rocks/muki/graphql/codegen/Builder.scala index 44ff700..2c6f4c1 100644 --- a/src/main/scala/rocks/muki/graphql/codegen/Builder.scala +++ b/src/main/scala/rocks/muki/graphql/codegen/Builder.scala @@ -31,13 +31,13 @@ case class Builder private ( private def withQuery(query: => Result[Document]): Builder = { val validatedQuery = schema.flatMap { validSchema => query.flatMap { 
loadedQuery => - val violations = - QueryValidator.default.validateQuery(validSchema, loadedQuery) - if (violations.isEmpty) - query - else - Left(Failure( - s"Invalid query: ${violations.map(_.errorMessage).mkString(", ")}")) + val violations = + QueryValidator.default.validateQuery(validSchema, loadedQuery) + if (violations.isEmpty) + query + else + Left(Failure( + s"Invalid query: ${violations.map(_.errorMessage).mkString(", ")}")) } } @@ -53,7 +53,7 @@ case class Builder private ( def withQuery(queryFiles: File*): Builder = queryFiles.foldLeft(this) { case (builder, file) => - builder.withQuery(Builder.parseDocument(file)) + builder.withQuery(Builder.parseDocument(file)) } def generate[T](implicit generator: Generator[T]): Result[T] = @@ -75,19 +75,19 @@ object Builder { for { document <- parseDocument(file) schema <- Either.catchNonFatal(Schema.buildFromAst(document)).leftMap { - error => - Failure(s"Failed to read schema $file: ${error.getMessage}") + error => + Failure(s"Failed to read schema $file: ${error.getMessage}") } } yield schema private def parseDocument(file: File): Result[Document] = for { input <- Either.catchNonFatal(Source.fromFile(file).mkString).leftMap { - error => - Failure(s"Failed to read $file: ${error.getMessage}") + error => + Failure(s"Failed to read $file: ${error.getMessage}") } document <- Either.fromTry(QueryParser.parse(input)).leftMap { error => - Failure(s"Failed to parse $file: ${error.getMessage}") + Failure(s"Failed to parse $file: ${error.getMessage}") } } yield document } diff --git a/src/main/scala/rocks/muki/graphql/codegen/Importer.scala b/src/main/scala/rocks/muki/graphql/codegen/Importer.scala index 0d0e1c1..722a296 100644 --- a/src/main/scala/rocks/muki/graphql/codegen/Importer.scala +++ b/src/main/scala/rocks/muki/graphql/codegen/Importer.scala @@ -28,9 +28,9 @@ case class Importer(schema: Schema[_, _], document: ast.Document) { def parse(): Result[Tree.Api] = Right( Tree.Api( - 
document.operations.values.map(generateOperation).toVector, - document.fragments.values.toVector.map(generateFragment), - schema.typeList.filter(types).collect(generateType) + document.operations.values.map(generateOperation).toVector, + document.fragments.values.toVector.map(generateFragment), + schema.typeList.filter(types).collect(generateType) )) /** @@ -68,52 +68,52 @@ case class Importer(schema: Schema[_, _], document: ast.Document) { node: ast.Selection): Tree.Selection = { def conditionalFragment(f: => Tree.Selection): Tree.Selection = if (typeConditions.isEmpty || typeConditions(typeInfo.tpe.get)) - f + f else - Tree.Selection.empty + Tree.Selection.empty typeInfo.enter(node) val result = node match { case field: ast.Field => - require(typeInfo.tpe.isDefined, s"Field without type: $field") - val tpe = typeInfo.tpe.get - tpe.namedType match { - case union: UnionType[_] => - val types = union.types.toList.map { tpe => - // Prepend the union type name to include and descend into fragment spreads - val conditions = Set[Type](union, tpe) ++ tpe.interfaces - val selection = generateSelections(field.selections, conditions) - Tree.UnionSelection(tpe, selection) - } - Tree.Selection(Tree.Field(field.outputName, tpe, union = types)) - - case obj @ (_: ObjectLikeType[_, _] | _: InputObjectType[_]) => - val gen = generateSelections(field.selections) - Tree.Selection( - Tree.Field(field.outputName, tpe, selection = Some(gen))) - - case _ => - touchType(tpe) - Tree.Selection(Tree.Field(field.outputName, tpe)) - } + require(typeInfo.tpe.isDefined, s"Field without type: $field") + val tpe = typeInfo.tpe.get + tpe.namedType match { + case union: UnionType[_] => + val types = union.types.toList.map { tpe => + // Prepend the union type name to include and descend into fragment spreads + val conditions = Set[Type](union, tpe) ++ tpe.interfaces + val selection = generateSelections(field.selections, conditions) + Tree.UnionSelection(tpe, selection) + } + 
Tree.Selection(Tree.Field(field.outputName, tpe, union = types)) + + case obj @ (_: ObjectLikeType[_, _] | _: InputObjectType[_]) => + val gen = generateSelections(field.selections) + Tree.Selection( + Tree.Field(field.outputName, tpe, selection = Some(gen))) + + case _ => + touchType(tpe) + Tree.Selection(Tree.Field(field.outputName, tpe)) + } case fragmentSpread: ast.FragmentSpread => - val name = fragmentSpread.name - val fragment = document.fragments(fragmentSpread.name) - // Sangria's TypeInfo abstraction does not resolve fragment spreads - // when traversing, so explicitly enter resolved fragment. - typeInfo.enter(fragment) - val result = conditionalFragment( - generateSelections(fragment.selections, typeConditions) - .copy(interfaces = Vector(name))) - typeInfo.leave(fragment) - result + val name = fragmentSpread.name + val fragment = document.fragments(fragmentSpread.name) + // Sangria's TypeInfo abstraction does not resolve fragment spreads + // when traversing, so explicitly enter resolved fragment. 
+ typeInfo.enter(fragment) + val result = conditionalFragment( + generateSelections(fragment.selections, typeConditions) + .copy(interfaces = Vector(name))) + typeInfo.leave(fragment) + result case inlineFragment: ast.InlineFragment => - conditionalFragment(generateSelections(inlineFragment.selections)) + conditionalFragment(generateSelections(inlineFragment.selections)) case unknown => - sys.error("Unknown selection: " + unknown.toString) + sys.error("Unknown selection: " + unknown.toString) } typeInfo.leave(node) result @@ -123,11 +123,11 @@ case class Importer(schema: Schema[_, _], document: ast.Document) { typeInfo.enter(operation) val variables = operation.variables.toVector.map { varDef => schema.getInputType(varDef.tpe) match { - case Some(tpe) => - touchType(tpe) - Tree.Field(varDef.name, tpe) - case None => - sys.error("Unknown input type: " + varDef.tpe) + case Some(tpe) => + touchType(tpe) + Tree.Field(varDef.name, tpe) + case None => + sys.error("Unknown input type: " + varDef.tpe) } } @@ -154,8 +154,8 @@ case class Importer(schema: Schema[_, _], document: ast.Document) { def generateType: PartialFunction[Type, Tree.Type] = { case interface: InterfaceType[_, _] => val fields = interface.uniqueFields.map { field => - touchType(field.fieldType) - Tree.Field(field.name, field.fieldType) + touchType(field.fieldType) + Tree.Field(field.name, field.fieldType) } Tree.Interface(interface.name, fields) @@ -171,8 +171,8 @@ case class Importer(schema: Schema[_, _], document: ast.Document) { case inputObj: InputObjectType[_] => val fields = inputObj.fields.map { field => - touchType(field.fieldType) - Tree.Field(field.name, field.fieldType) + touchType(field.fieldType) + Tree.Field(field.name, field.fieldType) } Tree.Object(inputObj.name, fields) diff --git a/src/main/scala/rocks/muki/graphql/codegen/ScalaSourceGenerator.scala b/src/main/scala/rocks/muki/graphql/codegen/ScalaSourceGenerator.scala index 26fa425..8a4b5f7 100644 --- 
a/src/main/scala/rocks/muki/graphql/codegen/ScalaSourceGenerator.scala +++ b/src/main/scala/rocks/muki/graphql/codegen/ScalaSourceGenerator.scala @@ -24,17 +24,17 @@ import sangria.schema * Generate code using Scalameta. */ case class ScalametaGenerator(moduleName: Term.Name, - emitInterfaces: Boolean = false, - stats: Seq[Stat] = Vector.empty) + emitInterfaces: Boolean = false, + stats: Seq[Stat] = Vector.empty) extends Generator[Defn.Object] { override def apply(api: Tree.Api): Result[Defn.Object] = { val operations = api.operations.flatMap(generateOperation) val fragments = if (emitInterfaces) - api.interfaces.map(generateInterface) + api.interfaces.map(generateInterface) else - Seq.empty + Seq.empty val types = api.types.flatMap(generateType) Right( @@ -53,7 +53,7 @@ case class ScalametaGenerator(moduleName: Term.Name, Term.Param(Vector.empty, Term.Name(paramName), Some(tpe), None) def generateTemplate(traits: Seq[String], - prefix: String = moduleName.value + "."): Template = { + prefix: String = moduleName.value + "."): Template = { val ctorNames = traits.map(prefix + _).map(Ctor.Name.apply) val emptySelf = Term.Param(Vector.empty, Name.Anonymous(), None, None) Template(Nil, ctorNames, emptySelf, None) @@ -63,17 +63,17 @@ case class ScalametaGenerator(moduleName: Term.Name, genType: schema.Type => Type): Type = { def typeOf(tpe: schema.Type): Type = tpe match { case schema.OptionType(wrapped) => - t"Option[${typeOf(wrapped)}]" + t"Option[${typeOf(wrapped)}]" case schema.OptionInputType(wrapped) => - t"Option[${typeOf(wrapped)}]" + t"Option[${typeOf(wrapped)}]" case schema.ListType(wrapped) => - t"List[${typeOf(wrapped)}]" + t"List[${typeOf(wrapped)}]" case schema.ListInputType(wrapped) => - t"List[${typeOf(wrapped)}]" + t"List[${typeOf(wrapped)}]" case tpe: schema.ScalarType[_] if tpe == schema.IDType => - Type.Name(moduleName.value + ".ID") + Type.Name(moduleName.value + ".ID") case tpe: schema.Type => - genType(tpe) + genType(tpe) } typeOf(field.tpe) } @@ 
-81,71 +81,71 @@ case class ScalametaGenerator(moduleName: Term.Name, def generateOperation(operation: Tree.Operation): Seq[Stat] = { def fieldType(field: Tree.Field, prefix: String = ""): Type = generateFieldType(field) { tpe => - if (field.isObjectLike || field.isUnion) - Type.Name(prefix + field.name.capitalize) - else - Type.Name(tpe.namedType.name) + if (field.isObjectLike || field.isUnion) + Type.Name(prefix + field.name.capitalize) + else + Type.Name(tpe.namedType.name) } def generateSelectionParams(prefix: String)( - selection: Tree.Selection): Seq[Term.Param] = + selection: Tree.Selection): Seq[Term.Param] = selection.fields.map { field => - val tpe = fieldType(field, prefix) - termParam(field.name, tpe) + val tpe = fieldType(field, prefix) + termParam(field.name, tpe) } def generateSelectionStats(prefix: String)( - selection: Tree.Selection): Seq[Stat] = + selection: Tree.Selection): Seq[Stat] = selection.fields.flatMap { - case Tree.Field(name, tpe, None, unionTypes) if unionTypes.nonEmpty => - val unionName = Type.Name(name.capitalize) - val objectName = Term.Name(unionName.value) - val template = generateTemplate(Seq(unionName.value), prefix) - val unionValues = unionTypes.flatMap { - case Tree.UnionSelection(tpe, selection) => - val path = prefix + unionName.value + "." + tpe.name + "." 
- val stats = generateSelectionStats(path)(selection) - val params = generateSelectionParams(path)(selection) - val tpeName = Type.Name(tpe.name) - val termName = Term.Name(tpe.name) - - Vector(q"case class $tpeName(..$params) extends $template") ++ - Option(stats) - .filter(_.nonEmpty) - .map { stats => - q"object $termName { ..$stats }" - } - .toVector - } - - Vector[Stat]( - q"sealed trait $unionName", - q"object $objectName { ..$unionValues }" - ) - - case Tree.Field(name, tpe, Some(selection), _) => - val stats = - generateSelectionStats(prefix + name.capitalize + ".")(selection) - val params = - generateSelectionParams(prefix + name.capitalize + ".")(selection) - - val tpeName = Type.Name(name.capitalize) - val termName = Term.Name(name.capitalize) - val interfaces = - if (emitInterfaces) selection.interfaces - else Seq.empty - val template = generateTemplate(interfaces) - - Vector(q"case class $tpeName(..$params) extends $template") ++ - Option(stats) - .filter(_.nonEmpty) - .map { stats => - q"object $termName { ..$stats }" - } - .toVector - - case Tree.Field(_, _, _, _) => - Vector.empty + case Tree.Field(name, tpe, None, unionTypes) if unionTypes.nonEmpty => + val unionName = Type.Name(name.capitalize) + val objectName = Term.Name(unionName.value) + val template = generateTemplate(Seq(unionName.value), prefix) + val unionValues = unionTypes.flatMap { + case Tree.UnionSelection(tpe, selection) => + val path = prefix + unionName.value + "." + tpe.name + "." 
+ val stats = generateSelectionStats(path)(selection) + val params = generateSelectionParams(path)(selection) + val tpeName = Type.Name(tpe.name) + val termName = Term.Name(tpe.name) + + Vector(q"case class $tpeName(..$params) extends $template") ++ + Option(stats) + .filter(_.nonEmpty) + .map { stats => + q"object $termName { ..$stats }" + } + .toVector + } + + Vector[Stat]( + q"sealed trait $unionName", + q"object $objectName { ..$unionValues }" + ) + + case Tree.Field(name, tpe, Some(selection), _) => + val stats = + generateSelectionStats(prefix + name.capitalize + ".")(selection) + val params = + generateSelectionParams(prefix + name.capitalize + ".")(selection) + + val tpeName = Type.Name(name.capitalize) + val termName = Term.Name(name.capitalize) + val interfaces = + if (emitInterfaces) selection.interfaces + else Seq.empty + val template = generateTemplate(interfaces) + + Vector(q"case class $tpeName(..$params) extends $template") ++ + Option(stats) + .filter(_.nonEmpty) + .map { stats => + q"object $termName { ..$stats }" + } + .toVector + + case Tree.Field(_, _, _, _) => + Vector.empty } val variables = operation.variables.map { varDef => @@ -176,12 +176,12 @@ case class ScalametaGenerator(moduleName: Term.Name, val defs = interface.fields.map { field => val fieldName = Term.Name(field.name) val tpe = generateFieldType(field) { tpe => - field.selection.map(_.interfaces).filter(_.nonEmpty) match { - case Some(interfaces) => - interfaces.map(x => Type.Name(x): Type).reduce(Type.With(_, _)) - case None => - Type.Name(tpe.namedType.name) - } + field.selection.map(_.interfaces).filter(_.nonEmpty) match { + case Some(interfaces) => + interfaces.map(x => Type.Name(x): Type).reduce(Type.With(_, _)) + case None => + Type.Name(tpe.namedType.name) + } } q"def $fieldName: $tpe" } @@ -202,25 +202,25 @@ case class ScalametaGenerator(moduleName: Term.Name, def generateType(tree: Tree.Type): Seq[Stat] = tree match { case interface: Tree.Interface => if (emitInterfaces) 
- Vector(generateInterface(interface)) + Vector(generateInterface(interface)) else - Vector.empty + Vector.empty case obj: Tree.Object => Vector(generateObject(obj, Seq.empty)) case Tree.Enum(name, values) => val enumValues = values.map { value => - val template = generateTemplate(Seq(name)) - val valueName = Term.Name(value) - q"case object $valueName extends $template" + val template = generateTemplate(Seq(name)) + val valueName = Term.Name(value) + q"case object $valueName extends $template" } val enumName = Type.Name(name) val objectName = Term.Name(name) Vector[Stat]( - q"sealed trait $enumName", - q"object $objectName { ..$enumValues }" + q"sealed trait $enumName", + q"object $objectName { ..$enumValues }" ) case Tree.TypeAlias(from, to) => @@ -233,8 +233,8 @@ case class ScalametaGenerator(moduleName: Term.Name, val unionName = Type.Name(name) val objectName = Term.Name(name) Vector[Stat]( - q"sealed trait $unionName", - q"object $objectName { ..$unionValues }" + q"sealed trait $unionName", + q"object $objectName { ..$unionValues }" ) } diff --git a/src/main/scala/rocks/muki/graphql/codegen/Tree.scala b/src/main/scala/rocks/muki/graphql/codegen/Tree.scala index 462ae67..6dbcce6 100644 --- a/src/main/scala/rocks/muki/graphql/codegen/Tree.scala +++ b/src/main/scala/rocks/muki/graphql/codegen/Tree.scala @@ -25,20 +25,20 @@ import sangria.schema sealed trait Tree object Tree { case class Field(name: String, - tpe: schema.Type, - selection: Option[Selection] = None, - union: Seq[UnionSelection] = Seq.empty) + tpe: schema.Type, + selection: Option[Selection] = None, + union: Seq[UnionSelection] = Seq.empty) extends Tree { def isObjectLike = selection.nonEmpty def isUnion = union.nonEmpty } case class Selection(fields: Seq[Field], - interfaces: Seq[String] = Vector.empty) + interfaces: Seq[String] = Vector.empty) extends Tree { def +(that: Selection) = Selection((this.fields ++ that.fields).distinct, - this.interfaces ++ that.interfaces) + this.interfaces ++ 
that.interfaces) } object Selection { final val empty = Selection(Vector.empty) @@ -53,8 +53,8 @@ object Tree { * Operations represent API calls and are the entry points to the API. */ case class Operation(name: Option[String], - variables: Seq[Field], - selection: Selection) + variables: Seq[Field], + selection: Selection) extends Tree /** @@ -76,6 +76,6 @@ object Tree { * referenced in the query documents. */ case class Api(operations: Seq[Operation], - interfaces: Seq[Interface], - types: Seq[Type]) + interfaces: Seq[Interface], + types: Seq[Type]) } diff --git a/src/main/scala/rocks/muki/graphql/package.scala b/src/main/scala/rocks/muki/graphql/package.scala index 14a00c7..d9517d7 100644 --- a/src/main/scala/rocks/muki/graphql/package.scala +++ b/src/main/scala/rocks/muki/graphql/package.scala @@ -1,10 +1,66 @@ package rocks.muki +import rocks.muki.graphql.GraphQLPlugin.autoImport.graphqlSchemas +import rocks.muki.graphql.schema.{GraphQLSchema, GraphQLSchemas} +import sbt._ +import sbt.complete.DefaultParsers._ +import sbt.complete.{FixedSetExamples, Parser} + package object graphql { + /** + * Throw an exception without a stacktrace. + * + * @param msg the error message + * @return nothing - throws an exception + */ def quietError(msg: String): Nothing = { val exc = new RuntimeException(msg) exc.setStackTrace(Array.empty) throw exc } + + /** + * @return a parser that parses exactly one schema label + */ + val singleGraphQLSchemaParser: Def.Initialize[Parser[GraphQLSchema]] = + Def.setting { + val gqlSchema = graphqlSchemas.value + val labels = gqlSchema.schemas.map(_.label) + // create a dependent parser. 
A label can only be selected once + schemaLabelParser(labels).map(label => schemaOrError(label, gqlSchema)) + } + + /** + * Parses two schema labels + */ + val tupleGraphQLSchemaParser + : Def.Initialize[Parser[(GraphQLSchema, GraphQLSchema)]] = + Def.setting { + val gqlSchemas = graphqlSchemas.value + val labels = gqlSchemas.schemas.map(_.label) + // create a dependent parser. A label can only be selected once + schemaLabelParser(labels).flatMap { selectedLabel => + success(schemaOrError(selectedLabel, gqlSchemas)) ~ schemaLabelParser( + labels.filterNot(_ == selectedLabel)).map(label => + schemaOrError(label, gqlSchemas)) + } + } + + /** + * @param labels list of available schemas by label + * @return a parser for the given labels + */ + private[this] def schemaLabelParser( + labels: Iterable[String]): Parser[String] = { + val schemaParser = StringBasic.examples(FixedSetExamples(labels)) + token(Space.? ~> schemaParser) + } + + private def schemaOrError(label: String, + graphQLSchema: GraphQLSchemas): GraphQLSchema = + graphQLSchema.schemaByLabel.getOrElse( + label, + sys.error(s"The schema '$label' is not defined in graphqlSchemas")) + }