Skip to content

Commit

Permalink
#1410 Add parent to jobs
Browse files Browse the repository at this point in the history
  • Loading branch information
To-om committed Jul 15, 2020
1 parent 5314e32 commit 6439769
Show file tree
Hide file tree
Showing 5 changed files with 93 additions and 12 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -6,8 +6,9 @@ import org.thp.scalligraph.controllers.Renderer
import org.thp.scalligraph.models.Entity
import org.thp.thehive.connector.cortex.dto.v0._
import org.thp.thehive.connector.cortex.models._
import play.api.libs.json.{JsArray, JsFalse, Json}

import org.thp.thehive.dto.v0.OutputObservable
import org.thp.thehive.models.{RichCase, RichObservable}
import play.api.libs.json.{JsArray, JsFalse, JsObject, Json}
object Conversion {
import org.thp.thehive.controllers.v0.Conversion._

Expand Down Expand Up @@ -42,11 +43,44 @@ object Conversion {
case r => r + ("success" -> JsFalse)
}
)
.withFieldConst(_.id, job._id)
.withFieldRenamed(_._id, _.id)
.withFieldConst(_._type, "case_artifact_job")
.withFieldConst(_.case_artifact, None)
.transform
)

/** Renders a job together with its optional parent context as an OutputJob.
  *
  * Input is a pair: the RichJob itself, plus an optional (RichObservable, RichCase)
  * describing the observable the job was run on and the case that owns it.
  * Field mapping mirrors the plain job renderer (worker* -> analyzer*), with the
  * addition of `case_artifact`, which carries the rendered parent observable.
  */
implicit val jobWithParentOutput: Renderer.Aux[(RichJob, Option[(RichObservable, RichCase)]), OutputJob] =
Renderer.json[(RichJob, Option[(RichObservable, RichCase)]), OutputJob] { jobWithParent =>
jobWithParent
._1
.into[OutputJob]
// Internal "worker" terminology is exposed to the v0 API as "analyzer".
.withFieldComputed(_.analyzerId, _.workerId)
.withFieldComputed(_.analyzerName, _.workerName)
.withFieldComputed(_.analyzerDefinition, _.workerDefinition)
.withFieldComputed(_.status, _.status.toString)
.withFieldComputed(_.endDate, _.endDate)
.withFieldComputed(_.cortexId, _.cortexId)
.withFieldComputed(_.cortexJobId, _.cortexJobId)
.withFieldComputed(
_.report,
j =>
// On success, wrap the raw report with a success flag and the extracted
// observables; on any other status, flag the report as unsuccessful.
j.report.map {
case r if j.status == JobStatus.Success => Json.obj("success" -> true, "full" -> r, "artifacts" -> j.observables.map(_.toJson))
case r => r + ("success" -> JsFalse)
}
)
// Entity _id is exposed as the API "id"; _type is fixed for this entity kind.
.withFieldRenamed(_._id, _.id)
.withFieldConst(_._type, "case_artifact_job")
.withFieldConst(
_.case_artifact,
// Render the parent observable (with its case) when present; the extra
// JsObject.empty means no additional stats are attached here.
jobWithParent._2.fold[Option[OutputObservable]](None) {
case (richObservable, richCase) =>
Some(observableWithExtraOutput.toValue((richObservable, JsObject.empty, Some(richCase))))
}
)
.transform
}

implicit val analyzerTemplateOutput: Renderer.Aux[AnalyzerTemplate with Entity, OutputAnalyzerTemplate] =
Renderer.json[AnalyzerTemplate with Entity, OutputAnalyzerTemplate](at =>
at.asInstanceOf[AnalyzerTemplate]
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -13,7 +13,7 @@ import org.thp.thehive.connector.cortex.models.RichJob
import org.thp.thehive.connector.cortex.services.{JobSrv, JobSteps}
import org.thp.thehive.controllers.v0.Conversion._
import org.thp.thehive.controllers.v0.{IdOrName, OutputParam, QueryableCtrl}
import org.thp.thehive.models.Permissions
import org.thp.thehive.models.{Permissions, RichCase, RichObservable}
import org.thp.thehive.services.ObservableSrv
import play.api.Logger
import play.api.mvc.{Action, AnyContent, Results}
Expand All @@ -29,7 +29,8 @@ class JobCtrl @Inject() (
observableSrv: ObservableSrv,
errorHandler: ErrorHandler,
implicit val ec: ExecutionContext
) extends QueryableCtrl {
) extends QueryableCtrl
with JobRenderer {
lazy val logger: Logger = Logger(getClass)
override val entityName: String = "job"
override val publicProperties: List[PublicProperty[_, _]] = properties.job ::: metaProperties[JobSteps]
Expand All @@ -40,11 +41,15 @@ class JobCtrl @Inject() (
FieldsParser[IdOrName],
(param, graph, authContext) => jobSrv.get(param.idOrName)(graph).visible(authContext)
)
override val pageQuery: ParamQuery[OutputParam] = Query.withParam[OutputParam, JobSteps, PagedResult[RichJob]](
"page",
FieldsParser[OutputParam],
(range, jobSteps, authContext) => jobSteps.richPage(range.from, range.to, withTotal = true)(_.richJob(authContext))
)
// Paged job query. When the caller asks for parents (withParents > 0), each job is
// paired with its parent observable/case via the JobRenderer.jobParents traversal;
// otherwise the parent slot is filled with None so both branches share the
// PagedResult[(RichJob, Option[(RichObservable, RichCase)])] result type.
override val pageQuery: ParamQuery[OutputParam] =
Query.withParam[OutputParam, JobSteps, PagedResult[(RichJob, Option[(RichObservable, RichCase)])]](
"page",
FieldsParser[OutputParam], {
case (OutputParam(from, to, _, withParents), jobSteps, authContext) if withParents > 0 =>
jobSteps.richPage(from, to, withTotal = true)(_.richJobWithCustomRenderer(jobParents(_)(authContext))(authContext))
case (range, jobSteps, authContext) => jobSteps.richPage(range.from, range.to, withTotal = true)(_.richJob(authContext).map((_, None)))
}
)
override val outputQuery: Query = Query.outputWithContext[RichJob, JobSteps]((jobSteps, authContext) => jobSteps.richJob(authContext))

def get(jobId: String): Action[AnyContent] =
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,14 @@
package org.thp.thehive.connector.cortex.controllers.v0

import org.thp.scalligraph.auth.AuthContext
import org.thp.scalligraph.steps.StepsOps._
import org.thp.scalligraph.steps.Traversal
import org.thp.thehive.connector.cortex.services.JobSteps
import org.thp.thehive.models.{RichCase, RichObservable}

/** Mixin providing the traversal that resolves a job's parent context. */
trait JobRenderer {
/** From a job traversal, walks to the observable the job was run on and projects
  * it together with the case owning that observable, wrapped in Some.
  *
  * @param jobSteps    traversal positioned on job vertices
  * @param authContext authorization context used by richObservable/richCase
  * @return traversal of Some((richObservable, richCase)) per job
  */
def jobParents(
jobSteps: JobSteps
)(implicit authContext: AuthContext): Traversal[Option[(RichObservable, RichCase)], Option[(RichObservable, RichCase)]] =
jobSteps.observable.project(_.by(_.richObservable).by(_.`case`.richCase)).map(Some(_))
}
Original file line number Diff line number Diff line change
Expand Up @@ -18,7 +18,7 @@ import org.thp.scalligraph.controllers.FFile
import org.thp.scalligraph.models.{Database, Entity}
import org.thp.scalligraph.services._
import org.thp.scalligraph.steps.StepsOps._
import org.thp.scalligraph.steps.{Traversal, VertexSteps}
import org.thp.scalligraph.steps.{Traversal, TraversalLike, VertexSteps}
import org.thp.scalligraph.{EntitySteps, NotFoundError}
import org.thp.thehive.connector.cortex.controllers.v0.Conversion._
import org.thp.thehive.connector.cortex.models._
Expand Down Expand Up @@ -336,4 +336,30 @@ class JobSteps(raw: GremlinScala[Vertex])(implicit @Named("with-thehive-schema")
RichJob(job.as[Job], observables)
}
}
/** Builds RichJob results paired with an arbitrary rendered value per job.
  *
  * Generalises richJob: the same projection (job + its report observables and their
  * links back to this job) is computed, and additionally `entityRenderer` is applied
  * to each job so callers can attach extra context (e.g. the parent observable/case).
  *
  * @param entityRenderer traversal producing the extra value A for each job
  * @param authContext    authorization context (required by the rich conversions)
  * @return traversal of (RichJob, A) pairs
  */
def richJobWithCustomRenderer[A](
entityRenderer: JobSteps => TraversalLike[_, A]
)(implicit authContext: AuthContext): Traversal[(RichJob, A), (RichJob, A)] = {
// Label the current job vertex so the inner traversal can refer back to it
// when filtering similar observables to those attached to THIS job.
val thisJob = StepLabel()
this
.as(thisJob)
.project(
_.by
.by(
_.reportObservables
.project(
_.by(_.richObservable)
// For each report observable, collect ids of similar observables that
// belong to a case whose observables link (via ObservableJob) to this job.
.by(_.similar.filter(_.`case`.observables.outTo[ObservableJob].where(P.eq[String](thisJob.name)))._id.fold)
)
.fold
)
.by(entityRenderer)
)
.map {
case (job, observablesWithLink, renderedEntity) =>
// Keep only the first linked observable id (if any) as the "observableId" hint.
val observables = observablesWithLink.asScala.map {
case (obs, l) => obs -> Json.obj("observableId" -> l.asScala.headOption.map(_.toString))
}
RichJob(job.as[Job], observables) -> renderedEntity
}
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,7 @@ package org.thp.thehive.connector.cortex.dto.v0

import java.util.Date

import org.thp.thehive.dto.v0.OutputObservable
import play.api.libs.json.{JsObject, Json, OFormat}

case class OutputJob(
Expand All @@ -15,7 +16,8 @@ case class OutputJob(
report: Option[JsObject],
cortexId: String,
cortexJobId: String,
id: String
id: String,
case_artifact: Option[OutputObservable]
)

object OutputJob {
Expand Down

0 comments on commit 6439769

Please sign in to comment.