Commit 5d3d66d

Log data loading errors in conversion / mapping application (#8202)
1 parent: b53752a

File tree: 6 files changed (+62, -35 lines)

CHANGELOG.unreleased.md

Lines changed: 1 addition & 0 deletions

@@ -18,6 +18,7 @@ For upgrade instructions, please check the [migration guide](MIGRATIONS.released
 - Improved error messages for starting jobs on datasets from other organizations. [#8181](https://github.com/scalableminds/webknossos/pull/8181)
 - Terms of Service for Webknossos are now accepted at registration, not afterward. [#8193](https://github.com/scalableminds/webknossos/pull/8193)
 - Removed bounding box size restriction for inferral jobs for super users. [#8200](https://github.com/scalableminds/webknossos/pull/8200)
+- Improved logging for errors when loading datasets and when problems arise during a conversion step. [#8202](https://github.com/scalableminds/webknossos/pull/8202)

 ### Fixed
 - Fix performance bottleneck when deleting a lot of trees at once. [#8176](https://github.com/scalableminds/webknossos/pull/8176)

webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/BinaryDataService.scala

Lines changed: 22 additions & 31 deletions

@@ -10,7 +10,7 @@ import com.scalableminds.webknossos.datastore.models.datasource.{Category, DataL
 import com.scalableminds.webknossos.datastore.models.requests.{DataReadInstruction, DataServiceDataRequest}
 import com.scalableminds.webknossos.datastore.storage._
 import com.typesafe.scalalogging.LazyLogging
-import net.liftweb.common.{Box, Failure, Full}
+import net.liftweb.common.{Box, Full}
 import ucar.ma2.{Array => MultiArray}
 import net.liftweb.common.Box.tryo

@@ -20,7 +20,6 @@ import scala.concurrent.ExecutionContext
 class BinaryDataService(val dataBaseDir: Path,
                         val agglomerateServiceOpt: Option[AgglomerateService],
                         remoteSourceDescriptorServiceOpt: Option[RemoteSourceDescriptorService],
-                        val applicationHealthService: Option[ApplicationHealthService],
                         sharedChunkContentsCache: Option[AlfuCache[String, MultiArray]],
                         datasetErrorLoggingService: Option[DatasetErrorLoggingService])(implicit ec: ExecutionContext)
     extends FoxImplicits

@@ -55,22 +54,29 @@ class BinaryDataService(val dataBaseDir: Path,
   def handleDataRequests(requests: List[DataServiceDataRequest]): Fox[(Array[Byte], List[Int])] = {
     def convertIfNecessary(isNecessary: Boolean,
                            inputArray: Array[Byte],
-                           conversionFunc: Array[Byte] => Box[Array[Byte]]): Box[Array[Byte]] =
-      if (isNecessary) conversionFunc(inputArray) else Full(inputArray)
+                           conversionFunc: Array[Byte] => Fox[Array[Byte]],
+                           request: DataServiceDataRequest): Fox[Array[Byte]] =
+      if (isNecessary) datasetErrorLoggingService match {
+        case Some(value) =>
+          value.withErrorLogging(request.dataSource.id, "converting bucket data", conversionFunc(inputArray))
+        case None => conversionFunc(inputArray)
+      } else Full(inputArray)

     val requestsCount = requests.length
     val requestData = requests.zipWithIndex.map {
       case (request, index) =>
         for {
           data <- handleDataRequest(request)
-          mappedData <- agglomerateServiceOpt.map { agglomerateService =>
+          mappedDataFox <- agglomerateServiceOpt.map { agglomerateService =>
             convertIfNecessary(
               request.settings.appliedAgglomerate.isDefined && request.dataLayer.category == Category.segmentation && request.cuboid.mag.maxDim <= MaxMagForAgglomerateMapping,
               data,
-              agglomerateService.applyAgglomerate(request)
+              agglomerateService.applyAgglomerate(request),
+              request
             )
-          }.getOrElse(Full(data)) ?~> "Failed to apply agglomerate mapping"
-          resultData <- convertIfNecessary(request.settings.halfByte, mappedData, convertToHalfByte)
+          }.fillEmpty(Fox.successful(data)) ?~> "Failed to apply agglomerate mapping"
+          mappedData <- mappedDataFox
+          resultData <- convertIfNecessary(request.settings.halfByte, mappedData, convertToHalfByte, request)
         } yield (resultData, index)
     }

@@ -91,28 +97,14 @@
       val bucketProvider =
         bucketProviderCache.getOrLoadAndPut((dataSourceId, request.dataLayer.bucketProviderCacheKey))(_ =>
           request.dataLayer.bucketProvider(remoteSourceDescriptorServiceOpt, dataSourceId, sharedChunkContentsCache))
-      bucketProvider.load(readInstruction).futureBox.flatMap {
-        case Failure(msg, Full(e: InternalError), _) =>
-          applicationHealthService.foreach(a => a.pushError(e))
-          logger.error(
-            s"Caught internal error: $msg while loading a bucket for layer ${request.dataLayer.name} of dataset ${request.dataSource.id}")
-          Fox.failure(e.getMessage)
-        case f: Failure =>
-          if (datasetErrorLoggingService.exists(_.shouldLog(request.dataSource.id.team, request.dataSource.id.name))) {
-            logger.error(
-              s"Bucket loading for layer ${request.dataLayer.name} of dataset ${request.dataSource.id.team}/${request.dataSource.id.name} at ${readInstruction.bucket} failed: ${Fox
-                .failureChainAsString(f, includeStackTraces = true)}")
-            datasetErrorLoggingService.foreach(_.registerLogged(request.dataSource.id.team, request.dataSource.id.name))
-          }
-          f.toFox
-        case Full(data) =>
-          if (data.length == 0) {
-            val msg =
-              s"Bucket provider returned Full, but data is zero-length array. Layer ${request.dataLayer.name} of dataset ${request.dataSource.id}, ${request.cuboid}"
-            logger.warn(msg)
-            Fox.failure(msg)
-          } else Fox.successful(data)
-        case other => other.toFox
+      datasetErrorLoggingService match {
+        case Some(d) =>
+          d.withErrorLogging(
+            request.dataSource.id,
+            s"loading bucket for layer ${request.dataLayer.name} at ${readInstruction.bucket}, cuboid: ${request.cuboid}",
+            bucketProvider.load(readInstruction)
+          )
+        case None => bucketProvider.load(readInstruction)
       }
     } else Fox.empty

@@ -197,5 +189,4 @@ class BinaryDataService(val dataBaseDir: Path,

     (closedAgglomerateFileHandleCount, clearedBucketProviderCount, removedChunksCount)
   }
-
 }
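
The diff above replaces ad-hoc pattern matching on Box/Failure with a single call into DatasetErrorLoggingService.withErrorLogging. For illustration, here is a minimal, self-contained sketch of that wrap-and-log pattern using scala.concurrent.Future and scala.util.Try in place of the internal Fox type; SimpleErrorLogger and its println logging are hypothetical stand-ins, not code from this repository.

import scala.concurrent.{ExecutionContext, Future}
import scala.util.{Failure, Success}

// Hypothetical stand-in for DatasetErrorLoggingService.withErrorLogging:
// run an effect, log failures (and suspicious empty results) in one place,
// and pass the outcome through unchanged.
class SimpleErrorLogger {
  def withErrorLogging(datasetId: String, label: String, result: Future[Array[Byte]])(
      implicit ec: ExecutionContext): Future[Array[Byte]] =
    result.transformWith {
      case Success(data) if data.isEmpty =>
        val msg = s"Zero-length array returned while $label for $datasetId"
        println(s"WARN  $msg") // the real service uses a rate-limited logger
        Future.failed(new Exception(msg))
      case Success(data) => Future.successful(data)
      case Failure(e) =>
        println(s"ERROR Error while $label for $datasetId: ${e.getMessage}")
        Future.failed(e)
    }
}

// Usage mirroring the new convertIfNecessary: wrap only when a logger is wired in.
def convertIfNecessary(isNecessary: Boolean,
                       input: Array[Byte],
                       convert: Array[Byte] => Future[Array[Byte]],
                       loggerOpt: Option[SimpleErrorLogger])(implicit ec: ExecutionContext): Future[Array[Byte]] =
  if (isNecessary) loggerOpt match {
    case Some(l) => l.withErrorLogging("demo/dataset", "converting bucket data", convert(input))
    case None    => convert(input)
  } else Future.successful(input)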

webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/BinaryDataServiceHolder.scala

Lines changed: 0 additions & 2 deletions

@@ -22,7 +22,6 @@ import scala.concurrent.ExecutionContext
 class BinaryDataServiceHolder @Inject()(
     config: DataStoreConfig,
     agglomerateService: AgglomerateService,
-    applicationHealthService: ApplicationHealthService,
     remoteSourceDescriptorService: RemoteSourceDescriptorService,
     datasetErrorLoggingService: DatasetErrorLoggingService)(implicit ec: ExecutionContext)
     extends LazyLogging {

@@ -46,7 +45,6 @@ class BinaryDataServiceHolder @Inject()(
     Paths.get(config.Datastore.baseFolder),
     Some(agglomerateService),
     Some(remoteSourceDescriptorService),
-    Some(applicationHealthService),
     Some(sharedChunkContentsCache),
     Some(datasetErrorLoggingService)
   )

webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/DatasetErrorLoggingService.scala

Lines changed: 37 additions & 0 deletions

@@ -2,8 +2,12 @@ package com.scalableminds.webknossos.datastore.services

 import org.apache.pekko.actor.ActorSystem
 import com.google.inject.name.Named
+import com.scalableminds.util.tools.{Fox, TextUtils}
+import com.scalableminds.util.tools.Fox.box2Fox
 import com.scalableminds.webknossos.datastore.helpers.IntervalScheduler
+import com.scalableminds.webknossos.datastore.models.datasource.DataSourceId
 import com.typesafe.scalalogging.LazyLogging
+import net.liftweb.common.{Empty, Failure, Full}
 import play.api.inject.ApplicationLifecycle

 import javax.inject.Inject

@@ -12,6 +16,7 @@ import scala.concurrent.duration._

 class DatasetErrorLoggingService @Inject()(
     val lifecycle: ApplicationLifecycle,
+    val applicationHealthService: ApplicationHealthService,
     @Named("webknossos-datastore") val system: ActorSystem)(implicit val ec: ExecutionContext)
     extends IntervalScheduler
     with LazyLogging {

@@ -41,4 +46,36 @@
     recentErrors.remove((organizationId, datasetName))

   override protected def tick(): Unit = recentErrors.clear()
+
+  def withErrorLogging(dataSourceId: DataSourceId, label: String, resultFox: Fox[Array[Byte]]): Fox[Array[Byte]] =
+    resultFox.futureBox.flatMap {
+      case Full(data) =>
+        if (data.length == 0) {
+          val msg = s"Zero-length array returned while $label for $dataSourceId"
+          if (shouldLog(dataSourceId.team, dataSourceId.name)) {
+            logger.warn(msg)
+            registerLogged(dataSourceId.team, dataSourceId.name)
+          }
+          Fox.failure(msg)
+        } else {
+          Fox.successful(data)
+        }
+      case Failure(msg, Full(e: InternalError), _) =>
+        logger.error(s"Caught internal error while $label for $dataSourceId:", e)
+        applicationHealthService.pushError(e)
+        Fox.failure(msg, Full(e))
+      case Failure(msg, Full(exception), _) =>
+        if (shouldLog(dataSourceId.team, dataSourceId.name)) {
+          logger.error(s"Error while $label for $dataSourceId Stack trace: ${TextUtils.stackTraceAsString(exception)} ")
+          registerLogged(dataSourceId.team, dataSourceId.name)
+        }
+        Fox.failure(msg, Full(exception))
+      case Failure(msg, Empty, _) =>
+        if (shouldLog(dataSourceId.team, dataSourceId.name)) {
+          logger.error(s"Error while $label for $dataSourceId, Empty failure")
+          registerLogged(dataSourceId.team, dataSourceId.name)
+        }
+        Fox.failure(msg)
+      case other => other.toFox
+    }
 }
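
The new withErrorLogging relies on the pre-existing shouldLog / registerLogged pair to avoid log spam: each (organization, dataset) pair is logged at most once until the scheduled tick() clears the set. Below is a minimal sketch of that deduplication mechanism; the name RateLimitedLog and the println calls are hypothetical, and the real service drives clear() periodically via its IntervalScheduler mixin.

import scala.collection.mutable

// Hypothetical sketch of the per-dataset log deduplication in
// DatasetErrorLoggingService: suppress repeated messages for the same
// dataset until the next periodic reset.
class RateLimitedLog {
  private val recentErrors = mutable.Set[(String, String)]()

  def shouldLog(organizationId: String, datasetName: String): Boolean =
    !recentErrors.contains((organizationId, datasetName))

  def registerLogged(organizationId: String, datasetName: String): Unit =
    recentErrors.add((organizationId, datasetName))

  // The real service runs this as tick() from IntervalScheduler.
  def clear(): Unit = recentErrors.clear()

  def logOnce(organizationId: String, datasetName: String, msg: String): Unit =
    if (shouldLog(organizationId, datasetName)) {
      println(s"ERROR $msg") // stands in for logger.error
      registerLogged(organizationId, datasetName)
    }
}

Note that, per the diff above, InternalError failures bypass this deduplication: they are always logged and are additionally pushed to applicationHealthService.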

webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingService.scala

Lines changed: 1 addition & 1 deletion

@@ -103,7 +103,7 @@ class EditableMappingService @Inject()(

   private def generateId: String = UUID.randomUUID.toString

-  val binaryDataService = new BinaryDataService(Paths.get(""), None, None, None, None, None)
+  val binaryDataService = new BinaryDataService(Paths.get(""), None, None, None, None)
   adHocMeshServiceHolder.tracingStoreAdHocMeshConfig = (binaryDataService, 30 seconds, 1)
   private val adHocMeshService: AdHocMeshService = adHocMeshServiceHolder.tracingStoreAdHocMeshService

webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala

Lines changed: 1 addition & 1 deletion

@@ -88,7 +88,7 @@ class VolumeTracingService @Inject()(

   /* We want to reuse the bucket loading methods from binaryDataService for the volume tracings, however, it does not
     actually load anything from disk, unlike its “normal” instance in the datastore (only from the volume tracing store) */
-  private val binaryDataService = new BinaryDataService(Paths.get(""), None, None, None, None, None)
+  private val binaryDataService = new BinaryDataService(Paths.get(""), None, None, None, None)

   adHocMeshServiceHolder.tracingStoreAdHocMeshConfig = (binaryDataService, 30 seconds, 1)
   val adHocMeshService: AdHocMeshService = adHocMeshServiceHolder.tracingStoreAdHocMeshService
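
Since applicationHealthService is no longer a constructor parameter of BinaryDataService, both tracingstore call sites now pass one fewer None. For orientation, here is the updated call with each positional argument annotated against the constructor shown earlier; the comments are added here and are not present in the source.

val binaryDataService = new BinaryDataService(
  Paths.get(""), // dataBaseDir: the tracingstore loads buckets from its own store, not from disk
  None,          // agglomerateServiceOpt
  None,          // remoteSourceDescriptorServiceOpt
  None,          // sharedChunkContentsCache
  None           // datasetErrorLoggingService: no rate-limited error logging here
)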
