Skip to content

Commit 5ace6e2

Browse files
Allow to continue upload after browser restart for same user (#7981)
* implement test route checking if chunk is present * WIP: Implement ongoing uploads listing route * implement first version of robust resumable upload * update build backend script command to remove autogenerated message files * fix frontend typing * refactor code * format backend * fix test file route * clear uploadId after successful upload in frontend; reenable checking for duplicate dataset names when not continuing an upload * format backend * apply feedback * fix expected format from backend when requesting ongoinguploads * apply feedback * remove unused import * consistent renaming to unfinished uploads * add sanity check that file names must be equal to initial upload * do not require same order of files * include file names in error to make searching for these file more easy and less guessy * fix "files not matching initial upload try" error message * add changelog entry --------- Co-authored-by: Michael Büßemeyer <MichaelBuessemeyer@users.noreply.github.com>
1 parent dd118b2 commit 5ace6e2

18 files changed

+648
-294
lines changed

CHANGELOG.unreleased.md

+1
Original file line numberDiff line numberDiff line change
@@ -29,6 +29,7 @@ For upgrade instructions, please check the [migration guide](MIGRATIONS.released
2929
- The AI-based Quick Select can now be triggered with a single click. Drawing a rectangle is still supported. [#7993](https://github.com/scalableminds/webknossos/pull/7993)
3030
- To improve performance, only the visible bounding boxes are rendered in the bounding box tab (so-called virtualization). [#7974](https://github.com/scalableminds/webknossos/pull/7974)
3131
- Added support for reading zstd-compressed zarr2 datasets [#7964](https://github.com/scalableminds/webknossos/pull/7964)
32+
- Added the option to resume an unfinished upload even after browser restarts. [#7981](https://github.com/scalableminds/webknossos/pull/7981)
3233
- The alignment job is in a separate tab of the "AI Tools" now. The "Align Sections" AI job now supports including manually created matches between adjacent section given as skeletons. [#7967](https://github.com/scalableminds/webknossos/pull/7967)
3334
- Added `api.tracing.createNode(position, options)` to the front-end API. [#7998](https://github.com/scalableminds/webknossos/pull/7998)
3435
- Added a feature to register all segments for a given bounding box at once via the context menu of the bounding box. [#7979](https://github.com/scalableminds/webknossos/pull/7979)

app/controllers/DatasetController.scala

+1-1
Original file line numberDiff line numberDiff line change
@@ -187,7 +187,7 @@ class DatasetController @Inject()(userService: UserService,
187187
searchQuery,
188188
request.identity.map(_._id),
189189
recursive.getOrElse(false),
190-
limit
190+
limitOpt = limit
191191
)
192192
} yield Json.toJson(datasetInfos)
193193
} else {

app/controllers/WKRemoteDataStoreController.scala

+32-1
Original file line numberDiff line numberDiff line change
@@ -3,10 +3,11 @@ package controllers
33
import com.scalableminds.util.accesscontext.{AuthorizedAccessContext, GlobalAccessContext}
44
import com.scalableminds.util.tools.Fox
55
import com.scalableminds.webknossos.datastore.controllers.JobExportProperties
6+
import com.scalableminds.webknossos.datastore.models.UnfinishedUpload
67
import com.scalableminds.webknossos.datastore.models.datasource.DataSourceId
78
import com.scalableminds.webknossos.datastore.models.datasource.inbox.{InboxDataSourceLike => InboxDataSource}
8-
import com.scalableminds.webknossos.datastore.services.uploading.{LinkedLayerIdentifier, ReserveUploadInformation}
99
import com.scalableminds.webknossos.datastore.services.DataStoreStatus
10+
import com.scalableminds.webknossos.datastore.services.uploading.{LinkedLayerIdentifier, ReserveUploadInformation}
1011
import com.typesafe.scalalogging.LazyLogging
1112
import mail.{MailchimpClient, MailchimpTag}
1213

@@ -19,6 +20,7 @@ import models.folder.FolderDAO
1920
import models.job.JobDAO
2021
import models.organization.OrganizationDAO
2122
import models.storage.UsedStorageService
23+
import models.team.TeamDAO
2224
import models.user.{MultiUserDAO, User, UserDAO, UserService}
2325
import net.liftweb.common.Full
2426
import play.api.i18n.{Messages, MessagesProvider}
@@ -41,6 +43,7 @@ class WKRemoteDataStoreController @Inject()(
4143
datasetDAO: DatasetDAO,
4244
userDAO: UserDAO,
4345
folderDAO: FolderDAO,
46+
teamDAO: TeamDAO,
4447
jobDAO: JobDAO,
4548
multiUserDAO: MultiUserDAO,
4649
credentialDAO: CredentialDAO,
@@ -82,6 +85,34 @@ class WKRemoteDataStoreController @Inject()(
8285
}
8386
}
8487

88+
def getUnfinishedUploadsForUser(name: String,
89+
key: String,
90+
token: String,
91+
organizationName: String): Action[AnyContent] =
92+
Action.async { implicit request =>
93+
dataStoreService.validateAccess(name, key) { _ =>
94+
for {
95+
user <- bearerTokenService.userForToken(token)
96+
organization <- organizationDAO.findOneByName(organizationName)(GlobalAccessContext) ?~> Messages(
97+
"organization.notFound",
98+
user._organization) ~> NOT_FOUND
99+
_ <- bool2Fox(organization._id == user._organization) ?~> "notAllowed" ~> FORBIDDEN
100+
datasets <- datasetService.getAllUnfinishedDatasetUploadsOfUser(user._id, user._organization)(
101+
GlobalAccessContext) ?~> "dataset.upload.couldNotLoadUnfinishedUploads"
102+
teamIdsPerDataset <- Fox.combined(datasets.map(dataset => teamDAO.findAllowedTeamIdsForDataset(dataset.id)))
103+
unfinishedUploads = datasets.zip(teamIdsPerDataset).map {
104+
case (d, teamIds) =>
105+
new UnfinishedUpload("<filled-in by datastore>",
106+
d.dataSourceId,
107+
d.folderId.toString,
108+
d.created,
109+
None, // Filled by datastore.
110+
teamIds.map(_.toString))
111+
}
112+
} yield Ok(Json.toJson(unfinishedUploads))
113+
}
114+
}
115+
85116
private def validateLayerToLink(layerIdentifier: LinkedLayerIdentifier,
86117
requestingUser: User)(implicit ec: ExecutionContext, m: MessagesProvider): Fox[Unit] =
87118
for {

app/models/dataset/Dataset.scala

+29-13
Original file line numberDiff line numberDiff line change
@@ -15,6 +15,7 @@ import com.scalableminds.webknossos.datastore.models.datasource.{
1515
Category,
1616
CoordinateTransformation,
1717
CoordinateTransformationType,
18+
DataSourceId,
1819
ElementClass,
1920
ThinPlateSplineCorrespondences,
2021
DataLayerLike => DataLayer
@@ -79,7 +80,9 @@ case class DatasetCompactInfo(
7980
isUnreported: Boolean,
8081
colorLayerNames: List[String],
8182
segmentationLayerNames: List[String],
82-
)
83+
) {
84+
def dataSourceId = new DataSourceId(name, owningOrganization)
85+
}
8386

8487
object DatasetCompactInfo {
8588
implicit val jsonFormat: Format[DatasetCompactInfo] = Json.format[DatasetCompactInfo]
@@ -211,29 +214,36 @@ class DatasetDAO @Inject()(sqlClient: SqlClient, datasetLayerDAO: DatasetLayerDA
211214
folderIdOpt,
212215
uploaderIdOpt,
213216
searchQuery,
214-
includeSubfolders)
217+
includeSubfolders,
218+
None,
219+
None)
215220
limitQuery = limitOpt.map(l => q"LIMIT $l").getOrElse(q"")
216221
r <- run(q"SELECT $columns FROM $existingCollectionName WHERE $selectionPredicates $limitQuery".as[DatasetsRow])
217222
parsed <- parseAll(r)
218223
} yield parsed
219224

220-
def findAllCompactWithSearch(isActiveOpt: Option[Boolean],
221-
isUnreported: Option[Boolean],
222-
organizationIdOpt: Option[ObjectId],
223-
folderIdOpt: Option[ObjectId],
224-
uploaderIdOpt: Option[ObjectId],
225-
searchQuery: Option[String],
226-
requestingUserIdOpt: Option[ObjectId],
227-
includeSubfolders: Boolean,
228-
limitOpt: Option[Int])(implicit ctx: DBAccessContext): Fox[List[DatasetCompactInfo]] =
225+
def findAllCompactWithSearch(isActiveOpt: Option[Boolean] = None,
226+
isUnreported: Option[Boolean] = None,
227+
organizationIdOpt: Option[ObjectId] = None,
228+
folderIdOpt: Option[ObjectId] = None,
229+
uploaderIdOpt: Option[ObjectId] = None,
230+
searchQuery: Option[String] = None,
231+
requestingUserIdOpt: Option[ObjectId] = None,
232+
includeSubfolders: Boolean = false,
233+
statusOpt: Option[String] = None,
234+
createdSinceOpt: Option[Instant] = None,
235+
limitOpt: Option[Int] = None,
236+
)(implicit ctx: DBAccessContext): Fox[List[DatasetCompactInfo]] =
229237
for {
230238
selectionPredicates <- buildSelectionPredicates(isActiveOpt,
231239
isUnreported,
232240
organizationIdOpt,
233241
folderIdOpt,
234242
uploaderIdOpt,
235243
searchQuery,
236-
includeSubfolders)
244+
includeSubfolders,
245+
statusOpt,
246+
createdSinceOpt)
237247
limitQuery = limitOpt.map(l => q"LIMIT $l").getOrElse(q"")
238248
query = q"""
239249
SELECT
@@ -325,7 +335,9 @@ class DatasetDAO @Inject()(sqlClient: SqlClient, datasetLayerDAO: DatasetLayerDA
325335
folderIdOpt: Option[ObjectId],
326336
uploaderIdOpt: Option[ObjectId],
327337
searchQuery: Option[String],
328-
includeSubfolders: Boolean)(implicit ctx: DBAccessContext): Fox[SqlToken] =
338+
includeSubfolders: Boolean,
339+
statusOpt: Option[String],
340+
createdSinceOpt: Option[Instant])(implicit ctx: DBAccessContext): Fox[SqlToken] =
329341
for {
330342
accessQuery <- readAccessQuery
331343
folderPredicate = folderIdOpt match {
@@ -339,6 +351,8 @@ class DatasetDAO @Inject()(sqlClient: SqlClient, datasetLayerDAO: DatasetLayerDA
339351
organizationPredicate = organizationIdOpt
340352
.map(organizationId => q"_organization = $organizationId")
341353
.getOrElse(q"TRUE")
354+
statusPredicate = statusOpt.map(status => q"status = $status").getOrElse(q"TRUE")
355+
createdSincePredicate = createdSinceOpt.map(createdSince => q"created >= $createdSince").getOrElse(q"TRUE")
342356
searchPredicate = buildSearchPredicate(searchQuery)
343357
isUnreportedPredicate = buildIsUnreportedPredicate(isUnreported)
344358
} yield q"""
@@ -348,6 +362,8 @@ class DatasetDAO @Inject()(sqlClient: SqlClient, datasetLayerDAO: DatasetLayerDA
348362
AND ($isActivePredicate)
349363
AND ($isUnreportedPredicate)
350364
AND ($organizationPredicate)
365+
AND ($statusPredicate)
366+
AND ($createdSincePredicate)
351367
AND $accessQuery
352368
"""
353369

app/models/dataset/DatasetService.scala

+15-2
Original file line numberDiff line numberDiff line change
@@ -24,6 +24,7 @@ import security.RandomIDGenerator
2424
import utils.{ObjectId, WkConf}
2525

2626
import javax.inject.Inject
27+
import scala.concurrent.duration._
2728
import scala.concurrent.{ExecutionContext, Future}
2829

2930
class DatasetService @Inject()(organizationDAO: OrganizationDAO,
@@ -69,6 +70,18 @@ class DatasetService @Inject()(organizationDAO: OrganizationDAO,
6970
createDataset(dataStore, organizationName, unreportedDatasource)
7071
}
7172

73+
def getAllUnfinishedDatasetUploadsOfUser(userId: ObjectId, organizationId: ObjectId)(
74+
implicit ctx: DBAccessContext): Fox[List[DatasetCompactInfo]] =
75+
datasetDAO.findAllCompactWithSearch(
76+
uploaderIdOpt = Some(userId),
77+
organizationIdOpt = Some(organizationId),
78+
isActiveOpt = Some(false),
79+
includeSubfolders = true,
80+
statusOpt = Some(notYetUploadedStatus),
81+
// Only list pending uploads since the two last weeks.
82+
createdSinceOpt = Some(Instant.now - (14 days))
83+
) ?~> "dataset.list.fetchFailed"
84+
7285
private def createDataset(
7386
dataStore: DataStore,
7487
owningOrganization: String,
@@ -82,14 +95,14 @@ class DatasetService @Inject()(organizationDAO: OrganizationDAO,
8295
val dataSourceHash = if (dataSource.isUsable) Some(dataSource.hashCode()) else None
8396
for {
8497
organization <- organizationDAO.findOneByName(owningOrganization)
85-
orbanizationRootFolder <- folderDAO.findOne(organization._rootFolder)
98+
organizationRootFolder <- folderDAO.findOne(organization._rootFolder)
8699
dataset = Dataset(
87100
newId,
88101
dataStore.name,
89102
organization._id,
90103
publication,
91104
None,
92-
orbanizationRootFolder._id,
105+
organizationRootFolder._id,
93106
dataSourceHash,
94107
dataSource.defaultViewConfiguration,
95108
adminViewConfiguration = None,

conf/messages

+1
Original file line numberDiff line numberDiff line change
@@ -107,6 +107,7 @@ dataset.upload.Datastore.restricted=Your organization is not allowed to upload d
107107
dataset.upload.validation.failed=Failed to validate Dataset information for upload.
108108
dataset.upload.linkRestricted=Can only link layers of datasets that are either public or allowed to be administrated by your account
109109
dataset.upload.invalidLinkedLayers=Could not link all requested layers
110+
dataset.upload.couldNotLoadUnfinishedUploads=Could not load unfinished uploads of user.
110111
dataset.upload.noFiles=Tried to finish upload with no files. May be a retry of a failed finish request, see previous errors.
111112
dataset.upload.storageExceeded=Cannot upload dataset because the storage quota of the organization is exceeded.
112113
dataset.upload.finishFailed=Failed to finalize dataset upload.

conf/webknossos.latest.routes

+1
Original file line numberDiff line numberDiff line change
@@ -108,6 +108,7 @@ PUT /datastores/:name/datasource
108108
PUT /datastores/:name/datasources controllers.WKRemoteDataStoreController.updateAll(name: String, key: String)
109109
PATCH /datastores/:name/status controllers.WKRemoteDataStoreController.statusUpdate(name: String, key: String)
110110
POST /datastores/:name/reserveUpload controllers.WKRemoteDataStoreController.reserveDatasetUpload(name: String, key: String, token: String)
111+
GET /datastores/:name/getUnfinishedUploadsForUser controllers.WKRemoteDataStoreController.getUnfinishedUploadsForUser(name: String, key: String, token: String, organizationName: String)
111112
POST /datastores/:name/reportDatasetUpload controllers.WKRemoteDataStoreController.reportDatasetUpload(name: String, key: String, token: String, datasetName: String, datasetSizeBytes: Long, needsConversion: Boolean, viaAddRoute: Boolean)
112113
POST /datastores/:name/deleteDataset controllers.WKRemoteDataStoreController.deleteDataset(name: String, key: String)
113114
GET /datastores/:name/jobExportProperties controllers.WKRemoteDataStoreController.jobExportProperties(name: String, key: String, jobId: String)

frontend/javascripts/admin/admin_rest_api.ts

+32-1
Original file line numberDiff line numberDiff line change
@@ -1273,7 +1273,7 @@ export function createResumableUpload(datastoreUrl: string, uploadId: string): P
12731273
(token) =>
12741274
// @ts-expect-error ts-migrate(2739) FIXME: Type 'Resumable' is missing the following properti... Remove this comment to see the full error message
12751275
new ResumableJS({
1276-
testChunks: false,
1276+
testChunks: true,
12771277
target: `${datastoreUrl}/data/datasets?token=${token}`,
12781278
chunkSize: 10 * 1024 * 1024, // 10MB
12791279
permanentErrors: [400, 403, 404, 409, 415, 500, 501],
@@ -1291,6 +1291,7 @@ type ReserveUploadInformation = {
12911291
organization: string;
12921292
name: string;
12931293
totalFileCount: number;
1294+
filePaths: Array<string>;
12941295
initialTeams: Array<string>;
12951296
folderId: string | null;
12961297
};
@@ -1307,6 +1308,36 @@ export function reserveDatasetUpload(
13071308
);
13081309
}
13091310

1311+
export type UnfinishedUpload = {
1312+
uploadId: string;
1313+
datasetId: { name: string; organizationName: string };
1314+
folderId: string;
1315+
created: number;
1316+
filePaths: Array<string> | null | undefined;
1317+
allowedTeams: Array<string>;
1318+
};
1319+
1320+
type OldDatasetIdFormat = { name: string; team: string };
1321+
1322+
export function getUnfinishedUploads(
1323+
datastoreHost: string,
1324+
organizationName: string,
1325+
): Promise<UnfinishedUpload[]> {
1326+
return doWithToken(async (token) => {
1327+
const unfinishedUploads = (await Request.receiveJSON(
1328+
`/data/datasets/getUnfinishedUploads?token=${token}&organizationName=${organizationName}`,
1329+
{
1330+
host: datastoreHost,
1331+
},
1332+
)) as Array<UnfinishedUpload & { dataSourceId: OldDatasetIdFormat }>;
1333+
// Rename "team" to "organization" as this is the actual used current naming.
1334+
return unfinishedUploads.map(({ dataSourceId: { name, team }, ...rest }) => ({
1335+
...rest,
1336+
datasetId: { name, organizationName: team },
1337+
}));
1338+
});
1339+
}
1340+
13101341
export function finishDatasetUpload(
13111342
datastoreHost: string,
13121343
uploadInformation: ArbitraryObject,

frontend/javascripts/admin/dataset/dataset_components.tsx

+17-2
Original file line numberDiff line numberDiff line change
@@ -88,30 +88,36 @@ export function DatasetNameFormItem({
8888
activeUser,
8989
initialName,
9090
label,
91+
allowDuplicate,
92+
disabled,
9193
}: {
9294
activeUser: APIUser | null | undefined;
9395
initialName?: string;
9496
label?: string;
97+
allowDuplicate?: boolean;
98+
disabled?: boolean;
9599
}) {
96100
return (
97101
<FormItem
98102
name="name"
99103
label={label || "Dataset Name"}
100104
hasFeedback
101105
initialValue={initialName}
102-
rules={getDatasetNameRules(activeUser)}
106+
rules={getDatasetNameRules(activeUser, !allowDuplicate)}
103107
validateFirst
104108
>
105-
<Input />
109+
<Input disabled={disabled} />
106110
</FormItem>
107111
);
108112
}
109113
export function DatastoreFormItem({
110114
datastores,
111115
hidden,
116+
disabled,
112117
}: {
113118
datastores: Array<APIDataStore>;
114119
hidden?: boolean;
120+
disabled?: boolean;
115121
}) {
116122
return (
117123
<FormItem
@@ -131,6 +137,7 @@ export function DatastoreFormItem({
131137
showSearch
132138
placeholder="Select a Datastore"
133139
optionFilterProp="label"
140+
disabled={disabled}
134141
style={{
135142
width: "100%",
136143
}}
@@ -147,12 +154,16 @@ export function AllowedTeamsFormItem({
147154
isDatasetManagerOrAdmin,
148155
selectedTeams,
149156
setSelectedTeams,
157+
afterFetchedTeams,
150158
formRef,
159+
disabled,
151160
}: {
152161
isDatasetManagerOrAdmin: boolean;
153162
selectedTeams: APITeam | Array<APITeam>;
154163
setSelectedTeams: (teams: APITeam | Array<APITeam>) => void;
164+
afterFetchedTeams?: (arg0: Array<APITeam>) => void;
155165
formRef: React.RefObject<FormInstance<any>>;
166+
disabled?: boolean;
156167
}) {
157168
return (
158169
<FormItemWithInfo
@@ -165,6 +176,7 @@ export function AllowedTeamsFormItem({
165176
mode="multiple"
166177
value={selectedTeams}
167178
allowNonEditableTeams={isDatasetManagerOrAdmin}
179+
disabled={disabled}
168180
onChange={(selectedTeams) => {
169181
if (formRef.current == null) return;
170182

@@ -179,6 +191,9 @@ export function AllowedTeamsFormItem({
179191
setSelectedTeams(selectedTeams);
180192
}}
181193
afterFetchedTeams={(fetchedTeams) => {
194+
if (afterFetchedTeams) {
195+
afterFetchedTeams(fetchedTeams);
196+
}
182197
if (!features().isWkorgInstance) {
183198
return;
184199
}

0 commit comments

Comments
 (0)