3 changes: 2 additions & 1 deletion client/dive-common/apispec.ts
@@ -152,7 +152,8 @@ interface Api {
// Non-Endpoint shared functions
openFromDisk(datasetType: DatasetType | 'calibration' | 'annotation' | 'text' | 'zip', directory?: boolean):
Promise<{canceled?: boolean; filePaths: string[]; fileList?: File[]; root?: string}>;
importAnnotationFile(id: string, path: string, file?: File): Promise<boolean>;
importAnnotationFile(id: string, path: string, file?: File,
additive?: boolean, additivePrepend?: string): Promise<boolean>;
}
const ApiSymbol = Symbol('api');

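The two new arguments are optional, so existing implementations and call sites keep compiling. A minimal usage sketch of the widened signature (hypothetical caller, not part of this change; `api` is assumed to be an injected implementation of the Api interface above):

// Hypothetical caller: merge the file's annotations into the dataset instead of
// overwriting, and tag every imported type, e.g. "fish" becomes "imported_fish".
async function importAndMerge(api: Api, datasetId: string, path: string, file?: File) {
  const success = await api.importAnnotationFile(datasetId, path, file, true, 'imported');
  return success;
}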
67 changes: 55 additions & 12 deletions client/dive-common/components/ImportAnnotations.vue
@@ -35,6 +35,8 @@ export default defineComponent({
const { prompt } = usePrompt();
const processing = ref(false);
const menuOpen = ref(false);
const additive = ref(false);
const additivePrepend = ref('');
const openUpload = async () => {
try {
const ret = await openFromDisk('annotation');
@@ -44,9 +46,21 @@
let importFile = false;
processing.value = true;
if (ret.fileList?.length) {
importFile = await importAnnotationFile(props.datasetId, path, ret.fileList[0]);
importFile = await importAnnotationFile(
props.datasetId,
path,
ret.fileList[0],
additive.value,
additivePrepend.value,
);
} else {
importFile = await importAnnotationFile(props.datasetId, path);
importFile = await importAnnotationFile(
props.datasetId,
path,
undefined,
additive.value,
additivePrepend.value,
);
}

if (importFile) {
@@ -68,6 +82,8 @@
openUpload,
processing,
menuOpen,
additive,
additivePrepend,
};
},
});
@@ -133,16 +149,43 @@ export default defineComponent({
target="_blank"
>Data Format Documentation</a>
</v-card-text>
<v-card-actions>
<v-btn
depressed
block
:disabled="!datasetId || processing"
@click="openUpload"
>
Import
</v-btn>
</v-card-actions>
<v-container>
<v-col>
<v-row>
<v-btn
depressed
block
:disabled="!datasetId || processing"
@click="openUpload"
>
Import
</v-btn>
</v-row>
<v-row>
<v-checkbox
:input-value="!additive"
label="Overwrite"
@change="additive = !$event"
/>
</v-row>
<div v-if="additive">
<div
v-if="additive"
class="pa-2"
>
Imported annotations will be added to existing annotations.
</div>
<div class="pa-2">
The types can be modified to have a prepended value for comparison.
</div>
<v-text-field
v-model="additivePrepend"
label="Prepend to types"
clearable
/>
</div>
</v-col>
</v-container>
</v-card>
</template>
</v-menu>
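Note that the checkbox above is labelled "Overwrite" while the state it drives is additive, so the binding is inverted in both directions. A condensed sketch of that mapping (plain TypeScript, assumed behaviour rather than the actual component code):

// "Overwrite" checked   -> additive = false (previous default behaviour)
// "Overwrite" unchecked -> additive = true  (merge into existing annotations)
let additive = false;
function onOverwriteChange(checked: boolean) {
  additive = !checked;
}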
6 changes: 4 additions & 2 deletions client/platform/desktop/backend/ipcService.ts
@@ -95,8 +95,10 @@ export default function register() {
return ret;
});

ipcMain.handle('import-annotation', async (event, { id, path }: { id: string; path: string }) => {
const ret = await common.dataFileImport(settings.get(), id, path);
ipcMain.handle('import-annotation', async (event, {
id, path, additive, additivePrepend,
}: { id: string; path: string; additive: boolean; additivePrepend: string }) => {
const ret = await common.dataFileImport(settings.get(), id, path, additive, additivePrepend);
return ret;
});

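Both ends of the 'import-annotation' channel now exchange the same payload shape; the renderer half is the change to client/platform/desktop/frontend/api.ts shown further down. Collected in one place as a sketch (names taken from this diff; the wiring itself is assumed):

// Payload shared by ipcRenderer.invoke(...) and ipcMain.handle(...) for 'import-annotation'.
interface ImportAnnotationArgs {
  id: string;
  path: string;
  additive: boolean;
  additivePrepend: string;
}
// Renderer (frontend/api.ts):
//   ipcRenderer.invoke('import-annotation', { id, path, additive, additivePrepend });
// Main process (ipcService.ts):
//   ipcMain.handle('import-annotation', async (_event, args: ImportAnnotationArgs) =>
//     common.dataFileImport(settings.get(), args.id, args.path, args.additive, args.additivePrepend));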
37 changes: 35 additions & 2 deletions client/platform/desktop/backend/native/common.ts
@@ -548,6 +548,8 @@ async function _ingestFilePath(
datasetId: string,
path: string,
imageMap?: Map<string, number>,
additive = false,
additivePrepend = '',
): Promise<(DatasetMetaMutable & { fps?: number }) | null> {
if (!fs.existsSync(path)) {
return null;
@@ -588,6 +590,32 @@
} else if (YAMLFileName.test(path)) {
annotations = await kpf.parse([path]);
}
// If it is additive we need to re-ID tracks and do additive prepends
if (additive) {
// Load previous data
const existing = await loadDetections(settings, datasetId);
const { tracks } = existing;
let maxTrackId = -1;
Object.values(tracks).forEach((item) => {
maxTrackId = Math.max(item.id, maxTrackId);
});
maxTrackId += 1;
const newTracks = Object.values(annotations.tracks);
for (let i = 0; i < newTracks.length; i += 1) {
const newTrack = newTracks[i];
newTrack.id += maxTrackId;
if (additivePrepend !== '') {
const { confidencePairs } = newTrack;
for (let k = 0; k < confidencePairs.length; k += 1) {
confidencePairs[k] = [`${additivePrepend}_${confidencePairs[k][0]}`, confidencePairs[k][1]];
}
newTrack.confidencePairs = confidencePairs;
}
existing.tracks[newTrack.id] = newTrack;
}
annotations.tracks = existing.tracks;
}

if (Object.values(annotations.tracks).length || Object.values(annotations.groups).length) {
const processed = processTrackAttributes(Object.values(annotations.tracks));
meta.attributes = processed.attributes;
@@ -618,6 +646,8 @@ async function ingestDataFiles(
absPaths: string[],
multiCamResults?: Record<string, string>,
imageMap?: Map<string, number>,
additive = false,
additivePrepend = '',
): Promise<{
processedFiles: string[];
meta: DatasetMetaMutable & { fps?: number };
@@ -628,7 +658,9 @@
for (let i = 0; i < absPaths.length; i += 1) {
const path = absPaths[i];
// eslint-disable-next-line no-await-in-loop
const newMeta = await _ingestFilePath(settings, datasetId, path, imageMap);
const newMeta = await _ingestFilePath(
settings, datasetId, path, imageMap, additive, additivePrepend,
);
if (newMeta !== null) {
merge(meta, newMeta);
processedFiles.push(path);
@@ -876,11 +908,12 @@ function validImageNamesMap(jsonMeta: JsonMeta) {
return undefined;
}

async function dataFileImport(settings: Settings, id: string, path: string) {
async function dataFileImport(settings: Settings, id: string, path: string, additive = false, additivePrepend = '') {
const projectDirData = await getValidatedProjectDir(settings, id);
const jsonMeta = await loadJsonMetadata(projectDirData.metaFileAbsPath);
const result = await ingestDataFiles(
settings, id, [path], undefined, validImageNamesMap(jsonMeta),
additive, additivePrepend,
);
merge(jsonMeta, result.meta);
await _saveAsJson(npath.join(projectDirData.basePath, JsonMetaFileName), jsonMeta);
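The additive branch in _ingestFilePath boils down to three steps: offset every incoming track id past the current maximum, optionally prefix each confidence-pair type, and merge the result into the existing track map. A standalone sketch of that logic with simplified types (field names come from this diff; everything else is assumed):

interface SketchTrack {
  id: number;
  confidencePairs: [string, number][];
}

// Minimal sketch of the additive merge; `existing` is mutated and returned,
// mirroring how the merged map is written back onto annotations.tracks above.
function mergeAdditive(
  existing: Record<number, SketchTrack>,
  incoming: SketchTrack[],
  additivePrepend = '',
): Record<number, SketchTrack> {
  // New ids start one past the highest existing id, so nothing collides.
  const offset = Object.values(existing).reduce((max, t) => Math.max(max, t.id), -1) + 1;
  incoming.forEach((track) => {
    const confidencePairs = additivePrepend !== ''
      ? track.confidencePairs.map(([type, conf]): [string, number] => [`${additivePrepend}_${type}`, conf])
      : track.confidencePairs;
    const reIdentified = { ...track, id: track.id + offset, confidencePairs };
    existing[reIdentified.id] = reIdentified;
  });
  return existing;
}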
7 changes: 5 additions & 2 deletions client/platform/desktop/frontend/api.ts
@@ -131,8 +131,11 @@ function importMultiCam(args: MultiCamImportArgs):
return ipcRenderer.invoke('import-multicam-media', { args });
}

function importAnnotationFile(id: string, path: string): Promise<boolean> {
return ipcRenderer.invoke('import-annotation', { id, path });
// eslint-disable-next-line @typescript-eslint/no-unused-vars
function importAnnotationFile(id: string, path: string, _htmlFile = undefined, additive = false, additivePrepend = ''): Promise<boolean> {
return ipcRenderer.invoke('import-annotation', {
id, path, additive, additivePrepend,
});
}

function finalizeImport(args: DesktopMediaImportResponse): Promise<JsonMeta> {
4 changes: 2 additions & 2 deletions client/platform/web-girder/api/dataset.service.ts
@@ -86,7 +86,7 @@ function makeViameFolder({
);
}

async function importAnnotationFile(parentId: string, path: string, file?: HTMLFile) {
async function importAnnotationFile(parentId: string, path: string, file?: HTMLFile, additive = false, additivePrepend = '') {
if (file === undefined) {
return false;
}
@@ -108,7 +108,7 @@ async function importAnnotationFile(parentId: string, path: string, file?: HTMLF
headers: { 'Content-Type': 'application/octet-stream' },
});
if (uploadResponse.status === 200) {
const final = await postProcess(parentId, true);
const final = await postProcess(parentId, true, false, additive, additivePrepend);
return final.status === 200;
}
}
6 changes: 4 additions & 2 deletions client/platform/web-girder/api/rpc.service.ts
@@ -1,9 +1,11 @@
import girderRest from 'platform/web-girder/plugins/girder';
import { Pipe } from 'dive-common/apispec';

function postProcess(folderId: string, skipJobs = false, skipTranscoding = false) {
function postProcess(folderId: string, skipJobs = false, skipTranscoding = false, additive = false, additivePrepend = '') {
return girderRest.post(`dive_rpc/postprocess/${folderId}`, null, {
params: { skipJobs, skipTranscoding },
params: {
skipJobs, skipTranscoding, additive, additivePrepend,
},
});
}

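On the web platform the new options therefore travel as query parameters on the postprocess request. A hypothetical invocation (folder id and prepend value invented for illustration; the exact query-string serialization is handled by the girderRest/axios client and is assumed here):

// Merge the just-uploaded annotation file into folder 'abc123' and
// prefix every imported type with 'imported_'.
postProcess('abc123', true, false, true, 'imported');
// Roughly: POST dive_rpc/postprocess/abc123
//            ?skipJobs=true&skipTranscoding=false&additive=true&additivePrepend=imported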
30 changes: 30 additions & 0 deletions server/dive_server/crud_annotation.py
@@ -304,6 +304,36 @@ def get_annotations(dataset: types.GirderModel, revision: Optional[int] = None):
return annotations


def add_annotations(
dataset: types.GirderModel, new_tracks: dict, prepend='', revision: Optional[int] = None
):
tracks = TrackItem().list(dataset, revision=revision)
annotations: types.DIVEAnnotationSchema = {
'tracks': {},
'groups': {},
'version': constants.AnnotationsCurrentVersion,
}
max_track_id = -1
for t in tracks:
serialized = models.Track(**t).dict(exclude_none=True)
annotations['tracks'][serialized['id']] = serialized
max_track_id = max(max_track_id, serialized['id'])
# Now add in the new tracks while renaming them
for key in new_tracks.keys():
new_id = int(key) + max_track_id + 1
new_id_str = str(new_id)
annotations['tracks'][new_id_str] = new_tracks[key]
annotations['tracks'][new_id_str]['id'] = new_id
if prepend != '':
track = annotations['tracks'][new_id_str]
newPairs = []
for confidencePairs in track['confidencePairs']:
newPairs.append([f'{prepend}_{confidencePairs[0]}', confidencePairs[1]])
annotations['tracks'][new_id_str]['confidencePairs'] = newPairs

return annotations['tracks']


def get_labels(user: types.GirderUserModel, published=False, shared=False):
"""Find all the labels in all datasets belonging to the user"""
accessLevel = AccessType.WRITE
26 changes: 21 additions & 5 deletions server/dive_server/crud_rpc.py
@@ -348,7 +348,9 @@ def _get_data_by_type(
return None


def process_items(folder: types.GirderModel, user: types.GirderUserModel):
def process_items(
folder: types.GirderModel, user: types.GirderUserModel, additive=False, additivePrepend=''
):
"""
Discover unprocessed items in a dataset and process them by type in order of creation
"""
@@ -364,7 +366,10 @@ def process_items(folder: types.GirderModel, user: types.GirderUserModel):
# Processing order: oldest to newest
sort=[("created", pymongo.ASCENDING)],
)
auxiliary = crud.get_or_create_auxiliary_folder(folder, user)
auxiliary = crud.get_or_create_auxiliary_folder(
folder,
user,
)
for item in unprocessed_items:
file: Optional[types.GirderModel] = next(Item().childFiles(item), None)
if file is None:
@@ -386,10 +391,16 @@ def process_items(folder: types.GirderModel, user: types.GirderUserModel):
item['meta'][constants.ProcessedMarker] = True
Item().move(item, auxiliary)
if results['annotations']:
updated_tracks = results['annotations']['tracks'].values()
if additive: # get annotations and add them to the end
tracks = crud_annotation.add_annotations(
folder, results['annotations']['tracks'], additivePrepend
)
updated_tracks = tracks.values()
crud_annotation.save_annotations(
folder,
user,
upsert_tracks=results['annotations']['tracks'].values(),
upsert_tracks=updated_tracks,
upsert_groups=results['annotations']['groups'].values(),
overwrite=True,
description=f'Import {results["type"].name} from {file["name"]}',
Expand All @@ -401,7 +412,12 @@ def process_items(folder: types.GirderModel, user: types.GirderUserModel):


def postprocess(
user: types.GirderUserModel, dsFolder: types.GirderModel, skipJobs: bool, skipTranscoding=False
user: types.GirderUserModel,
dsFolder: types.GirderModel,
skipJobs: bool,
skipTranscoding=False,
additive=False,
additivePrepend='',
) -> types.GirderModel:
"""
Post-processing to be run after media/annotation import
@@ -512,5 +528,5 @@

Folder().save(dsFolder)

process_items(dsFolder, user)
process_items(dsFolder, user, additive, additivePrepend)
return dsFolder
22 changes: 20 additions & 2 deletions server/dive_server/views_rpc.py
@@ -103,6 +103,24 @@ def run_training(self, body, pipelineName, config, annotatedFramesOnly):
default=False,
required=False,
)
.param(
"additive",
"Whether to add new annotations to existing ones",
paramType="formData",
dataType="boolean",
default=False,
required=False,
)
.param(
"additivePrepend",
"When using additive the prepend to types: 'prepend_type'",
paramType="formData",
dataType="string",
default='',
required=False,
)
)
def postprocess(self, folder, skipJobs, skipTranscoding):
return crud_rpc.postprocess(self.getCurrentUser(), folder, skipJobs, skipTranscoding)
def postprocess(self, folder, skipJobs, skipTranscoding, additive, additivePrepend):
return crud_rpc.postprocess(
self.getCurrentUser(), folder, skipJobs, skipTranscoding, additive, additivePrepend
)
3 changes: 2 additions & 1 deletion server/dive_tasks/tasks.py
@@ -426,7 +426,8 @@ def convert_video(
jsoninfo = json.loads(stdout)
videostream = list(filter(lambda x: x["codec_type"] == "video", jsoninfo["streams"]))
if len(videostream) != 1:
raise Exception('Expected 1 video stream, found {}'.format(len(videostream)))
print('Expected 1 video stream, found {}'.format(len(videostream)))
print('Using first Video Stream found')

# Extract average framerate
avgFpsString: str = videostream[0]["avg_frame_rate"]