From 0fe9e2c498ffea63aa1ae61bcd100bc50b9cdcf3 Mon Sep 17 00:00:00 2001
From: melton-jason
Date: Wed, 27 Dec 2023 20:45:31 -0600
Subject: [PATCH 01/71] Generalize CSV file picker
---
.../lib/components/Molecules/FilePicker.tsx | 318 +++++++++++++++++-
.../__tests__/useNotificationsFetch.test.ts | 1 -
.../js_src/lib/components/WbImport/helpers.ts | 2 +-
.../js_src/lib/components/WbImport/index.tsx | 311 +++--------------
4 files changed, 366 insertions(+), 266 deletions(-)
diff --git a/specifyweb/frontend/js_src/lib/components/Molecules/FilePicker.tsx b/specifyweb/frontend/js_src/lib/components/Molecules/FilePicker.tsx
index a914502e3a1..62f8dabfc59 100644
--- a/specifyweb/frontend/js_src/lib/components/Molecules/FilePicker.tsx
+++ b/specifyweb/frontend/js_src/lib/components/Molecules/FilePicker.tsx
@@ -1,11 +1,29 @@
import React from 'react';
+import type { LocalizedString } from 'typesafe-i18n';
+import { useAsyncState } from '../../hooks/useAsyncState';
import { useBooleanState } from '../../hooks/useBooleanState';
+import { useStateForContext } from '../../hooks/useStateForContext';
+import { useTriggerState } from '../../hooks/useTriggerState';
import { attachmentsText } from '../../localization/attachments';
import { commonText } from '../../localization/common';
-import type { RA } from '../../utils/types';
+import { wbText } from '../../localization/workbench';
+import type { GetOrSet, GetSet, RA } from '../../utils/types';
+import { localized } from '../../utils/types';
+import { Container, H2, H3 } from '../Atoms';
+import { Button } from '../Atoms/Button';
import { className } from '../Atoms/className';
+import { Input, Select } from '../Atoms/Form';
import type { TagProps } from '../Atoms/wrapper';
+import {
+ extractHeader,
+ parseCsv,
+ wbImportPreviewSize,
+} from '../WbImport/helpers';
+import { encodings } from '../WorkBench/encodings';
+import { loadingGif } from '.';
+import type { AutoCompleteItem } from './AutoComplete';
+import { AutoComplete } from './AutoComplete';
import { useDragDropFiles } from './useDragDropFiles';
export function FilePicker({
@@ -172,3 +190,301 @@ export const fileToText = async (
fileReader.addEventListener('error', () => reject(fileReader.error));
fileReader.readAsText(file, encoding);
});
+
+export function CsvFilePicker({
+ header,
+ getSetHasHeader,
+ onFileImport: handleFileImport,
+ onFileSelected: handleFileSelected,
+}: {
+ readonly header: LocalizedString;
+ readonly getSetHasHeader: GetSet;
+ readonly onFileImport: (hasHeader: boolean) => void;
+ readonly onFileSelected?: (file: File) => void;
+}): JSX.Element {
+ const [file, setFile] = React.useState();
+
+ return (
+
+ {header}
+
+ {
+ if (typeof handleFileSelected === 'function')
+ handleFileSelected(file);
+ setFile(file);
+ }}
+ />
+
+ {typeof file === 'object' && (
+
+ )}
+
+ );
+}
+
+export function CsvFilePreview({
+ file,
+ getSetHasHeader,
+ children,
+ onFileImport: handleFileImport,
+}: {
+ readonly file: File;
+ readonly getSetHasHeader: GetOrSet;
+ readonly children?: JSX.Element | undefined;
+ readonly onFileImport: (
+ hasHeader: boolean,
+ encoding: string,
+ getSetDelimiter: GetOrSet
+ ) => void;
+}): JSX.Element {
+ const [encoding, setEncoding] = React.useState('utf-8');
+ const getSetDelimiter = useStateForContext(undefined);
+ const preview = useCsvPreview(file, encoding, getSetDelimiter);
+
+ return (
+
+ handleFileImport(hasHeader, encoding, getSetDelimiter)
+ }
+ >
+ {children === undefined ? <>> : children}
+
+
+
+ );
+}
+
+export function useCsvPreview(
+ file: File,
+ encoding: string,
+ getSetDelimiter: GetSet
+): LocalizedString | RA> | undefined {
+ const [delimiter, setDelimiter] = getSetDelimiter;
+ const [preview] = useAsyncState>>(
+ React.useCallback(
+ async () =>
+ parseCsv(
+ file,
+ encoding,
+ [delimiter, setDelimiter],
+ wbImportPreviewSize
+ ).catch((error) => localized(error.message)),
+ [file, encoding, delimiter, setDelimiter]
+ ),
+ false
+ );
+ return preview;
+}
+
+function ChooseEncoding({
+ encoding = '',
+ isDisabled,
+ onChange: handleChange,
+}: {
+ readonly encoding: string;
+ readonly isDisabled: boolean;
+ readonly onChange: (encoding: string) => void;
+}): JSX.Element {
+ return (
+
+ );
+}
+
+export const delimiters: RA> = [
+ { label: wbText.comma(), searchValue: ',', data: ',' },
+ { label: wbText.tab(), searchValue: '\t', data: '\t' },
+ { label: wbText.semicolon(), searchValue: ';', data: ';' },
+ { label: wbText.space(), searchValue: ' ', data: ' ' },
+ { label: wbText.pipe(), searchValue: '|', data: '|' },
+];
+
+function ChooseDelimiter({
+ isDisabled,
+ getSetDelimiter: [delimiter, handleChange],
+}: {
+ readonly isDisabled: boolean;
+ readonly getSetDelimiter: GetSet;
+}): JSX.Element {
+ const [state, setState] = useTriggerState(delimiter);
+
+ /**
+ * Don't disable the component if it is currently focused, as disabling it
+ * would lead to focus loss, which is bad UX and an accessibility issue.
+ */
+ const inputRef = React.useRef(null);
+ const isFocused = inputRef.current === document.activeElement;
+ const disabled = isDisabled && !isFocused;
+
+ return (
+
+ );
+}
+
+export function Layout({
+ preview,
+ getSetHasHeader: [hasHeader = true, setHasHeader],
+ children,
+ onFileImport: handleFileImport,
+}: {
+ readonly preview: LocalizedString | RA> | undefined;
+ readonly getSetHasHeader: GetOrSet;
+ readonly children?: JSX.Element | RA;
+ readonly onFileImport: (hasHeader: boolean) => void;
+}): JSX.Element {
+ return (
+ <>
+
+ {children}
+
+ handleFileImport(hasHeader)}
+ >
+ {wbText.importFile()}
+
+
+ {typeof preview === 'string' ? (
+
+ ) : Array.isArray(preview) ? (
+
+ ) : (
+ loadingGif
+ )}
+ >
+ );
+}
+
+function ToggleHeader({
+ hasHeader,
+ isDisabled,
+ onChange: handleChange,
+}: {
+ readonly hasHeader: boolean;
+ readonly isDisabled: boolean;
+ readonly onChange: (hasHeader: boolean) => void;
+}): JSX.Element {
+ return (
+
+ );
+}
+
+function Preview({
+ preview,
+ hasHeader,
+}: {
+ readonly preview: RA>;
+ readonly hasHeader: boolean;
+}): JSX.Element {
+ const { rows, header } = extractHeader(preview, hasHeader);
+
+ return (
+
+
{wbText.previewDataSet()}
+
+
+
+
+ {header.map((cell, index) => (
+ |
+ {cell}
+ |
+ ))}
+
+
+
+ {rows.map((row, index) => (
+
+ {row.map((cell, index) => (
+ |
+ {cell}
+ |
+ ))}
+
+ ))}
+
+
+
+
+ );
+}
+
+function BadImport({
+ error,
+}: {
+ readonly error: LocalizedString;
+}): JSX.Element {
+ return (
+
+ {wbText.errorImporting()}
+
+ {error}
+
+ );
+}
diff --git a/specifyweb/frontend/js_src/lib/components/Notifications/__tests__/useNotificationsFetch.test.ts b/specifyweb/frontend/js_src/lib/components/Notifications/__tests__/useNotificationsFetch.test.ts
index df9524741d2..cce7e123874 100644
--- a/specifyweb/frontend/js_src/lib/components/Notifications/__tests__/useNotificationsFetch.test.ts
+++ b/specifyweb/frontend/js_src/lib/components/Notifications/__tests__/useNotificationsFetch.test.ts
@@ -4,7 +4,6 @@ import type { LocalizedString } from 'typesafe-i18n';
import { overrideAjax } from '../../../tests/ajax';
import { mockTime } from '../../../tests/helpers';
-testTime;
import { testTime } from '../../../tests/testTime';
import { formatDateForBackEnd } from '../../../utils/parser/dateFormat';
import { formatUrl } from '../../Router/queryString';
diff --git a/specifyweb/frontend/js_src/lib/components/WbImport/helpers.ts b/specifyweb/frontend/js_src/lib/components/WbImport/helpers.ts
index b80c2bc23d7..085412ad3dc 100644
--- a/specifyweb/frontend/js_src/lib/components/WbImport/helpers.ts
+++ b/specifyweb/frontend/js_src/lib/components/WbImport/helpers.ts
@@ -1,4 +1,5 @@
import { parse } from 'csv-parse/browser/esm';
+import type { LocalizedString } from 'typesafe-i18n';
import ImportXLSWorker from 'worker-loader!./xls.worker';
import { wbText } from '../../localization/workbench';
@@ -13,7 +14,6 @@ import { tables } from '../DataModel/tables';
import { fileToText } from '../Molecules/FilePicker';
import { uniquifyHeaders } from '../WbPlanView/headerHelper';
import type { Dataset, DatasetBrief } from '../WbPlanView/Wrapped';
-import { LocalizedString } from 'typesafe-i18n';
/**
* REFACTOR: add this ESLint rule:
diff --git a/specifyweb/frontend/js_src/lib/components/WbImport/index.tsx b/specifyweb/frontend/js_src/lib/components/WbImport/index.tsx
index baef36b11c8..ffe927d3e9c 100644
--- a/specifyweb/frontend/js_src/lib/components/WbImport/index.tsx
+++ b/specifyweb/frontend/js_src/lib/components/WbImport/index.tsx
@@ -10,25 +10,18 @@ import type { LocalizedString } from 'typesafe-i18n';
import { useAsyncState } from '../../hooks/useAsyncState';
import { useCachedState } from '../../hooks/useCachedState';
-import { useStateForContext } from '../../hooks/useStateForContext';
import { useTriggerState } from '../../hooks/useTriggerState';
import { wbText } from '../../localization/workbench';
-import type { GetSet, RA } from '../../utils/types';
+import type { GetOrSet, RA } from '../../utils/types';
import { localized } from '../../utils/types';
-import { Container, H2, H3 } from '../Atoms';
-import { Button } from '../Atoms/Button';
-import { Input, Select } from '../Atoms/Form';
+import { Container, H2 } from '../Atoms';
+import { Input } from '../Atoms/Form';
import { LoadingContext } from '../Core/Contexts';
import { useMenuItem } from '../Header/MenuContext';
-import { loadingGif } from '../Molecules';
-import type { AutoCompleteItem } from '../Molecules/AutoComplete';
-import { AutoComplete } from '../Molecules/AutoComplete';
-import { FilePicker } from '../Molecules/FilePicker';
-import { encodings } from '../WorkBench/encodings';
+import { CsvFilePreview, FilePicker, Layout } from '../Molecules/FilePicker';
import {
createDataSet,
extractFileName,
- extractHeader,
getMaxDataSetLength,
inferDataSetType,
parseCsv,
@@ -56,25 +49,43 @@ export function WbImportView(): JSX.Element {
function FilePicked({ file }: { readonly file: File }): JSX.Element {
const fileType = inferDataSetType(file);
+ const getSetDataSetName = useTriggerState(extractFileName(file.name));
+ const [hasHeader = true, setHasHeader] = useCachedState(
+ 'wbImport',
+ 'hasHeader'
+ );
return fileType === 'csv' ? (
-
+
) : (
-
+
);
}
-function CsvPicked({ file }: { readonly file: File }): JSX.Element {
- const [encoding, setEncoding] = React.useState('utf-8');
- const getSetDelimiter = useStateForContext(undefined);
- const preview = useCsvPreview(file, encoding, getSetDelimiter);
+function CsvPicked({
+ file,
+ getSetHasHeader: [hasHeader, setHasHeader],
+ getSetDataSetName: [dataSetName, setDataSetName],
+}: {
+ readonly file: File;
+ readonly getSetHasHeader: GetOrSet;
+ readonly getSetDataSetName: GetOrSet;
+}): JSX.Element {
const loading = React.useContext(LoadingContext);
const navigate = useNavigate();
return (
-
+ {
loading(
parseCsv(file, encoding, getSetDelimiter)
.then(async (data) =>
@@ -86,166 +97,11 @@ function CsvPicked({ file }: { readonly file: File }): JSX.Element {
})
)
.then(({ id }) => navigate(`/specify/workbench/${id}/`))
- )
- }
+ );
+ }}
>
-
-
-
- );
-}
-
-function useCsvPreview(
- file: File,
- encoding: string,
- getSetDelimiter: GetSet
-): LocalizedString | RA> | undefined {
- const [delimiter, setDelimiter] = getSetDelimiter;
- const [preview] = useAsyncState>>(
- React.useCallback(
- async () =>
- parseCsv(
- file,
- encoding,
- [delimiter, setDelimiter],
- wbImportPreviewSize
- ).catch((error) => localized(error.message)),
- [file, encoding, delimiter, setDelimiter]
- ),
- false
- );
- return preview;
-}
-
-function ChooseEncoding({
- encoding = '',
- isDisabled,
- onChange: handleChange,
-}: {
- readonly encoding: string;
- readonly isDisabled: boolean;
- readonly onChange: (encoding: string) => void;
-}): JSX.Element {
- return (
-
- );
-}
-
-const delimiters: RA> = [
- { label: wbText.comma(), searchValue: ',', data: ',' },
- { label: wbText.tab(), searchValue: '\t', data: '\t' },
- { label: wbText.semicolon(), searchValue: ';', data: ';' },
- { label: wbText.space(), searchValue: ' ', data: ' ' },
- { label: wbText.pipe(), searchValue: '|', data: '|' },
-];
-
-function ChooseDelimiter({
- isDisabled,
- getSetDelimiter: [delimiter, handleChange],
-}: {
- readonly isDisabled: boolean;
- readonly getSetDelimiter: GetSet;
-}): JSX.Element {
- const [state, setState] = useTriggerState(delimiter);
-
- /**
- * Don't disable the component if it is currently focused, as disabling it
- * would lead to focus loss, which is bad UX and an accessibility issue.
- */
- const inputRef = React.useRef(null);
- const isFocused = inputRef.current === document.activeElement;
- const disabled = isDisabled && !isFocused;
-
- return (
-
- );
-}
-
-function Layout({
- fileName,
- preview,
- children,
- onImport: handleImport,
-}: {
- readonly fileName: string;
- readonly preview: LocalizedString | RA> | undefined;
- readonly children?: JSX.Element | RA;
- readonly onImport: (dataSetName: string, hasHeader: boolean) => void;
-}): JSX.Element {
- const [dataSetName, setDataSetName] = useTriggerState(
- extractFileName(fileName)
- );
- const [hasHeader = true, setHasHeader] = useCachedState(
- 'wbImport',
- 'hasHeader'
- );
- return (
- <>
-
- {children}
-
-
- handleImport(dataSetName, hasHeader)}
- >
- {wbText.importFile()}
-
-
- {typeof preview === 'string' ? (
-
- ) : Array.isArray(preview) ? (
-
- ) : (
- loadingGif
- )}
- >
+
+
);
}
@@ -270,96 +126,23 @@ function ChooseName({
);
}
-function ToggleHeader({
- hasHeader,
- isDisabled,
- onChange: handleChange,
-}: {
- readonly hasHeader: boolean;
- readonly isDisabled: boolean;
- readonly onChange: (hasHeader: boolean) => void;
-}): JSX.Element {
- return (
-
- );
-}
-
-function BadImport({
- error,
+function XlsPicked({
+ file,
+ getSetHasHeader,
+ getSetDataSetName: [dataSetName, setDataSetName],
}: {
- readonly error: LocalizedString;
+ readonly file: File;
+ readonly getSetHasHeader: GetOrSet;
+ readonly getSetDataSetName: GetOrSet;
}): JSX.Element {
- return (
-
- {wbText.errorImporting()}
-
- {error}
-
- );
-}
-
-function Preview({
- preview,
- hasHeader,
-}: {
- readonly preview: RA>;
- readonly hasHeader: boolean;
-}): JSX.Element {
- const { rows, header } = extractHeader(preview, hasHeader);
-
- return (
-
-
{wbText.previewDataSet()}
-
-
-
-
- {header.map((cell, index) => (
- |
- {cell}
- |
- ))}
-
-
-
- {rows.map((row, index) => (
-
- {row.map((cell, index) => (
- |
- {cell}
- |
- ))}
-
- ))}
-
-
-
-
- );
-}
-
-function XlsPicked({ file }: { readonly file: File }): JSX.Element {
const preview = useXlsPreview(file);
const loading = React.useContext(LoadingContext);
const navigate = useNavigate();
return (
+ onFileImport={(hasHeader): void =>
loading(
parseXls(file)
.then(async (data) =>
@@ -373,7 +156,9 @@ function XlsPicked({ file }: { readonly file: File }): JSX.Element {
.then(({ id }) => navigate(`/specify/workbench/${id}/`))
)
}
- />
+ >
+
+
);
}
From 88efe087ef0215611cc076fc1b2aeae00ee888be Mon Sep 17 00:00:00 2001
From: melton-jason
Date: Wed, 27 Dec 2023 20:52:09 -0600
Subject: [PATCH 02/71] Setup routes and define 'skeleton' component
---
.../js_src/lib/components/Atoms/Icons.tsx | 3 +++
.../lib/components/Header/CoGeImport.tsx | 19 +++++++++++++++++++
.../components/Header/userToolDefinitions.ts | 8 ++++++++
.../js_src/lib/components/Router/Routes.tsx | 12 ++++++++++++
.../js_src/lib/localization/header.ts | 3 +++
5 files changed, 45 insertions(+)
create mode 100644 specifyweb/frontend/js_src/lib/components/Header/CoGeImport.tsx
diff --git a/specifyweb/frontend/js_src/lib/components/Atoms/Icons.tsx b/specifyweb/frontend/js_src/lib/components/Atoms/Icons.tsx
index 96500353f2d..df659590a04 100644
--- a/specifyweb/frontend/js_src/lib/components/Atoms/Icons.tsx
+++ b/specifyweb/frontend/js_src/lib/components/Atoms/Icons.tsx
@@ -87,6 +87,9 @@ export const icons = {
fingerPrint: ,
gallery:,
+ globe: ,
hashtag: ,
// This icon is not from Heroicons. It was drawn by @grantfitzsimmons
diff --git a/specifyweb/frontend/js_src/lib/components/Header/CoGeImport.tsx b/specifyweb/frontend/js_src/lib/components/Header/CoGeImport.tsx
new file mode 100644
index 00000000000..2652c76ef67
--- /dev/null
+++ b/specifyweb/frontend/js_src/lib/components/Header/CoGeImport.tsx
@@ -0,0 +1,19 @@
+import React from 'react';
+
+import { useStateForContext } from '../../hooks/useStateForContext';
+import { headerText } from '../../localization/header';
+import { CsvFilePicker } from '../Molecules/FilePicker';
+import { OverlayContext } from '../Router/Router';
+
+export function ImportFromCoge(): JSX.Element {
+ const handleClose = React.useContext(OverlayContext);
+ const getSetHeader = useStateForContext(true);
+ return (
+ console.error('yo!')}
+ onFileSelected={(file): void => {}}
+ />
+ );
+}
diff --git a/specifyweb/frontend/js_src/lib/components/Header/userToolDefinitions.ts b/specifyweb/frontend/js_src/lib/components/Header/userToolDefinitions.ts
index 5d980be535b..78b884d18b0 100644
--- a/specifyweb/frontend/js_src/lib/components/Header/userToolDefinitions.ts
+++ b/specifyweb/frontend/js_src/lib/components/Header/userToolDefinitions.ts
@@ -99,6 +99,14 @@ const rawUserTools = ensure>>>()({
icon: icons.rss,
},
},
+ [commonText.import()]: {
+ coGeImport: {
+ title: headerText.coGeImportDataset(),
+ enabled: () => true,
+ url: '/specify/import/from-coge/',
+ icon: icons.globe,
+ },
+ },
[headerText.documentation()]: {
aboutSpecify: {
title: welcomeText.aboutSpecify(),
diff --git a/specifyweb/frontend/js_src/lib/components/Router/Routes.tsx b/specifyweb/frontend/js_src/lib/components/Router/Routes.tsx
index 70b41e9eb52..d04fd662d00 100644
--- a/specifyweb/frontend/js_src/lib/components/Router/Routes.tsx
+++ b/specifyweb/frontend/js_src/lib/components/Router/Routes.tsx
@@ -225,6 +225,18 @@ export const routes: RA = [
path: 'workbench-import',
element: ,
},
+ {
+ path: 'import',
+ children: [
+ {
+ path: 'from-coge',
+ element: () =>
+ import('../Header/CoGeImport').then(
+ ({ ImportFromCoge: CoGeImportOverlay }) => CoGeImportOverlay
+ ),
+ },
+ ],
+ },
{
path: 'resources',
title: resourcesText.appResources(),
diff --git a/specifyweb/frontend/js_src/lib/localization/header.ts b/specifyweb/frontend/js_src/lib/localization/header.ts
index 6a46a97fff8..90e7c17986b 100644
--- a/specifyweb/frontend/js_src/lib/localization/header.ts
+++ b/specifyweb/frontend/js_src/lib/localization/header.ts
@@ -142,6 +142,9 @@ export const headerText = createDictionary({
abgeschlossen ist.
`,
},
+ coGeImportDataset: {
+ 'en-us': 'Import CoGe Dataset'
+ },
labelName: {
'en-us': 'Label Name',
'ru-ru': 'Название ярлыка',
From ee69e2e3d9e5616ef7a2fbb044e83c1a9422fc28 Mon Sep 17 00:00:00 2001
From: melton-jason
Date: Fri, 26 Jan 2024 07:21:31 -0600
Subject: [PATCH 03/71] Finish frontend implementation
---
.../lib/components/Header/CoGeImport.tsx | 19 ---
.../components/Header/ImportLocalitySet.tsx | 119 ++++++++++++++++++
.../lib/components/Molecules/FilePicker.tsx | 56 ++++++---
.../js_src/lib/components/PickLists/index.tsx | 1 +
.../js_src/lib/components/Router/Routes.tsx | 4 +-
.../js_src/lib/components/WbImport/index.tsx | 2 +-
.../js_src/lib/localization/locality.ts | 13 ++
specifyweb/specify/urls.py | 4 +
specifyweb/specify/views.py | 36 ++++++
9 files changed, 213 insertions(+), 41 deletions(-)
delete mode 100644 specifyweb/frontend/js_src/lib/components/Header/CoGeImport.tsx
create mode 100644 specifyweb/frontend/js_src/lib/components/Header/ImportLocalitySet.tsx
diff --git a/specifyweb/frontend/js_src/lib/components/Header/CoGeImport.tsx b/specifyweb/frontend/js_src/lib/components/Header/CoGeImport.tsx
deleted file mode 100644
index 2652c76ef67..00000000000
--- a/specifyweb/frontend/js_src/lib/components/Header/CoGeImport.tsx
+++ /dev/null
@@ -1,19 +0,0 @@
-import React from 'react';
-
-import { useStateForContext } from '../../hooks/useStateForContext';
-import { headerText } from '../../localization/header';
-import { CsvFilePicker } from '../Molecules/FilePicker';
-import { OverlayContext } from '../Router/Router';
-
-export function ImportFromCoge(): JSX.Element {
- const handleClose = React.useContext(OverlayContext);
- const getSetHeader = useStateForContext(true);
- return (
- console.error('yo!')}
- onFileSelected={(file): void => {}}
- />
- );
-}
diff --git a/specifyweb/frontend/js_src/lib/components/Header/ImportLocalitySet.tsx b/specifyweb/frontend/js_src/lib/components/Header/ImportLocalitySet.tsx
new file mode 100644
index 00000000000..257b804e654
--- /dev/null
+++ b/specifyweb/frontend/js_src/lib/components/Header/ImportLocalitySet.tsx
@@ -0,0 +1,119 @@
+import React from 'react';
+
+import { commonText } from '../../localization/common';
+import { headerText } from '../../localization/header';
+import { localityText } from '../../localization/locality';
+import { ajax } from '../../utils/ajax';
+import type { RA } from '../../utils/types';
+import { H2 } from '../Atoms';
+import { Button } from '../Atoms/Button';
+import { Submit } from '../Atoms/Submit';
+import { tables } from '../DataModel/tables';
+import type { Tables } from '../DataModel/types';
+import { Dialog } from '../Molecules/Dialog';
+import { CsvFilePicker } from '../Molecules/FilePicker';
+
+type Header = Exclude<
+ Lowercase<
+ | keyof Tables['GeoCoordDetail']['fields']
+ | keyof Tables['Locality']['fields']
+ >,
+ 'locality'
+>;
+
+const acceptedLocalityFields: RA<
+ Lowercase
+> = ['guid', 'datum', 'latitude1', 'longitude1'];
+
+const acceptedHeaders = new Set([
+ ...acceptedLocalityFields,
+ ...tables.GeoCoordDetail.literalFields
+ .map(({ name }) => name.toLowerCase())
+ .filter((header) => header !== 'locality'),
+]);
+
+const requiredHeaders = new Set(['guid']);
+
+export function ImportLocalitySet(): JSX.Element {
+ const [headerErrors, setHeaderErrors] = React.useState({
+ missingRequiredHeaders: [] as RA,
+ unrecognizedHeaders: [] as RA,
+ });
+
+ return (
+ <>
+ {
+ const headers = data[0];
+ const foundHeaderErrors = headers.reduce(
+ (accumulator, currentHeader) => {
+ const parsedHeader = currentHeader.toLowerCase() as Header;
+ const isUnknown = !acceptedHeaders.has(parsedHeader);
+
+ return {
+ missingRequiredHeaders:
+ accumulator.missingRequiredHeaders.filter(
+ (header) => header !== parsedHeader
+ ),
+ unrecognizedHeaders: isUnknown
+ ? [...accumulator.unrecognizedHeaders, currentHeader]
+ : accumulator.unrecognizedHeaders,
+ };
+ },
+ {
+ missingRequiredHeaders: Array.from(requiredHeaders) as RA,
+ unrecognizedHeaders: [] as RA,
+ }
+ );
+ setHeaderErrors(foundHeaderErrors);
+ if (
+ Object.values(foundHeaderErrors).some((errors) => errors.length > 0)
+ )
+ return;
+ }}
+ />
+ {Object.values(headerErrors).some((errors) => errors.length > 0) && (
+
+ )}
+ >
+ );
+}
diff --git a/specifyweb/frontend/js_src/lib/components/Molecules/FilePicker.tsx b/specifyweb/frontend/js_src/lib/components/Molecules/FilePicker.tsx
index 402f64fcafd..87d1876f4d6 100644
--- a/specifyweb/frontend/js_src/lib/components/Molecules/FilePicker.tsx
+++ b/specifyweb/frontend/js_src/lib/components/Molecules/FilePicker.tsx
@@ -195,28 +195,31 @@ export const fileToText = async (
export function CsvFilePicker({
header,
- getSetHasHeader,
onFileImport: handleFileImport,
- onFileSelected: handleFileSelected,
}: {
readonly header: LocalizedString;
- readonly getSetHasHeader: GetSet;
- readonly onFileImport: (hasHeader: boolean) => void;
- readonly onFileSelected?: (file: File) => void;
+ readonly onFileImport: ({
+ data,
+ hasHeader,
+ encoding,
+ getSetDelimiter,
+ }: {
+ readonly data: RA>;
+ readonly hasHeader: boolean;
+ readonly encoding: string;
+ readonly getSetDelimiter: GetOrSet;
+ }) => void;
}): JSX.Element {
const [file, setFile] = React.useState();
+ const getSetHasHeader = useStateForContext(true);
return (
{header}
{
- if (typeof handleFileSelected === 'function')
- handleFileSelected(file);
- setFile(file);
- }}
+ acceptedFormats={['.csv', '.tsv', '.psv', '.txt']}
+ onFileSelected={(file): void => setFile(file)}
/>
{typeof file === 'object' && (
@@ -239,11 +242,17 @@ export function CsvFilePreview({
readonly file: File;
readonly getSetHasHeader: GetOrSet;
readonly children?: JSX.Element | undefined;
- readonly onFileImport: (
- hasHeader: boolean,
- encoding: string,
- getSetDelimiter: GetOrSet
- ) => void;
+ readonly onFileImport: ({
+ data,
+ hasHeader,
+ encoding,
+ getSetDelimiter,
+ }: {
+ readonly data: RA>;
+ readonly hasHeader: boolean;
+ readonly encoding: string;
+ readonly getSetDelimiter: GetOrSet;
+ }) => void;
}): JSX.Element {
const [encoding, setEncoding] = React.useState('utf-8');
const getSetDelimiter = useStateForContext(undefined);
@@ -253,9 +262,18 @@ export function CsvFilePreview({
- handleFileImport(hasHeader, encoding, getSetDelimiter)
- }
+ onFileImport={(hasHeader): void => {
+ if (!Array.isArray(preview)) {
+ console.error('Failed to parse data for File ', file.name, preview);
+ return;
+ }
+ handleFileImport({
+ data: preview,
+ hasHeader,
+ encoding,
+ getSetDelimiter,
+ });
+ }}
>
{children === undefined ? <>> : children}
({
value: defaultValue,
required: rawIsRequired,
+ type: 'text',
}),
[defaultValue, rawIsRequired]
)
diff --git a/specifyweb/frontend/js_src/lib/components/Router/Routes.tsx b/specifyweb/frontend/js_src/lib/components/Router/Routes.tsx
index d04fd662d00..4133ce2730b 100644
--- a/specifyweb/frontend/js_src/lib/components/Router/Routes.tsx
+++ b/specifyweb/frontend/js_src/lib/components/Router/Routes.tsx
@@ -231,8 +231,8 @@ export const routes: RA = [
{
path: 'from-coge',
element: () =>
- import('../Header/CoGeImport').then(
- ({ ImportFromCoge: CoGeImportOverlay }) => CoGeImportOverlay
+ import('../Header/ImportLocalitySet').then(
+ ({ ImportLocalitySet }) => ImportLocalitySet
),
},
],
diff --git a/specifyweb/frontend/js_src/lib/components/WbImport/index.tsx b/specifyweb/frontend/js_src/lib/components/WbImport/index.tsx
index ffe927d3e9c..24f18e42ca3 100644
--- a/specifyweb/frontend/js_src/lib/components/WbImport/index.tsx
+++ b/specifyweb/frontend/js_src/lib/components/WbImport/index.tsx
@@ -85,7 +85,7 @@ function CsvPicked({
{
+ onFileImport={({ hasHeader, encoding, getSetDelimiter }): void => {
loading(
parseCsv(file, encoding, getSetDelimiter)
.then(async (data) =>
diff --git a/specifyweb/frontend/js_src/lib/localization/locality.ts b/specifyweb/frontend/js_src/lib/localization/locality.ts
index 7d068be92e9..06559dc505e 100644
--- a/specifyweb/frontend/js_src/lib/localization/locality.ts
+++ b/specifyweb/frontend/js_src/lib/localization/locality.ts
@@ -292,4 +292,17 @@ export const localityText = createDictionary({
'uk-ua': 'DD MM SS.SS N/S/E/W (32 45 42.84 N)',
'de-ch': 'DD MM SS.SS N/S/O/W (32 45 42.84 N)',
},
+ localityImportHeaderError: {
+ 'en-us': 'Errors Found in Column Headers',
+ },
+ localityImportMissingHeader: {
+ 'en-us': 'The following columns are required but missing in the dataset',
+ },
+ localityImportUnrecognizedHeaders: {
+ 'en-us':
+ 'The following columns in the dataset are not recognized and will be ignored on import',
+ },
+ localityImportedAcceptedHeaders: {
+ 'en-us': 'Only the following headers are accepted',
+ },
} as const);
diff --git a/specifyweb/specify/urls.py b/specifyweb/specify/urls.py
index b8324cdffeb..418d2e5df34 100644
--- a/specifyweb/specify/urls.py
+++ b/specifyweb/specify/urls.py
@@ -38,6 +38,10 @@
url(r'^repair/$', tree_views.repair_tree),
])),
+ url(r'^import/', include([
+ url(r'^locality_set/$', views.import_locality_set)
+ ])),
+
# generates Sp6 master key
url(r'^master_key/$', master_key.master_key),
diff --git a/specifyweb/specify/views.py b/specifyweb/specify/views.py
index 34884a577f4..86eec173a1d 100644
--- a/specifyweb/specify/views.py
+++ b/specifyweb/specify/views.py
@@ -767,3 +767,39 @@ def abort_merge_task(request, merge_id: int) -> http.HttpResponse:
else:
return http.HttpResponse(f'Task {merge.taskid} is not running and cannot be aborted.')
+
+
+@openapi(schema={
+ "post": {
+ "requestBody": {
+ "required": True,
+            "description": "Import a locality set: parse the given columns and rows of locality data.",
+ "content": {
+ "application/json": {
+ "schema": {
+ "type": "object",
+ "properties": {
+ "columns": {
+ "type": "array",
+ "items": {
+ "type": "string"
+ }
+ },
+ "data": {
+ "type": "array",
+ "items": {
+ "type": "array",
+ "items": {
+ "type": "string"
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+})
+def import_locality_set(request):
+ pass
From 1c7f0e3800d02ce57333bef4852c6b613ededaa3 Mon Sep 17 00:00:00 2001
From: melton-jason
Date: Wed, 14 Feb 2024 12:58:58 -0600
Subject: [PATCH 04/71] Generalize backend parsing behavior
---
specifyweb/specify/parse.py | 260 ++++++++++++++++
specifyweb/workbench/upload/parsing.py | 291 ++++--------------
.../upload/tests/test_upload_results_json.py | 2 +-
.../workbench/upload/tests/testparsing.py | 31 +-
specifyweb/workbench/upload/treerecord.py | 8 +-
specifyweb/workbench/upload/upload_result.py | 6 +-
specifyweb/workbench/upload/upload_table.py | 4 +-
7 files changed, 338 insertions(+), 264 deletions(-)
create mode 100644 specifyweb/specify/parse.py
diff --git a/specifyweb/specify/parse.py b/specifyweb/specify/parse.py
new file mode 100644
index 00000000000..3b739f01710
--- /dev/null
+++ b/specifyweb/specify/parse.py
@@ -0,0 +1,260 @@
+import re
+import math
+
+from typing import Dict, List, Tuple, Any, NamedTuple, Union, Optional
+from datetime import datetime
+from decimal import Decimal
+
+from specifyweb.specify import models
+from specifyweb.specify.agent_types import agent_types
+from specifyweb.stored_queries.format import get_date_format, MYSQL_TO_YEAR, MYSQL_TO_MONTH
+from specifyweb.specify.datamodel import datamodel, Table, Field, Relationship
+from specifyweb.specify.uiformatters import get_uiformatter, FormatMismatch
+
+
+class ParseFailure(NamedTuple):  # a failed field parse: localization message key plus context payload
+    message: str
+    paylod: Dict[str, Any]  # NOTE(review): 'paylod' is a typo for 'payload'; pf.paylod is referenced elsewhere in this patch — rename repo-wide in a follow-up
+
+    def to_json(self) -> List:
+        return list(self)
+
+
+class ParseSucess(NamedTuple):  # NOTE(review): misspelling of 'ParseSuccess'; name is imported by other modules in this patch, so fix in a coordinated follow-up
+    to_upload: Dict[str, Any]  # lowercased field name -> parsed value, ready for model assignment
+
+
+ParseResult = Union[ParseSucess, ParseFailure]  # outcome of parsing one field value
+
+
+def parse_field(collection, table_name: str, field_name: str, raw_value: str) -> ParseResult:  # dispatch a raw string to the type-appropriate parser for table.field
+    table = datamodel.get_table_strict(table_name)
+    field = table.get_field_strict(field_name)
+
+    formatter = get_uiformatter(collection, table_name, field_name)
+
+    if field.is_relationship:
+        return parse_integer(field.name, raw_value)  # relationship columns hold numeric record ids
+
+    if formatter is not None:
+        return parse_formatted(collection, formatter, table, field, raw_value)  # a configured UI formatter takes precedence over type-based parsing
+
+    if is_latlong(table, field):
+        return parse_latlong(field, raw_value)
+
+    if is_agenttype(table, field):
+        return parse_agenttype(raw_value)
+
+    if field.is_temporal():
+        date_format = get_date_format() or "%Y-%m-%d"  # fall back to ISO when get_date_format() yields nothing
+        return parse_date(table, field_name, date_format, raw_value)
+
+    if field.type == "java.lang.Boolean":
+        return parse_boolean(field_name, raw_value)
+
+    if field.type == 'java.math.BigDecimal':
+        return parse_decimal(field_name, raw_value)
+
+    if field.type in ('java.lang.Float', 'java.lang.Double'):
+        return parse_float(field_name, raw_value)
+
+    if field.type in ('java.lang.Integer', 'java.lang.Long', 'java.lang.Byte', 'java.lang.Short'):
+        return parse_integer(field_name, raw_value)
+
+    if hasattr(field, 'length') and len(raw_value) > field.length:  # length check only reaches plain text fields
+        return ParseFailure('valueTooLong', {'field': field_name, 'maxLength': field.length})
+
+    return ParseSucess({field_name.lower(): raw_value})  # default: keep the raw string as-is
+
+
+def parse_string(value: str) -> Optional[str]:  # strip whitespace; normalize empty results to None
+    result = value.strip()
+    if result == "":
+        return None
+    return result
+
+
+def parse_integer(field_name: str, value: str) -> ParseResult:  # parse an integer-typed field value
+    try:
+        result = int(value)
+    except ValueError as e:
+        return ParseFailure('failedParsingDecimal', {'value': value, 'field': field_name})  # NOTE(review): 'Decimal' key for an integer parse is carried over from the old workbench code — presumably kept for localization compatibility; confirm
+
+    return ParseSucess({field_name.lower(): result})
+
+
+def parse_float(field_name: str, value: str) -> ParseResult:  # parse a float/double-typed field value
+    try:
+        result = float(value)
+    except ValueError as e:
+        return ParseFailure('failedParsingFloat', {'value': value, 'field': field_name})
+
+    return ParseSucess({field_name.lower(): result})
+
+
+def parse_decimal(field_name: str, value: str) -> ParseResult:  # parse a BigDecimal-typed field value
+    try:
+        result = Decimal(value)
+    except Exception as e:  # Decimal raises InvalidOperation (not ValueError), hence the broad catch
+        return ParseFailure(
+            'failedParsingDecimal',
+            {'value': value, 'field': field_name}
+        )
+
+    return ParseSucess({field_name.lower(): result})
+
+
+def parse_boolean(field_name: str, value: str) -> ParseResult:  # accept only yes/true/no/false (case-insensitive)
+    if value.lower() in ["yes", "true"]:
+        result = True
+    elif value.lower() in ["no", "false"]:
+        result = False
+    else:
+        return ParseFailure(
+            'failedParsingBoolean',
+            {'value': value, 'field': field_name}
+        )
+
+    return ParseSucess({field_name.lower(): result})
+
+
+def parse_date(table: Table, field_name: str, dateformat: str, value: str) -> ParseResult:  # parse a date, recording precision when the table has a <field>precision column
+    if re.search('[0-9]{4}', value) is None:  # require an explicit 4-digit year anywhere in the value
+        return ParseFailure('invalidYear', {'value': value})
+
+    dateformat = dateformat.replace('%y', '%Y')  # force 4-digit-year parsing even if the configured format uses %y
+    precision_field = table.get_field(field_name + 'precision')
+    if precision_field is None:  # no precision column: the value must match the full format exactly
+        try:
+            date = datetime.strptime(value, dateformat).date()
+        except ValueError:
+            return ParseFailure('badDateFormat', {'value': value, 'format': dateformat})
+        return ParseSucess({field_name.lower(): date})
+
+    date_formats = [
+        dateformat,
+        MYSQL_TO_MONTH[dateformat],  # month-precision variant (per MYSQL_TO_MONTH mapping)
+        MYSQL_TO_YEAR[dateformat],  # year-precision variant (per MYSQL_TO_YEAR mapping)
+        dateformat.replace('%d', '00'),  # allow a literal '00' day
+        re.sub('(%m)|(%d)', '00', dateformat),  # allow literal '00' month and day
+    ]
+
+    for df in date_formats:  # try most-precise formats first
+        try:
+            date = datetime.strptime(value, df).date()
+        except ValueError:
+            continue
+        if '%d' in df:
+            return ParseSucess({field_name.lower(): date, precision_field.name.lower(): 1})  # full date -> precision 1
+        elif '%m' in df or '%b' in df:
+            return ParseSucess({field_name.lower(): date.replace(day=1), precision_field.name.lower(): 2})  # month known: pin day to 1, precision 2
+        else:
+            return ParseSucess({field_name.lower(): date.replace(day=1, month=1), precision_field.name.lower(): 3})  # year only: pin to Jan 1, precision 3
+
+    return ParseFailure('badDateFormat', {'value': value, 'format': dateformat})
+
+
+def parse_formatted(collection, uiformatter, table: Table, field: Union[Field, Relationship], value: str) -> ParseResult:  # parse via the field's configured UI formatter, autonumbering if needed
+    try:
+        parsed = uiformatter.parse(value)
+    except FormatMismatch as e:
+        return ParseFailure(e.args[0], {})  # FormatMismatch's first arg is the localization message key
+
+    if uiformatter.needs_autonumber(parsed):  # e.g. placeholder value requesting the next number in sequence
+        canonicalized = uiformatter.autonumber_now(
+            collection, getattr(models, table.django_name), parsed)
+    else:
+        canonicalized = uiformatter.canonicalize(parsed)
+
+    if hasattr(field, 'length') and len(canonicalized) > field.length:  # check length of the canonical form, not the raw input
+        return ParseFailure('valueTooLong', {'maxLength': field.length})
+
+    return ParseSucess({field.name.lower(): canonicalized})
+
+
+def parse_agenttype(value: str) -> ParseResult:  # map an agent-type name to its index in the agent_types list
+    value = value.capitalize()  # stored names are capitalized (e.g. 'Person'); normalize the input to match
+    try:
+        agenttype = agent_types.index(value)
+    except ValueError:
+        return ParseFailure('failedParsingAgentType', {'badType': value, 'validTypes': agent_types})
+    return ParseSucess({'agenttype': agenttype})
+
+
+def is_latlong(table: Table, field: Field) -> bool:  # True for the four Locality coordinate fields
+    return table.name == 'Locality' \
+        and field.name in ('latitude1', 'longitude1', 'latitude2', 'longitude2')
+
+
+def is_agenttype(table: Table, field: Field) -> bool:  # True for Agent.agenttype, which needs special enum parsing
+    return table.name == "Agent" and field.name.lower() == 'agenttype'
+
+
+def parse_latlong(field: Field, value: str) -> ParseResult:  # parse a coordinate field into signed degrees, unit code, and raw text
+    parsed = parse_coord(value)
+
+    if parsed is None:  # unparseable coordinate: report a failure (returning None would crash callers expecting a ParseResult)
+        return ParseFailure('coordinateBadFormat', {'value': value})
+
+    coord, unit = parsed
+    if field.name.startswith('lat') and abs(coord) >= 90:
+        return ParseFailure("latitudeOutOfRange", {'value': value})
+
+    if field.name.startswith('long') and abs(coord) >= 180:
+        return ParseFailure('longitudeOutOfRange', {'value': value})
+
+    return ParseSucess({field.name.lower(): coord,
+                        'originallatlongunit': unit,
+                        field.name.lower().replace('itude', '') + 'text': parse_string(value)})
+
+
+def parse_coord(value: str) -> Optional[Tuple[float, int]]:  # try each coordinate syntax; return (signed degrees, unit code) or None
+    for p in LATLONG_PARSER_DEFS:
+        match = re.compile(p.regex, re.I).match(value)
+        if match and match.group(1):  # require at least the degrees component to be present
+            try:
+                # relies on signed zeros in floats
+                # see https://docs.python.org/3/library/math.html#math.copysign
+                comps = [float(match.group(i)) for i in p.comp_groups]
+            except ValueError:
+                continue
+            result, divisor = 0.0, 1
+            for comp in comps:
+                result += abs(comp) / divisor  # degrees, then minutes/60, then seconds/3600
+                divisor *= 60
+            result = math.copysign(result, comps[0])  # carry the sign of the degrees component (including -0.0)
+            if match.group(p.dir_group).lower() in ("s", "w"):  # south/west hemispheres are negative
+                result = -result
+            return (result, p.unit)
+    return None
+
+
+class LatLongParserDef(NamedTuple):  # one candidate coordinate syntax for parse_coord
+    regex: str  # pattern, matched case-insensitively against the raw value
+    comp_groups: List[int]  # regex group numbers holding the numeric components (degrees[, minutes[, seconds]])
+    dir_group: int  # regex group number holding the optional N/S/E/W direction letter
+    unit: int  # code stored as originallatlongunit when this syntax matches
+
+
+LATLONG_PARSER_DEFS = [
+    LatLongParserDef(
+        r'^(-?\d{0,3}(\.\d*)?)[^\d\.nsew]*([nsew]?)$',  # decimal degrees
+        [1],
+        3,
+        0
+    ),
+
+    LatLongParserDef(
+        r'^(-?\d{1,3})[^\d\.]+(\d{0,2}(\.\d*)?)[^\d\.nsew]*([nsew]?)$',  # degrees + decimal minutes
+        [1, 2],
+        4,
+        2
+    ),
+
+    LatLongParserDef(
+        r'^(-?\d{1,3})[^\d\.]+(\d{1,2})[^\d\.]+(\d{0,2}(\.\d*)?)[^\d\.nsew]*([nsew]?)$',  # degrees, minutes, seconds
+        [1, 2, 3],
+        5,
+        1
+    ),
+]
diff --git a/specifyweb/workbench/upload/parsing.py b/specifyweb/workbench/upload/parsing.py
index 3a6a717cdf1..73ed34931c0 100644
--- a/specifyweb/workbench/upload/parsing.py
+++ b/specifyweb/workbench/upload/parsing.py
@@ -1,37 +1,37 @@
-
import logging
-import math
-import re
-from datetime import datetime
-from decimal import Decimal
from typing import Dict, Any, Optional, List, NamedTuple, Tuple, Union, NoReturn
from django.core.exceptions import ObjectDoesNotExist
-from specifyweb.specify import models
-from specifyweb.specify.datamodel import datamodel, Table
-from specifyweb.specify.uiformatters import FormatMismatch
-from specifyweb.stored_queries.format import MYSQL_TO_YEAR, MYSQL_TO_MONTH
+from specifyweb.specify.datamodel import datamodel
from .column_options import ExtendedColumnOptions
+from specifyweb.specify.parse import parse_field, is_latlong, ParseSucess, ParseFailure
Row = Dict[str, str]
Filter = Dict[str, Any]
logger = logging.getLogger(__name__)
+
class PicklistAddition(NamedTuple):
picklist: Any
column: str
value: str
-class ParseFailure(NamedTuple):
+
+class WorkBenchParseFailure(NamedTuple):
message: str
payload: Dict[str, Union[str, int, List[str], List[int]]]
column: str
+ @classmethod
+ def from_parse_failure(cls, pf: ParseFailure, column: str):
+ return cls(message=pf.message, payload=pf.paylod, column=column)
+
def to_json(self) -> List:
return list(self)
+
class ParseResult(NamedTuple):
filter_on: Filter
upload: Dict[str, Any]
@@ -39,27 +39,35 @@ class ParseResult(NamedTuple):
column: str
missing_required: Optional[str]
+ @classmethod
+ def from_parse_success(cls, ps: ParseSucess, filter_on: Filter, add_to_picklist: Optional[PicklistAddition], column: str, missing_required: Optional[str]):
+ return cls(filter_on=filter_on, upload=ps.to_upload, add_to_picklist=add_to_picklist, column=column, missing_required=missing_required)
+
def match_key(self) -> str:
from .uploadable import filter_match_key
return filter_match_key(self.filter_on)
+
def filter_and_upload(f: Filter, column: str) -> ParseResult:
return ParseResult(f, f, None, column, None)
-def parse_many(collection, tablename: str, mapping: Dict[str, ExtendedColumnOptions], row: Row) -> Tuple[List[ParseResult], List[ParseFailure]]:
+
+def parse_many(collection, tablename: str, mapping: Dict[str, ExtendedColumnOptions], row: Row) -> Tuple[List[ParseResult], List[WorkBenchParseFailure]]:
results = [
- parse_value(collection, tablename, fieldname, row[colopts.column], colopts)
+ parse_value(collection, tablename, fieldname,
+ row[colopts.column], colopts)
for fieldname, colopts in mapping.items()
]
return (
[r for r in results if isinstance(r, ParseResult)],
- [r for r in results if isinstance(r, ParseFailure)]
+ [r for r in results if isinstance(r, WorkBenchParseFailure)]
)
-def parse_value(collection, tablename: str, fieldname: str, value_in: str, colopts: ExtendedColumnOptions) -> Union[ParseResult, ParseFailure]:
+
+def parse_value(collection, tablename: str, fieldname: str, value_in: str, colopts: ExtendedColumnOptions) -> Union[ParseResult, WorkBenchParseFailure]:
required_by_schema = colopts.schemaitem and colopts.schemaitem.isrequired
- result: Union[ParseResult, ParseFailure]
+ result: Union[ParseResult, WorkBenchParseFailure]
was_blank = value_in.strip() == ""
if was_blank:
if colopts.default is None:
@@ -68,13 +76,16 @@ def parse_value(collection, tablename: str, fieldname: str, value_in: str, colop
"field is required by schema config" if required_by_schema else
None
)
- result = ParseResult({fieldname: None}, {}, None, colopts.column, missing_required)
+ result = ParseResult({fieldname: None}, {},
+ None, colopts.column, missing_required)
else:
- result = _parse(collection, tablename, fieldname, colopts, colopts.default)
+ result = _parse(collection, tablename, fieldname,
+ colopts, colopts.default)
else:
- result = _parse(collection, tablename, fieldname, colopts, value_in.strip())
+ result = _parse(collection, tablename, fieldname,
+ colopts, value_in.strip())
- if isinstance(result, ParseFailure):
+ if isinstance(result, WorkBenchParseFailure):
return result
if colopts.matchBehavior == "ignoreAlways":
@@ -90,18 +101,16 @@ def parse_value(collection, tablename: str, fieldname: str, value_in: str, colop
assertNever(colopts.matchBehavior)
-def _parse(collection, tablename: str, fieldname: str, colopts: ExtendedColumnOptions, value: str) -> Union[ParseResult, ParseFailure]:
- if tablename.lower() == 'agent' and fieldname.lower() == 'agenttype':
- return parse_agenttype(value, colopts.column)
-
+def _parse(collection, tablename: str, fieldname: str, colopts: ExtendedColumnOptions, value: str) -> Union[ParseResult, WorkBenchParseFailure]:
table = datamodel.get_table_strict(tablename)
field = table.get_field_strict(fieldname)
if colopts.picklist:
- result = parse_with_picklist(collection, colopts.picklist, fieldname, value, colopts.column)
+ result = parse_with_picklist(
+ collection, colopts.picklist, fieldname, value, colopts.column)
if result is not None:
if isinstance(result, ParseResult) and hasattr(field, 'length') and len(result.upload[fieldname]) > field.length:
- return ParseFailure(
+ return WorkBenchParseFailure(
'pickListValueTooLong',
{
'pickList': colopts.picklist.name,
@@ -111,247 +120,51 @@ def _parse(collection, tablename: str, fieldname: str, colopts: ExtendedColumnOp
)
return result
- if field.is_relationship:
- return parse_integer(fieldname, value, colopts.column)
-
- if colopts.uiformatter:
- try:
- parsed = colopts.uiformatter.parse(value)
- except FormatMismatch as e:
- return ParseFailure(e.args[0], {}, colopts.column)
-
- if colopts.uiformatter.needs_autonumber(parsed):
- canonicalized = colopts.uiformatter.autonumber_now(collection, getattr(models, tablename.capitalize()), parsed)
- else:
- canonicalized = colopts.uiformatter.canonicalize(parsed)
-
- if hasattr(field, 'length') and len(canonicalized) > field.length:
- return ParseFailure('valueTooLong',{'maxLength':field.length}, colopts.column)
-
- return filter_and_upload({fieldname: canonicalized}, colopts.column)
-
- if is_latlong(table, field):
- return parse_latlong(field, value, colopts.column)
+ parsed = parse_field(collection, tablename, fieldname, value)
- if field.is_temporal():
- return parse_date(table, fieldname, colopts.dateformat or "%Y-%m-%d", value, colopts.column)
+ if is_latlong(table, field) and isinstance(parsed, ParseSucess):
+ coord_text_field = field.name.replace('itude', '') + 'text'
+ filter_on = {coord_text_field: parsed.to_upload[coord_text_field]}
+ return ParseResult.from_parse_success(parsed, filter_on, None, colopts.column, None)
- if field.type == "java.lang.Boolean":
- return parse_boolean(fieldname, value, colopts.column)
-
- if field.type == 'java.math.BigDecimal':
- return parse_decimal(fieldname, value, colopts.column)
-
- if field.type in ('java.lang.Float', 'java.lang.Double'):
- return parse_float(fieldname, value, colopts.column)
-
- if field.type in ('java.lang.Integer', 'java.lang.Long', 'java.lang.Byte', 'java.lang.Short'):
- return parse_integer(fieldname, value, colopts.column)
-
- if hasattr(field, 'length') and len(value) > field.length:
- return ParseFailure('valueTooLong', {'maxLength':field.length}, colopts.column)
+ if isinstance(parsed, ParseFailure):
+ return WorkBenchParseFailure.from_parse_failure(parsed, colopts.column)
+ else:
+ return ParseResult.from_parse_success(parsed, parsed.to_upload, None, colopts.column, None)
- return filter_and_upload({fieldname: value}, colopts.column)
-def parse_boolean(fieldname: str, value: str, column: str) -> Union[ParseResult, ParseFailure]:
- if value.lower() in ["yes", "true"]:
- result = True
- elif value.lower() in ["no", "false"]:
- result = False
- else:
- return ParseFailure(
- 'failedParsingBoolean',
- {'value': value},
- column
- )
-
- return filter_and_upload({fieldname: result}, column)
-
-def parse_decimal(fieldname: str, value: str, column) -> Union[ParseResult, ParseFailure]:
- try:
- result = Decimal(value)
- except Exception as e:
- return ParseFailure(
- 'failedParsingDecimal',
- {'value': value},
- column
- )
-
- return filter_and_upload({fieldname: result}, column)
-
-def parse_float(fieldname: str, value: str, column) -> Union[ParseResult, ParseFailure]:
- try:
- result = float(value)
- except ValueError as e:
- return ParseFailure('failedParsingFloat', {'value': value}, column)
-
- return filter_and_upload({fieldname: result}, column)
-
-def parse_integer(fieldname: str, value: str, column: str) -> Union[ParseResult, ParseFailure]:
- try:
- result = int(value)
- except ValueError as e:
- return ParseFailure('failedParsingDecimal', {'value': value}, column)
-
- return filter_and_upload({fieldname: result}, column)
-
-def parse_with_picklist(collection, picklist, fieldname: str, value: str, column: str) -> Union[ParseResult, ParseFailure, None]:
- if picklist.type == 0: # items from picklistitems table
+def parse_with_picklist(collection, picklist, fieldname: str, value: str, column: str) -> Union[ParseResult, WorkBenchParseFailure, None]:
+ if picklist.type == 0: # items from picklistitems table
try:
item = picklist.picklistitems.get(title=value)
return filter_and_upload({fieldname: item.value}, column)
except ObjectDoesNotExist:
if picklist.readonly:
- return ParseFailure(
+ return WorkBenchParseFailure(
'failedParsingPickList',
{'value': value},
column
)
else:
return filter_and_upload({fieldname: value}, column)._replace(
- add_to_picklist=PicklistAddition(picklist=picklist, column=column, value=value)
+ add_to_picklist=PicklistAddition(
+ picklist=picklist, column=column, value=value)
)
return filter_and_upload({fieldname: value})
- elif picklist.type == 1: # items from rows in some table
+ elif picklist.type == 1: # items from rows in some table
# we ignore this type of picklist because it is primarily used to choose many-to-one's on forms
# so it is not expected to appear on actual fields
return None
- elif picklist.type == 2: # items from a field in some table
+ elif picklist.type == 2: # items from a field in some table
# this picklist type is rarely used and seems mostly for convenience on forms to allow
# quickly selecting existing values from other rows in the same table. e.g. moleculeType
return None
else:
- raise NotImplementedError("unknown picklist type {}".format(picklist.type))
-
-def parse_agenttype(value: str, column: str) -> Union[ParseResult, ParseFailure]:
- agenttypes = ['Organization', 'Person', 'Other', 'Group']
-
- value = value.capitalize()
- try:
- agenttype = agenttypes.index(value)
- except ValueError:
- return ParseFailure('failedParsingAgentType', {'badType': value, 'validTypes': agenttypes}, column)
- return filter_and_upload({'agenttype': agenttype}, column)
-
-def parse_date(table: Table, fieldname: str, dateformat: str, value: str, column: str) -> Union[ParseResult, ParseFailure]:
- if re.search('[0-9]{4}', value) is None:
- return ParseFailure('invalidYear',{'value':value}, column)
-
- dateformat = dateformat.replace('%y', '%Y')
- precision_field = table.get_field(fieldname + 'precision')
- if precision_field is None:
- try:
- date = datetime.strptime(value, dateformat).date()
- except ValueError:
- return ParseFailure('badDateFormat', {'value':value,'format':dateformat}, column)
- return filter_and_upload({fieldname: date}, column)
-
- date_formats = [
- dateformat,
- MYSQL_TO_MONTH[dateformat],
- MYSQL_TO_YEAR[dateformat],
- dateformat.replace('%d', '00'),
- re.sub('(%m)|(%d)', '00', dateformat),
- ]
-
- for df in date_formats:
- try:
- date = datetime.strptime(value, df).date()
- except ValueError:
- continue
- if '%d' in df:
- return filter_and_upload({fieldname: date, precision_field.name.lower(): 1}, column)
- elif '%m' in df or '%b' in df:
- return filter_and_upload({fieldname: date.replace(day=1), precision_field.name.lower(): 2}, column)
- else:
- return filter_and_upload({fieldname: date.replace(day=1, month=1), precision_field.name.lower(): 3}, column)
-
- return ParseFailure('badDateFormat', {'value':value, 'format':dateformat}, column)
-
-def parse_string(value: str) -> Optional[str]:
- result = value.strip()
- if result == "":
- return None
- return result
-
-def is_latlong(table, field) -> bool:
- return table.name == 'Locality' \
- and field.name in ('latitude1', 'longitude1', 'latitude2', 'longitude2')
-
-def parse_latlong(field, value: str, column: str) -> Union[ParseResult, ParseFailure]:
- parsed = parse_coord(value)
-
- if parsed is None:
- return ParseFailure('coordinateBadFormat', {'value':value}, column)
-
- coord, unit = parsed
- if field.name.startswith('lat') and abs(coord) >= 90:
- return ParseFailure('latitudeOutOfRange', {'value':value}, column)
-
- if field.name.startswith('long') and abs(coord) >= 180:
- return ParseFailure('longitudeOutOfRange', {'value': value}, column)
-
- text_filter = {field.name.replace('itude', '') + 'text': parse_string(value)}
- return ParseResult(
- text_filter,
- {field.name: coord, 'originallatlongunit': unit, **text_filter},
- None,
- column,
- None
- )
-
-
-def parse_coord(value: str) -> Optional[Tuple[float, int]]:
- for p in LATLONG_PARSER_DEFS:
- match = re.compile(p.regex, re.I).match(value)
- if match and match.group(1):
- try:
- # relies on signed zeros in floats
- # see https://docs.python.org/3/library/math.html#math.copysign
- comps = [float(match.group(i)) for i in p.comp_groups]
- except ValueError:
- continue
- result, divisor = 0.0, 1
- for comp in comps:
- result += abs(comp) / divisor
- divisor *= 60
- result = math.copysign(result, comps[0])
- if match.group(p.dir_group).lower() in ("s", "w"):
- result = -result
- return (result, p.unit)
- return None
-
-class LatLongParserDef(NamedTuple):
- regex: str
- comp_groups: List[int]
- dir_group: int
- unit: int
-
-LATLONG_PARSER_DEFS = [
- LatLongParserDef(
- r'^(-?\d{0,3}(\.\d*)?)[^\d\.nsew]*([nsew]?)$',
- [1],
- 3,
- 0
- ),
-
- LatLongParserDef(
- r'^(-?\d{1,3})[^\d\.]+(\d{0,2}(\.\d*)?)[^\d\.nsew]*([nsew]?)$',
- [1, 2],
- 4,
- 2
- ),
-
- LatLongParserDef(
- r'^(-?\d{1,3})[^\d\.]+(\d{1,2})[^\d\.]+(\d{0,2}(\.\d*)?)[^\d\.nsew]*([nsew]?)$',
- [1, 2, 3],
- 5,
- 1
- ),
-]
+ raise NotImplementedError(
+ "unknown picklist type {}".format(picklist.type))
def assertNever(x: NoReturn) -> NoReturn:
diff --git a/specifyweb/workbench/upload/tests/test_upload_results_json.py b/specifyweb/workbench/upload/tests/test_upload_results_json.py
index 27365990761..539421b8fb5 100644
--- a/specifyweb/workbench/upload/tests/test_upload_results_json.py
+++ b/specifyweb/workbench/upload/tests/test_upload_results_json.py
@@ -69,7 +69,7 @@ def testUploadResultExplicit(self):
columns=['report info column 1', 'report info column 2'],
treeInfo=None
))
- parse_failure: ParseFailure = ParseFailure(
+ parse_failure: WorkBenchParseFailure = WorkBenchParseFailure(
message='parse failure message',
payload={'parse failure payload key 1': 'parse failure payload value 1', 'parse failure payload key 2': 'parse failure payload value 2'},
column='parse failure column')
diff --git a/specifyweb/workbench/upload/tests/testparsing.py b/specifyweb/workbench/upload/tests/testparsing.py
index d8156817035..e5c019a8c86 100644
--- a/specifyweb/workbench/upload/tests/testparsing.py
+++ b/specifyweb/workbench/upload/tests/testparsing.py
@@ -11,14 +11,15 @@
from specifyweb.specify.datamodel import datamodel
from specifyweb.stored_queries.format import LDLM_TO_MYSQL, MYSQL_TO_MONTH, \
MYSQL_TO_YEAR
+from specifyweb.specify.parse import parse_coord, parse_date, ParseFailure
from .base import UploadTestsBase, get_table
from ..column_options import ColumnOptions
-from ..parsing import parse_coord, parse_date, ParseResult as PR
+from ..parsing import ParseResult as PR
from ..treerecord import TreeRecord
from ..upload import do_upload, do_upload_csv
from ..upload_plan_schema import parse_column_options
from ..upload_result import Uploaded, Matched, NullRecord, ParseFailures, \
- ParseFailure
+ WorkBenchParseFailure
from ..upload_results_schema import schema as upload_results_schema
from ..upload_table import UploadTable
@@ -27,12 +28,12 @@
class DateParsingTests(unittest.TestCase):
def test_bad1(self) -> None:
- result = parse_date(co, 'catalogeddate', '%d/%m/%Y', 'foobar', 'catdate')
- self.assertEqual(ParseFailure(message='invalidYear', payload={'value':'foobar'}, column='catdate'), result)
+ result = parse_date(co, 'catalogeddate', '%d/%m/%Y', 'foobar')
+ self.assertEqual(ParseFailure(message='invalidYear', payload={'value':'foobar'}), result)
def test_bad2(self) -> None:
- result = parse_date(co, 'catalogeddate', '%d/%m/%Y', '1978-7-24', 'catdate')
- self.assertEqual(ParseFailure(message='badDateFormat', payload={'value':'1978-7-24', 'format':'%d/%m/%Y'}, column='catdate'), result)
+ result = parse_date(co, 'catalogeddate', '%d/%m/%Y', '1978-7-24')
+ self.assertEqual(ParseFailure(message='badDateFormat', payload={'value':'1978-7-24', 'format':'%d/%m/%Y'}), result)
@given(st.dates(min_value=date(1000,1,1)), st.sampled_from([f for f in LDLM_TO_MYSQL.values() if '%Y' in f]))
def test_full_date(self, date, format) -> None:
@@ -279,7 +280,7 @@ def test_readonly_picklist(self) -> None:
result2 = results[2].record_result
assert isinstance(result2, ParseFailures)
- self.assertEqual([ParseFailure(
+ self.assertEqual([WorkBenchParseFailure(
message='failedParsingPickList',
payload={'value': 'Hon.'},
column='title'
@@ -343,7 +344,7 @@ def test_multiple_parsing_errors_reported(self) -> None:
failed_result = upload_results[0].record_result
self.assertIsInstance(failed_result, ParseFailures)
assert isinstance(failed_result, ParseFailures) # make typechecker happy
- self.assertEqual([ParseFailure(message='invalidYear', payload={'value':'foobar'}, column='Start Date Collected'), ParseFailure(message='invalidYear', payload={'value': 'bad date'}, column='ID Date')], failed_result.failures)
+ self.assertEqual([WorkBenchParseFailure(message='invalidYear', payload={'value':'foobar'}, column='Start Date Collected'), WorkBenchParseFailure(message='invalidYear', payload={'value': 'bad date'}, column='ID Date')], failed_result.failures)
def test_out_of_range_lat_long(self) -> None:
reader = csv.DictReader(io.StringIO(
@@ -354,7 +355,7 @@ def test_out_of_range_lat_long(self) -> None:
failed_result = upload_results[0].record_result
self.assertIsInstance(failed_result, ParseFailures)
assert isinstance(failed_result, ParseFailures) # make typechecker happy
- self.assertEqual([ParseFailure(message='latitudeOutOfRange', payload={'value':'128° 06.07\' N'}, column='Latitude1'), ParseFailure(message='longitudeOutOfRange', payload={'value': '191° 02.42\' W'}, column='Longitude1')], failed_result.failures)
+ self.assertEqual([WorkBenchParseFailure(message='latitudeOutOfRange', payload={'value':'128° 06.07\' N'}, column='Latitude1'), WorkBenchParseFailure(message='longitudeOutOfRange', payload={'value': '191° 02.42\' W'}, column='Longitude1')], failed_result.failures)
def test_agent_type(self) -> None:
plan = UploadTable(
@@ -387,7 +388,7 @@ def test_agent_type(self) -> None:
result2 = results[2].record_result
assert isinstance(result2, ParseFailures)
- self.assertEqual([ParseFailure(message='failedParsingAgentType',payload={'badType':'Extra terrestrial','validTypes':['Organization', 'Person', 'Other', 'Group']}, column='agenttype')], result2.failures)
+ self.assertEqual([WorkBenchParseFailure(message='failedParsingAgentType',payload={'badType':'Extra terrestrial','validTypes':['Organization', 'Person', 'Other', 'Group']}, column='agenttype')], result2.failures)
result3 = results[3].record_result
assert isinstance(result3, Uploaded)
@@ -412,7 +413,7 @@ def test_tree_cols_without_name(self) -> None:
results = do_upload(self.collection, data, plan, self.agent.id)
self.assertIsInstance(results[0].record_result, Uploaded)
- self.assertEqual(results[1].record_result, ParseFailures(failures=[ParseFailure(message='invalidPartialRecord', payload={'column':'Species'}, column='Species Author')]))
+ self.assertEqual(results[1].record_result, ParseFailures(failures=[WorkBenchParseFailure(message='invalidPartialRecord', payload={'column':'Species'}, column='Species Author')]))
def test_value_too_long(self) -> None:
plan = TreeRecord(
@@ -431,7 +432,7 @@ def test_value_too_long(self) -> None:
self.assertIsInstance(results[0].record_result, Uploaded)
self.assertIsInstance(results[1].record_result, Uploaded)
- self.assertEqual(results[2].record_result, ParseFailures(failures=[ParseFailure(message='valueTooLong', payload={'maxLength': 128}, column='Species Author')]))
+ self.assertEqual(results[2].record_result, ParseFailures(failures=[WorkBenchParseFailure(message='valueTooLong', payload={'maxLength': 128}, column='Species Author')]))
class MatchingBehaviorTests(UploadTestsBase):
@@ -795,7 +796,7 @@ def test_wbcols_with_null_disallowed(self) -> None:
validate([result.to_json()], upload_results_schema)
self.assertIsInstance(results[0].record_result, Uploaded)
- self.assertEqual(results[1].record_result, ParseFailures(failures=[ParseFailure(message='field is required by upload plan mapping', payload={}, column='firstname')]))
+ self.assertEqual(results[1].record_result, ParseFailures(failures=[WorkBenchParseFailure(message='field is required by upload plan mapping', payload={}, column='firstname')]))
self.assertIsInstance(results[2].record_result, Uploaded)
def test_wbcols_with_null_disallowed_and_ignoreWhenBlank(self) -> None:
@@ -822,7 +823,7 @@ def test_wbcols_with_null_disallowed_and_ignoreWhenBlank(self) -> None:
validate([result.to_json()], upload_results_schema)
self.assertIsInstance(results[0].record_result, Uploaded)
- self.assertEqual(results[1].record_result, ParseFailures(failures=[ParseFailure(message='field is required by upload plan mapping', payload={}, column='firstname')]))
+ self.assertEqual(results[1].record_result, ParseFailures(failures=[WorkBenchParseFailure(message='field is required by upload plan mapping', payload={}, column='firstname')]))
self.assertIsInstance(results[2].record_result, Uploaded)
self.assertIsInstance(results[3].record_result, Matched)
self.assertIsInstance(results[4].record_result, Uploaded)
@@ -851,7 +852,7 @@ def test_wbcols_with_null_disallowed_and_ignoreAlways(self) -> None:
validate([result.to_json()], upload_results_schema)
self.assertIsInstance(results[0].record_result, Uploaded)
- self.assertEqual(results[1].record_result, ParseFailures(failures=[ParseFailure(message='field is required by upload plan mapping', payload={}, column='firstname')]))
+ self.assertEqual(results[1].record_result, ParseFailures(failures=[WorkBenchParseFailure(message='field is required by upload plan mapping', payload={}, column='firstname')]))
self.assertIsInstance(results[2].record_result, Uploaded)
self.assertIsInstance(results[3].record_result, Matched)
self.assertIsInstance(results[4].record_result, Matched)
diff --git a/specifyweb/workbench/upload/treerecord.py b/specifyweb/workbench/upload/treerecord.py
index 14fc286bdab..d34f7619aa5 100644
--- a/specifyweb/workbench/upload/treerecord.py
+++ b/specifyweb/workbench/upload/treerecord.py
@@ -11,7 +11,7 @@
from specifyweb.businessrules.exceptions import BusinessRuleException
from specifyweb.specify import models
from .column_options import ColumnOptions, ExtendedColumnOptions
-from .parsing import ParseResult, ParseFailure, parse_many, filter_and_upload
+from .parsing import ParseResult, WorkBenchParseFailure, parse_many, filter_and_upload
from .upload_result import UploadResult, NullRecord, NoMatch, Matched, \
MatchedMultiple, Uploaded, ParseFailures, FailedBusinessRule, ReportInfo, \
TreeInfo
@@ -59,7 +59,7 @@ def get_treedefs(self) -> Set:
def bind(self, collection, row: Row, uploadingAgentId: Optional[int], auditor: Auditor, cache: Optional[Dict]=None, row_index: Optional[int] = None) -> Union["BoundTreeRecord", ParseFailures]:
parsedFields: Dict[str, List[ParseResult]] = {}
- parseFails: List[ParseFailure] = []
+ parseFails: List[WorkBenchParseFailure] = []
for rank, cols in self.ranks.items():
nameColumn = cols['name']
presults, pfails = parse_many(collection, self.name, cols, row)
@@ -68,7 +68,7 @@ def bind(self, collection, row: Row, uploadingAgentId: Optional[int], auditor: A
filters = {k: v for result in presults for k, v in result.filter_on.items()}
if filters.get('name', None) is None:
parseFails += [
- ParseFailure('invalidPartialRecord',{'column':nameColumn.column}, result.column)
+ WorkBenchParseFailure('invalidPartialRecord',{'column':nameColumn.column}, result.column)
for result in presults
if any(v is not None for v in result.filter_on.values())
]
@@ -303,7 +303,7 @@ def _upload(self, to_upload: List[TreeDefItemWithParseResults], matched: Union[M
missing_requireds = [
# TODO: there should probably be a different structure for
# missing required fields than ParseFailure
- ParseFailure(r.missing_required, {}, r.column)
+ WorkBenchParseFailure(r.missing_required, {}, r.column)
for tdiwpr in to_upload
for r in tdiwpr.results
if r.missing_required is not None
diff --git a/specifyweb/workbench/upload/upload_result.py b/specifyweb/workbench/upload/upload_result.py
index a993c330387..bda62905e41 100644
--- a/specifyweb/workbench/upload/upload_result.py
+++ b/specifyweb/workbench/upload/upload_result.py
@@ -2,7 +2,7 @@
from typing_extensions import Literal
-from .parsing import ParseFailure
+from .parsing import WorkBenchParseFailure
Failure = Literal["Failure"]
@@ -153,7 +153,7 @@ def json_to_NoMatch(json: Dict) -> NoMatch:
return NoMatch(info=json_to_ReportInfo(r['info']))
class ParseFailures(NamedTuple):
- failures: List[ParseFailure]
+ failures: List[WorkBenchParseFailure]
def get_id(self) -> Failure:
return "Failure"
@@ -163,7 +163,7 @@ def to_json(self):
def json_to_ParseFailures(json: Dict) -> ParseFailures:
r = json['ParseFailures']
- return ParseFailures(failures=[ParseFailure(*i) for i in r['failures']])
+ return ParseFailures(failures=[WorkBenchParseFailure(*i) for i in r['failures']])
class PropagatedFailure(NamedTuple):
def get_id(self) -> Failure:
diff --git a/specifyweb/workbench/upload/upload_table.py b/specifyweb/workbench/upload/upload_table.py
index f299853b3f5..dda71e8a44f 100644
--- a/specifyweb/workbench/upload/upload_table.py
+++ b/specifyweb/workbench/upload/upload_table.py
@@ -8,7 +8,7 @@
from specifyweb.businessrules.exceptions import BusinessRuleException
from specifyweb.specify import models
from .column_options import ColumnOptions, ExtendedColumnOptions
-from .parsing import parse_many, ParseResult, ParseFailure
+from .parsing import parse_many, ParseResult, WorkBenchParseFailure
from .tomany import ToManyRecord, ScopedToManyRecord, BoundToManyRecord
from .upload_result import UploadResult, Uploaded, NoMatch, Matched, \
MatchedMultiple, NullRecord, FailedBusinessRule, ReportInfo, \
@@ -467,7 +467,7 @@ def _do_upload(self, model, toOneResults: Dict[str, UploadResult], info: ReportI
missing_requireds = [
# TODO: there should probably be a different structure for
# missing required fields than ParseFailure
- ParseFailure(parsedField.missing_required, {}, parsedField.column)
+ WorkBenchParseFailure(parsedField.missing_required, {}, parsedField.column)
for parsedField in self.parsedFields
if parsedField.missing_required is not None
]
From 3bc5f7fe551d1946df4dc284f9664ca0e1df9b1a Mon Sep 17 00:00:00 2001
From: melton-jason
Date: Thu, 15 Feb 2024 11:16:48 -0600
Subject: [PATCH 05/71] Finish parsing implementation
---
.../components/Header/ImportLocalitySet.tsx | 26 ++++-
specifyweb/specify/import_locality.py | 107 ++++++++++++++++++
specifyweb/specify/parse.py | 2 +-
specifyweb/specify/urls.py | 2 +-
specifyweb/specify/views.py | 35 +++++-
.../workbench/upload/tests/testparsing.py | 6 +-
6 files changed, 162 insertions(+), 16 deletions(-)
create mode 100644 specifyweb/specify/import_locality.py
diff --git a/specifyweb/frontend/js_src/lib/components/Header/ImportLocalitySet.tsx b/specifyweb/frontend/js_src/lib/components/Header/ImportLocalitySet.tsx
index 257b804e654..b373a716002 100644
--- a/specifyweb/frontend/js_src/lib/components/Header/ImportLocalitySet.tsx
+++ b/specifyweb/frontend/js_src/lib/components/Header/ImportLocalitySet.tsx
@@ -40,6 +40,9 @@ export function ImportLocalitySet(): JSX.Element {
unrecognizedHeaders: [] as RA,
});
+ const [headers, setHeaders] = React.useState>([]);
+ const [data, setData] = React.useState>>([]);
+
return (
<>
{
- const parsedHeader = currentHeader.toLowerCase() as Header;
+ const parsedHeader = currentHeader.toLowerCase().trim() as Header;
const isUnknown = !acceptedHeaders.has(parsedHeader);
return {
@@ -67,10 +70,8 @@ export function ImportLocalitySet(): JSX.Element {
}
);
setHeaderErrors(foundHeaderErrors);
- if (
- Object.values(foundHeaderErrors).some((errors) => errors.length > 0)
- )
- return;
+ setHeaders(headers);
+ setData(data.slice(1));
}}
/>
{Object.values(headerErrors).some((errors) => errors.length > 0) && (
@@ -79,7 +80,20 @@ export function ImportLocalitySet(): JSX.Element {
<>
{commonText.close()}
{headerErrors.missingRequiredHeaders.length === 0 && (
- {commonText.import()}
+
+ ajax('/api/import/locality_set/', {
+ headers: { Accept: 'application/json' },
+ body: {
+ columnHeaders: headers,
+ data,
+ },
+ method: 'POST',
+ })
+ }
+ >
+ {commonText.import()}
+
)}
>
}
diff --git a/specifyweb/specify/import_locality.py b/specifyweb/specify/import_locality.py
new file mode 100644
index 00000000000..c8fdb262034
--- /dev/null
+++ b/specifyweb/specify/import_locality.py
@@ -0,0 +1,107 @@
+from typing import Any, Dict, List, Union, Tuple, Literal, Optional, NamedTuple
+
+from django.db.models import QuerySet
+
+import specifyweb.specify.models as spmodels
+from specifyweb.specify.datamodel import datamodel
+from specifyweb.specify.parse import parse_field as _parse_field, ParseFailure as BaseParseFailure, ParseSucess as BaseParseSuccess
+
+ParseErrorMessageKey = Literal[
+ 'guidNotProvided',
+ 'noLocalityMatchingGuid',
+ 'multipleLocalitiesWithGuid',
+
+ 'coordinateBadFormat',
+ 'latitudeOutOfRange',
+ 'longitudeOutOfRange'
+]
+
+updatable_locality_fields = ['latitude1', 'longitude1', 'datum']
+updatable_geocoorddetail_fields = [
+ field.name for field in datamodel.get_table_strict('Geocoorddetail').fields]
+
+ImportModel = Literal['Locality', 'Geocoorddetail']
+
+
+class ParseError(NamedTuple):
+ message: ParseErrorMessageKey
+ payload: Optional[Dict[str, Any]]
+ row_number: Optional[int]
+
+ @classmethod
+ def from_parse_failure(cls, parse_failure: BaseParseFailure, row_number: int):
+ return cls(parse_failure.message, parse_failure.paylod, row_number)
+
+ def to_json(self):
+ return {"message": self.message, "payload": self.payload, "row_number": self.row_number}
+
+
+class ParseSuccess(NamedTuple):
+ to_upload: Dict[str, Any]
+ model: ImportModel
+ locality_id: Optional[int]
+ row_number: Optional[str]
+
+ @classmethod
+ def from_base_parse_success(cls, parse_success: BaseParseSuccess, model: ImportModel, locality_id: Optional[int], row_number: int):
+ return cls(parse_success.to_upload, model, locality_id, row_number)
+
+
+def parse_locality_set(collection, raw_headers: List[str], data: List[List[str]]) -> Tuple[List[ParseSuccess], List[ParseError]]:
+ errors: List[ParseError] = []
+ to_upload: List[ParseSuccess] = []
+
+ headers = [header.strip() for header in raw_headers]
+
+ if 'guid' not in headers:
+ errors.append(ParseError('guidHeaderNotProvided'))
+
+ guid_index = headers.index('guid')
+ updatable_locality_fields_index = [{'field': field, 'index': headers.index(
+ field)} for field in headers if field.lower() in updatable_locality_fields]
+
+ geocoorddetail_fields_index = [{'field': field, 'index': headers.index(
+ field)} for field in headers if field.lower() in updatable_geocoorddetail_fields]
+
+ for row_mumber, row in enumerate(data):
+ guid = row[guid_index]
+ locality_query = spmodels.Locality.objects.filter(guid=guid)
+ if len(locality_query) == 0:
+ errors.append(ParseError('noLocalityMatchingGuid',
+ {'guid': guid}, row_mumber))
+
+ if len(locality_query) > 1:
+ errors.append(ParseError('multipleLocalitiesWithGuid', {'localityIds': tuple(
+ locality.id for locality in locality_query)}, row_mumber))
+
+ locality_values = [{'field': dict['field'], 'value': row[dict['index']].strip()}
+ for dict in updatable_locality_fields_index]
+
+ geocoorddetail_values = [{'field': dict['field'], 'value': row[dict['index']].strip()}
+ for dict in geocoorddetail_fields_index]
+
+ locality_id: Optional[int] = None if len(
+ locality_query) != 1 else locality_query[0].id
+
+ parsed_locality_fields = [parse_field(
+ collection, 'Locality', dict['field'], dict['value'], locality_id, row_mumber) for dict in locality_values]
+
+ parsed_geocoorddetail_fields = [parse_field(
+ collection, 'Geocoorddetail', dict["field"], dict['value'], locality_id, row_mumber) for dict in geocoorddetail_values]
+
+ for parsed in [*parsed_locality_fields, *parsed_geocoorddetail_fields]:
+ if isinstance(parsed, ParseError):
+ errors.append(parsed)
+ else:
+ to_upload.append(parsed)
+
+ return to_upload, errors
+
+
+def parse_field(collection, table_name: ImportModel, field_name: str, field_value: str, locality_id: Optional[int], row_number: int):
+ parsed = _parse_field(collection, table_name, field_name, field_value)
+
+ if isinstance(parsed, BaseParseFailure):
+ return ParseError.from_parse_failure(parsed, row_number)
+ else:
+ return ParseSuccess.from_base_parse_success(parsed, table_name, locality_id, row_number)
diff --git a/specifyweb/specify/parse.py b/specifyweb/specify/parse.py
index 3b739f01710..07d20952940 100644
--- a/specifyweb/specify/parse.py
+++ b/specifyweb/specify/parse.py
@@ -205,7 +205,7 @@ def parse_latlong(field: Field, value: str) -> ParseResult:
return ParseSucess({field.name.lower(): coord,
'originallatlongunit': unit,
- field.name.lower().replace('itude', '') + 'text': parse_string(value)}),
+ field.name.lower().replace('itude', '') + 'text': parse_string(value)})
def parse_coord(value: str) -> Optional[Tuple[float, int]]:
diff --git a/specifyweb/specify/urls.py b/specifyweb/specify/urls.py
index 418d2e5df34..b43c9a2aa1d 100644
--- a/specifyweb/specify/urls.py
+++ b/specifyweb/specify/urls.py
@@ -39,7 +39,7 @@
])),
url(r'^import/', include([
- url(r'^locality_set/$', views.import_locality_set)
+ url(r'^locality_set/$', views.upload_locality_set)
])),
# generates Sp6 master key
diff --git a/specifyweb/specify/views.py b/specifyweb/specify/views.py
index 86eec173a1d..7a0651fa368 100644
--- a/specifyweb/specify/views.py
+++ b/specifyweb/specify/views.py
@@ -21,8 +21,8 @@
PermissionTargetAction, PermissionsException, check_permission_targets, table_permissions_checker
from specifyweb.celery_tasks import app
from specifyweb.specify.record_merging import record_merge_fx, record_merge_task, resolve_record_merge_response
+from specifyweb.specify.import_locality import parse_locality_set
from . import api, models as spmodels
-from .build_models import orderings
from .specify_jar import specify_jar
from celery.utils.log import get_task_logger # type: ignore
logger = get_task_logger(__name__)
@@ -773,13 +773,12 @@ def abort_merge_task(request, merge_id: int) -> http.HttpResponse:
"post": {
"requestBody": {
"required": True,
- "description": "Replace a list of old records with a new record.",
"content": {
"application/json": {
"schema": {
"type": "object",
"properties": {
- "columns": {
+ "columnHeaders": {
"type": "array",
"items": {
"type": "string"
@@ -801,5 +800,31 @@ def abort_merge_task(request, merge_id: int) -> http.HttpResponse:
}
}
})
-def import_locality_set(request):
- pass
+@login_maybe_required
+@require_POST
+def upload_locality_set(request: http.HttpRequest):
+ request_data = json.loads(request.body)
+ column_headers = request_data["columnHeaders"]
+ data = request_data["data"]
+
+ to_upload, errors = parse_locality_set(request.specify_collection, column_headers, data)
+
+ result = {
+ "type": None,
+ "data": []
+ }
+
+ if len(errors) > 0:
+ result["type"] = "Error"
+ result["data"] = [error.to_json() for error in errors]
+ return http.JsonResponse(result)
+
+ result["type"] = "Uploaded"
+ with transaction.atomic():
+ for parse_success in to_upload:
+ uploadable = parse_success.to_upload
+ model = parse_success.model
+ locality = parse_success.locality_id
+
+ return http.JsonResponse(result)
+
\ No newline at end of file
diff --git a/specifyweb/workbench/upload/tests/testparsing.py b/specifyweb/workbench/upload/tests/testparsing.py
index e5c019a8c86..04b3c11d63e 100644
--- a/specifyweb/workbench/upload/tests/testparsing.py
+++ b/specifyweb/workbench/upload/tests/testparsing.py
@@ -11,7 +11,7 @@
from specifyweb.specify.datamodel import datamodel
from specifyweb.stored_queries.format import LDLM_TO_MYSQL, MYSQL_TO_MONTH, \
MYSQL_TO_YEAR
-from specifyweb.specify.parse import parse_coord, parse_date, ParseFailure
+from specifyweb.specify.parse import parse_coord, parse_date, BaseParseFailure
from .base import UploadTestsBase, get_table
from ..column_options import ColumnOptions
from ..parsing import ParseResult as PR
@@ -29,11 +29,11 @@ class DateParsingTests(unittest.TestCase):
def test_bad1(self) -> None:
result = parse_date(co, 'catalogeddate', '%d/%m/%Y', 'foobar')
- self.assertEqual(ParseFailure(message='invalidYear', payload={'value':'foobar'}), result)
+ self.assertEqual(BaseParseFailure(message='invalidYear', payload={'value':'foobar'}), result)
def test_bad2(self) -> None:
result = parse_date(co, 'catalogeddate', '%d/%m/%Y', '1978-7-24')
- self.assertEqual(ParseFailure(message='badDateFormat', payload={'value':'1978-7-24', 'format':'%d/%m/%Y'}), result)
+ self.assertEqual(BaseParseFailure(message='badDateFormat', payload={'value':'1978-7-24', 'format':'%d/%m/%Y'}), result)
@given(st.dates(min_value=date(1000,1,1)), st.sampled_from([f for f in LDLM_TO_MYSQL.values() if '%Y' in f]))
def test_full_date(self, date, format) -> None:
From f5045033ce443e562cb4127a65a7468365188df8 Mon Sep 17 00:00:00 2001
From: melton-jason
Date: Thu, 15 Feb 2024 11:25:39 -0600
Subject: [PATCH 06/71] Resolve backend tests
---
specifyweb/workbench/upload/tests/testparsing.py | 6 +++---
1 file changed, 3 insertions(+), 3 deletions(-)
diff --git a/specifyweb/workbench/upload/tests/testparsing.py b/specifyweb/workbench/upload/tests/testparsing.py
index 04b3c11d63e..e5c019a8c86 100644
--- a/specifyweb/workbench/upload/tests/testparsing.py
+++ b/specifyweb/workbench/upload/tests/testparsing.py
@@ -11,7 +11,7 @@
from specifyweb.specify.datamodel import datamodel
from specifyweb.stored_queries.format import LDLM_TO_MYSQL, MYSQL_TO_MONTH, \
MYSQL_TO_YEAR
-from specifyweb.specify.parse import parse_coord, parse_date, BaseParseFailure
+from specifyweb.specify.parse import parse_coord, parse_date, ParseFailure
from .base import UploadTestsBase, get_table
from ..column_options import ColumnOptions
from ..parsing import ParseResult as PR
@@ -29,11 +29,11 @@ class DateParsingTests(unittest.TestCase):
def test_bad1(self) -> None:
result = parse_date(co, 'catalogeddate', '%d/%m/%Y', 'foobar')
- self.assertEqual(BaseParseFailure(message='invalidYear', payload={'value':'foobar'}), result)
+ self.assertEqual(ParseFailure(message='invalidYear', payload={'value':'foobar'}), result)
def test_bad2(self) -> None:
result = parse_date(co, 'catalogeddate', '%d/%m/%Y', '1978-7-24')
- self.assertEqual(BaseParseFailure(message='badDateFormat', payload={'value':'1978-7-24', 'format':'%d/%m/%Y'}), result)
+ self.assertEqual(ParseFailure(message='badDateFormat', payload={'value':'1978-7-24', 'format':'%d/%m/%Y'}), result)
@given(st.dates(min_value=date(1000,1,1)), st.sampled_from([f for f in LDLM_TO_MYSQL.values() if '%Y' in f]))
def test_full_date(self, date, format) -> None:
From 96cbf7dc6cae5bfe2c49acad2fb8a4bb251c0d82 Mon Sep 17 00:00:00 2001
From: melton-jason
Date: Thu, 15 Feb 2024 11:30:07 -0600
Subject: [PATCH 07/71] Fix typo in ParseFailure
---
specifyweb/specify/import_locality.py | 2 +-
specifyweb/specify/parse.py | 2 +-
specifyweb/workbench/upload/parsing.py | 2 +-
specifyweb/workbench/upload/tests/testparsing.py | 10 +++++-----
4 files changed, 8 insertions(+), 8 deletions(-)
diff --git a/specifyweb/specify/import_locality.py b/specifyweb/specify/import_locality.py
index c8fdb262034..3492585b99e 100644
--- a/specifyweb/specify/import_locality.py
+++ b/specifyweb/specify/import_locality.py
@@ -30,7 +30,7 @@ class ParseError(NamedTuple):
@classmethod
def from_parse_failure(cls, parse_failure: BaseParseFailure, row_number: int):
- return cls(parse_failure.message, parse_failure.paylod, row_number)
+ return cls(parse_failure.message, parse_failure.payload, row_number)
def to_json(self):
return {"message": self.message, "payload": self.payload, "row_number": self.row_number}
diff --git a/specifyweb/specify/parse.py b/specifyweb/specify/parse.py
index 07d20952940..56cd001a623 100644
--- a/specifyweb/specify/parse.py
+++ b/specifyweb/specify/parse.py
@@ -14,7 +14,7 @@
class ParseFailure(NamedTuple):
message: str
- paylod: Dict[str, Any]
+ payload: Dict[str, Any]
def to_json(self) -> List:
return list(self)
diff --git a/specifyweb/workbench/upload/parsing.py b/specifyweb/workbench/upload/parsing.py
index 73ed34931c0..03b88702c6e 100644
--- a/specifyweb/workbench/upload/parsing.py
+++ b/specifyweb/workbench/upload/parsing.py
@@ -26,7 +26,7 @@ class WorkBenchParseFailure(NamedTuple):
@classmethod
def from_parse_failure(cls, pf: ParseFailure, column: str):
- return cls(message=pf.message, payload=pf.paylod, column=column)
+ return cls(message=pf.message, payload=pf.payload, column=column)
def to_json(self) -> List:
return list(self)
diff --git a/specifyweb/workbench/upload/tests/testparsing.py b/specifyweb/workbench/upload/tests/testparsing.py
index e5c019a8c86..b8c55b9c192 100644
--- a/specifyweb/workbench/upload/tests/testparsing.py
+++ b/specifyweb/workbench/upload/tests/testparsing.py
@@ -38,7 +38,7 @@ def test_bad2(self) -> None:
@given(st.dates(min_value=date(1000,1,1)), st.sampled_from([f for f in LDLM_TO_MYSQL.values() if '%Y' in f]))
def test_full_date(self, date, format) -> None:
datestr = date.strftime(format)
- result = parse_date(co, 'catalogeddate', format, datestr, 'catdate')
+ result = parse_date(co, 'catalogeddate', format, datestr)
self.assertIsInstance(result, PR)
assert isinstance(result, PR)
self.assertEqual({'catalogeddate': date, 'catalogeddateprecision': 1}, result.upload)
@@ -46,7 +46,7 @@ def test_full_date(self, date, format) -> None:
@given(st.dates(min_value=date(1000,1,1)), st.sampled_from([f for f in LDLM_TO_MYSQL.values() if '%Y' in f]))
def test_month(self, date, format) -> None:
datestr = date.strftime(MYSQL_TO_MONTH[format])
- result = parse_date(co, 'catalogeddate', format, datestr, 'catdate')
+ result = parse_date(co, 'catalogeddate', format, datestr)
self.assertIsInstance(result, PR)
assert isinstance(result, PR)
self.assertEqual({'catalogeddate': date.replace(day=1), 'catalogeddateprecision': 2}, result.upload)
@@ -54,7 +54,7 @@ def test_month(self, date, format) -> None:
@given(st.dates(min_value=date(1000,1,1)), st.sampled_from([f for f in LDLM_TO_MYSQL.values() if '%Y' in f]))
def test_year(self, date, format) -> None:
datestr = date.strftime(MYSQL_TO_YEAR[format])
- result = parse_date(co, 'catalogeddate', format, datestr, 'catdate')
+ result = parse_date(co, 'catalogeddate', format, datestr)
self.assertIsInstance(result, PR)
assert isinstance(result, PR)
self.assertEqual({'catalogeddate': date.replace(day=1, month=1), 'catalogeddateprecision': 3}, result.upload)
@@ -63,7 +63,7 @@ def test_year(self, date, format) -> None:
def test_zero_day(self, date, format) -> None:
datestr = date.strftime(re.sub('%d', '00', format))
self.assertTrue('00' in datestr)
- result = parse_date(co, 'catalogeddate', format, datestr, 'catdate')
+ result = parse_date(co, 'catalogeddate', format, datestr)
self.assertIsInstance(result, PR)
assert isinstance(result, PR)
self.assertEqual({'catalogeddate': date.replace(day=1), 'catalogeddateprecision': 2}, result.upload)
@@ -72,7 +72,7 @@ def test_zero_day(self, date, format) -> None:
def test_zero_month(self, date, format) -> None:
datestr = date.strftime(re.sub('(%d)|(%m)', '00', format))
self.assertIn('00', datestr)
- result = parse_date(co, 'catalogeddate', format, datestr, 'catdate')
+ result = parse_date(co, 'catalogeddate', format, datestr)
self.assertIsInstance(result, PR)
assert isinstance(result, PR)
self.assertEqual({'catalogeddate': date.replace(day=1,month=1), 'catalogeddateprecision': 3}, result.upload)
From af253eac812d14c68ce30d6ac6e4cb580669dc8a Mon Sep 17 00:00:00 2001
From: melton-jason
Date: Thu, 15 Feb 2024 11:42:22 -0600
Subject: [PATCH 08/71] Use the correct parse result class in parse date tests
---
.../workbench/upload/tests/testparsing.py | 32 +++++++++----------
1 file changed, 16 insertions(+), 16 deletions(-)
diff --git a/specifyweb/workbench/upload/tests/testparsing.py b/specifyweb/workbench/upload/tests/testparsing.py
index b8c55b9c192..23235f1cde0 100644
--- a/specifyweb/workbench/upload/tests/testparsing.py
+++ b/specifyweb/workbench/upload/tests/testparsing.py
@@ -11,7 +11,7 @@
from specifyweb.specify.datamodel import datamodel
from specifyweb.stored_queries.format import LDLM_TO_MYSQL, MYSQL_TO_MONTH, \
MYSQL_TO_YEAR
-from specifyweb.specify.parse import parse_coord, parse_date, ParseFailure
+from specifyweb.specify.parse import parse_coord, parse_date, ParseFailure, ParseSucess
from .base import UploadTestsBase, get_table
from ..column_options import ColumnOptions
from ..parsing import ParseResult as PR
@@ -39,43 +39,43 @@ def test_bad2(self) -> None:
def test_full_date(self, date, format) -> None:
datestr = date.strftime(format)
result = parse_date(co, 'catalogeddate', format, datestr)
- self.assertIsInstance(result, PR)
- assert isinstance(result, PR)
- self.assertEqual({'catalogeddate': date, 'catalogeddateprecision': 1}, result.upload)
+ self.assertIsInstance(result, ParseSucess)
+ assert isinstance(result, ParseSucess)
+ self.assertEqual({'catalogeddate': date, 'catalogeddateprecision': 1}, result.to_upload)
@given(st.dates(min_value=date(1000,1,1)), st.sampled_from([f for f in LDLM_TO_MYSQL.values() if '%Y' in f]))
def test_month(self, date, format) -> None:
datestr = date.strftime(MYSQL_TO_MONTH[format])
result = parse_date(co, 'catalogeddate', format, datestr)
- self.assertIsInstance(result, PR)
- assert isinstance(result, PR)
- self.assertEqual({'catalogeddate': date.replace(day=1), 'catalogeddateprecision': 2}, result.upload)
+ self.assertIsInstance(result, ParseSucess)
+ assert isinstance(result, ParseSucess)
+ self.assertEqual({'catalogeddate': date.replace(day=1), 'catalogeddateprecision': 2}, result.to_upload)
@given(st.dates(min_value=date(1000,1,1)), st.sampled_from([f for f in LDLM_TO_MYSQL.values() if '%Y' in f]))
def test_year(self, date, format) -> None:
datestr = date.strftime(MYSQL_TO_YEAR[format])
result = parse_date(co, 'catalogeddate', format, datestr)
- self.assertIsInstance(result, PR)
- assert isinstance(result, PR)
- self.assertEqual({'catalogeddate': date.replace(day=1, month=1), 'catalogeddateprecision': 3}, result.upload)
+ self.assertIsInstance(result, ParseSucess)
+ assert isinstance(result, ParseSucess)
+ self.assertEqual({'catalogeddate': date.replace(day=1, month=1), 'catalogeddateprecision': 3}, result.to_upload)
@given(st.dates(min_value=date(1000,1,1)), st.sampled_from([f for f in LDLM_TO_MYSQL.values() if '%Y' in f]))
def test_zero_day(self, date, format) -> None:
datestr = date.strftime(re.sub('%d', '00', format))
self.assertTrue('00' in datestr)
result = parse_date(co, 'catalogeddate', format, datestr)
- self.assertIsInstance(result, PR)
- assert isinstance(result, PR)
- self.assertEqual({'catalogeddate': date.replace(day=1), 'catalogeddateprecision': 2}, result.upload)
+ self.assertIsInstance(result, ParseSucess)
+ assert isinstance(result, ParseSucess)
+ self.assertEqual({'catalogeddate': date.replace(day=1), 'catalogeddateprecision': 2}, result.to_upload)
@given(st.dates(min_value=date(1000,1,1)), st.sampled_from([f for f in LDLM_TO_MYSQL.values() if '%Y' in f and '%b' not in f]))
def test_zero_month(self, date, format) -> None:
datestr = date.strftime(re.sub('(%d)|(%m)', '00', format))
self.assertIn('00', datestr)
result = parse_date(co, 'catalogeddate', format, datestr)
- self.assertIsInstance(result, PR)
- assert isinstance(result, PR)
- self.assertEqual({'catalogeddate': date.replace(day=1,month=1), 'catalogeddateprecision': 3}, result.upload)
+ self.assertIsInstance(result, ParseSucess)
+ assert isinstance(result, ParseSucess)
+ self.assertEqual({'catalogeddate': date.replace(day=1,month=1), 'catalogeddateprecision': 3}, result.to_upload)
class ParsingTests(UploadTestsBase):
def setUp(self) -> None:
From c017fb0650d70e40018da7991484d7b827a9b46f Mon Sep 17 00:00:00 2001
From: melton-jason
Date: Wed, 13 Mar 2024 13:26:35 -0500
Subject: [PATCH 09/71] Finish backend implementation and improve Parsing types
---
specifyweb/specify/import_locality.py | 27 ++++---
specifyweb/specify/parse.py | 21 +++++-
specifyweb/specify/uiformatters.py | 6 +-
specifyweb/specify/views.py | 103 +++++++++++++++++++++++---
4 files changed, 128 insertions(+), 29 deletions(-)
diff --git a/specifyweb/specify/import_locality.py b/specifyweb/specify/import_locality.py
index 3492585b99e..ba4c026dd0a 100644
--- a/specifyweb/specify/import_locality.py
+++ b/specifyweb/specify/import_locality.py
@@ -1,21 +1,20 @@
-from typing import Any, Dict, List, Union, Tuple, Literal, Optional, NamedTuple
-
-from django.db.models import QuerySet
+from typing import get_args as get_typing_args, Any, Dict, List, Tuple, Literal, Optional, NamedTuple, Union
import specifyweb.specify.models as spmodels
from specifyweb.specify.datamodel import datamodel
-from specifyweb.specify.parse import parse_field as _parse_field, ParseFailure as BaseParseFailure, ParseSucess as BaseParseSuccess
+from specifyweb.specify.parse import ParseFailureKey, parse_field as _parse_field, ParseFailure as BaseParseFailure, ParseSucess as BaseParseSuccess
-ParseErrorMessageKey = Literal[
- 'guidNotProvided',
+LocalityParseErrorMessageKey = Literal[
+ 'guidHeaderNotProvided',
'noLocalityMatchingGuid',
'multipleLocalitiesWithGuid',
-
- 'coordinateBadFormat',
- 'latitudeOutOfRange',
- 'longitudeOutOfRange'
]
+# constructs a list with the string literals defined in the
+# base ParseFailureKey and LocalityParseErrorMessageKey types
+localityParseErrorMessages: List[LocalityParseErrorMessageKey] = list(
+ set(get_typing_args(LocalityParseErrorMessageKey)) | set(get_typing_args(ParseFailureKey)))
+
updatable_locality_fields = ['latitude1', 'longitude1', 'datum']
updatable_geocoorddetail_fields = [
field.name for field in datamodel.get_table_strict('Geocoorddetail').fields]
@@ -24,16 +23,16 @@
class ParseError(NamedTuple):
- message: ParseErrorMessageKey
+ message: Union[ParseFailureKey, LocalityParseErrorMessageKey]
payload: Optional[Dict[str, Any]]
row_number: Optional[int]
@classmethod
def from_parse_failure(cls, parse_failure: BaseParseFailure, row_number: int):
return cls(parse_failure.message, parse_failure.payload, row_number)
-
+
def to_json(self):
- return {"message": self.message, "payload": self.payload, "row_number": self.row_number}
+ return {"message": self.message, "payload": self.payload, "rowNumber": self.row_number}
class ParseSuccess(NamedTuple):
@@ -71,7 +70,7 @@ def parse_locality_set(collection, raw_headers: List[str], data: List[List[str]]
{'guid': guid}, row_mumber))
if len(locality_query) > 1:
- errors.append(ParseError('multipleLocalitiesWithGuid', {'localityIds': tuple(
+ errors.append(ParseError('multipleLocalitiesWithGuid', {'guid': guid, 'localityIds': list(
locality.id for locality in locality_query)}, row_mumber))
locality_values = [{'field': dict['field'], 'value': row[dict['index']].strip()}
diff --git a/specifyweb/specify/parse.py b/specifyweb/specify/parse.py
index 56cd001a623..82d47ef24b8 100644
--- a/specifyweb/specify/parse.py
+++ b/specifyweb/specify/parse.py
@@ -1,7 +1,7 @@
import re
import math
-from typing import Dict, List, Tuple, Any, NamedTuple, Union, Optional
+from typing import Dict, List, Tuple, Any, NamedTuple, Union, Optional, Literal
from datetime import datetime
from decimal import Decimal
@@ -11,9 +11,24 @@
from specifyweb.specify.datamodel import datamodel, Table, Field, Relationship
from specifyweb.specify.uiformatters import get_uiformatter, FormatMismatch
+ParseFailureKey = Literal[
+'valueTooLong',
+'formatMismatch',
+
+'failedParsingDecimal',
+'failedParsingFloat',
+'failedParsingBoolean',
+'failedParsingAgentType',
+
+'invalidYear',
+'badDateFormat',
+
+'latitudeOutOfRange',
+'longitudeOutOfRange'
+]
class ParseFailure(NamedTuple):
- message: str
+ message: ParseFailureKey
payload: Dict[str, Any]
def to_json(self) -> List:
@@ -158,7 +173,7 @@ def parse_formatted(collection, uiformatter, table: Table, field: Union[Field, R
try:
parsed = uiformatter.parse(value)
except FormatMismatch as e:
- return ParseFailure(e.args[0], {})
+ return ParseFailure('formatMismatch', {'value': e.value, 'formatter': e.formatter})
if uiformatter.needs_autonumber(parsed):
canonicalized = uiformatter.autonumber_now(
diff --git a/specifyweb/specify/uiformatters.py b/specifyweb/specify/uiformatters.py
index 7a1958e87d1..3de7fe8fac4 100644
--- a/specifyweb/specify/uiformatters.py
+++ b/specifyweb/specify/uiformatters.py
@@ -68,6 +68,10 @@ def get_autonumber_group_filter(model, collection, format_name: str):
return default
class FormatMismatch(ValueError):
+ def __init__(self, *args: object, value: str, formatter: str) -> None:
+ super().__init__(*args)
+ self.value = value
+ self.formatter = formatter
pass
class UIFormatter(NamedTuple):
@@ -83,7 +87,7 @@ def parse_regexp(self) -> str:
def parse(self, value: str) -> Sequence[str]:
match = re.match(self.parse_regexp(), value)
if match is None:
- raise FormatMismatch("value {} doesn't match formatter {}". format(repr(value), self.value()))
+ raise FormatMismatch("value {} doesn't match formatter {}".format(repr(value), self.value()), value=repr(value), formatter=self.value())
return match.groups()
def value(self) -> str:
diff --git a/specifyweb/specify/views.py b/specifyweb/specify/views.py
index d4bc6f36177..c05bdb3567c 100644
--- a/specifyweb/specify/views.py
+++ b/specifyweb/specify/views.py
@@ -22,7 +22,7 @@
PermissionTargetAction, PermissionsException, check_permission_targets, table_permissions_checker
from specifyweb.celery_tasks import app
from specifyweb.specify.record_merging import record_merge_fx, record_merge_task, resolve_record_merge_response
-from specifyweb.specify.import_locality import parse_locality_set
+from specifyweb.specify.import_locality import localityParseErrorMessages, parse_locality_set
from . import api, models as spmodels
from .specify_jar import specify_jar
from celery.utils.log import get_task_logger # type: ignore
@@ -798,6 +798,67 @@ def abort_merge_task(request, merge_id: int) -> http.HttpResponse:
}
}
}
+ },
+ "responses": {
+ "200": {
+ "description": "The Locality records were updated and GeocoordDetails uploaded successfully",
+ "content": {
+ "application/json": {
+ "schema": {
+ "type": "object",
+ "properties": {
+ "type": {
+ "enum": ["Uploaded"]
+ },
+ "data": {
+ "description": "An array of updated Locality IDs",
+ "type": "array",
+ "items": {
+ "type": "integer",
+ "minimum": 0
+ }
+ }
+ },
+ }
+ }
+ }
+ },
+ "422": {
+ "description": "Some values could not be successfully parsed",
+ "content": {
+ "application/json": {
+ "schema": {
+ "type": "object",
+ "properties": {
+ "type": {
+ "type": "string",
+ "enum": ["ParseError"]
+ },
+ "data": {
+ "type": "array",
+ "items": {
+ "type": "object",
+ "properties": {
+ "message": {
+ "description": "Keys for errors which occurred during parsing",
+ "type": "string",
+ "enum": localityParseErrorMessages
+ },
+ "payload": {
+ "type": "object"
+ },
+ "rowNumber": {
+ "type": "integer",
+ "minimum": 0
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ }
}
}
})
@@ -805,27 +866,47 @@ def abort_merge_task(request, merge_id: int) -> http.HttpResponse:
@require_POST
def upload_locality_set(request: http.HttpRequest):
request_data = json.loads(request.body)
- column_headers = request_data["columnHeaders"]
+ column_headers = request_data["columnHeaders"]
data = request_data["data"]
- to_upload, errors = parse_locality_set(request.specify_collection, column_headers, data)
-
+ to_upload, errors = parse_locality_set(
+ request.specify_collection, column_headers, data)
+
result = {
"type": None,
"data": []
}
if len(errors) > 0:
- result["type"] = "Error"
- result["data"] = [error.to_json() for error in errors]
- return http.JsonResponse(result)
-
+ result["type"] = "ParseError"
+ result["data"] = [error.to_json() for error in errors]
+ return http.JsonResponse(result, safe=False)
+
result["type"] = "Uploaded"
with transaction.atomic():
for parse_success in to_upload:
uploadable = parse_success.to_upload
- model = parse_success.model
- locality = parse_success.locality_id
+ model_name = parse_success.model
+ locality_id = parse_success.locality_id
+
+ if locality_id is None:
+ raise ValueError(
+ f"No matching Locality found on row {parse_success.row_number}")
+
+ model = getattr(spmodels, model_name)
+ locality = spmodels.Locality.objects.get(id=locality_id)
+
+ if model_name == 'Geocoorddetail':
+ locality.geocoorddetails.get_queryset().delete()
+ geoCoordDetail = model.objects.create(**uploadable)
+ geoCoordDetail.locality = locality
+ geoCoordDetail.save()
+ elif model_name == 'Locality':
+ # Queryset.update() is not used here as it does not send pre/post save signals
+ for field, value in uploadable.items():
+ setattr(locality, field, value)
+ locality.save()
+
+ result["data"].append(locality_id)
return http.JsonResponse(result)
-
\ No newline at end of file
From fbcf76391afe8c1be61807bdc7e30c1f03e10dcd Mon Sep 17 00:00:00 2001
From: melton-jason
Date: Wed, 13 Mar 2024 13:29:06 -0500
Subject: [PATCH 10/71] Improve frontend localizationKey for backend parse
results
---
.../lib/components/WorkBench/resultsParser.ts | 41 +++++++++++++------
.../js_src/lib/localization/backEnd.ts | 3 ++
.../js_src/lib/utils/ajax/definitions.ts | 1 +
3 files changed, 32 insertions(+), 13 deletions(-)
diff --git a/specifyweb/frontend/js_src/lib/components/WorkBench/resultsParser.ts b/specifyweb/frontend/js_src/lib/components/WorkBench/resultsParser.ts
index e2897a50e02..71efea0b4a3 100644
--- a/specifyweb/frontend/js_src/lib/components/WorkBench/resultsParser.ts
+++ b/specifyweb/frontend/js_src/lib/components/WorkBench/resultsParser.ts
@@ -172,21 +172,16 @@ export type UploadResult = {
};
};
-/** Back-end sends a validation key. Front-end translates it */
-export function resolveValidationMessage(
+export function resolveBackendParsingMessage(
key: string,
payload: IR
-): LocalizedString {
+): LocalizedString | undefined {
if (key === 'failedParsingBoolean')
return backEndText.failedParsingBoolean({ value: payload.value as string });
else if (key === 'failedParsingDecimal')
return backEndText.failedParsingDecimal({ value: payload.value as string });
else if (key === 'failedParsingFloat')
return backEndText.failedParsingFloat({ value: payload.value as string });
- else if (key === 'failedParsingPickList')
- return backEndText.failedParsingPickList({
- value: `"${payload.value as string}"`,
- });
else if (key === 'failedParsingAgentType')
return backEndText.failedParsingAgentType({
agentTypeField: getField(tables.Agent, 'agentType').label,
@@ -195,12 +190,6 @@ export function resolveValidationMessage(
(payload.validTypes as RA) ?? []
),
});
- else if (key === 'pickListValueTooLong')
- return backEndText.pickListValueTooLong({
- pickListTable: tables.PickList.label,
- pickList: payload.pickList as string,
- maxLength: payload.maxLength as number,
- });
else if (key === 'valueTooLong')
return backEndText.valueTooLong({
maxLength: payload.maxLength as number,
@@ -226,6 +215,32 @@ export function resolveValidationMessage(
return backEndText.longitudeOutOfRange({
value: payload.value as string,
});
+ else if (key === 'formatMismatch')
+ return backEndText.formatMismatch({
+ value: payload.value as string,
+ formatter: payload.formatter as string,
+ });
+ else return undefined;
+}
+
+/** Back-end sends a validation key. Front-end translates it */
+export function resolveValidationMessage(
+ key: string,
+ payload: IR
+): LocalizedString {
+ const baseParsedMessage = resolveBackendParsingMessage(key, payload);
+ if (baseParsedMessage !== undefined) {
+ return baseParsedMessage;
+ } else if (key === 'failedParsingPickList')
+ return backEndText.failedParsingPickList({
+ value: `"${payload.value as string}"`,
+ });
+ else if (key === 'pickListValueTooLong')
+ return backEndText.pickListValueTooLong({
+ pickListTable: tables.PickList.label,
+ pickList: payload.pickList as string,
+ maxLength: payload.maxLength as number,
+ });
else if (key === 'invalidPartialRecord')
return backEndText.invalidPartialRecord({
column: payload.column as string,
diff --git a/specifyweb/frontend/js_src/lib/localization/backEnd.ts b/specifyweb/frontend/js_src/lib/localization/backEnd.ts
index f079220e472..95c3a69e1b9 100644
--- a/specifyweb/frontend/js_src/lib/localization/backEnd.ts
+++ b/specifyweb/frontend/js_src/lib/localization/backEnd.ts
@@ -211,6 +211,9 @@ export const backEndText = createDictionary({
Tatsächlich: {value:string}
`,
},
+ formatMismatch: {
+ 'en-us': 'value {value:string} does not match formatter {formatter:string}',
+ },
invalidPartialRecord: {
'en-us': 'this field must be empty if {column:string} is empty',
'ru-ru': 'это поле должно быть пустым, если {column:string} пусто',
diff --git a/specifyweb/frontend/js_src/lib/utils/ajax/definitions.ts b/specifyweb/frontend/js_src/lib/utils/ajax/definitions.ts
index 6fd51a277e2..221d27eb77f 100644
--- a/specifyweb/frontend/js_src/lib/utils/ajax/definitions.ts
+++ b/specifyweb/frontend/js_src/lib/utils/ajax/definitions.ts
@@ -13,6 +13,7 @@ export const Http = {
CONFLICT: 409,
TOO_LARGE: 413,
MISDIRECTED: 421,
+ UNPROCESSABLE: 422,
HUGE_HEADER: 431,
SERVER_ERROR: 500,
BAD_GATEWAY: 502,
From 6d963b6f5a5cc31ac605fc52b1fb804222a69164 Mon Sep 17 00:00:00 2001
From: melton-jason
Date: Wed, 13 Mar 2024 13:31:30 -0500
Subject: [PATCH 11/71] Extract 'Create RecordSet' button into a general
component
---
.../components/QueryBuilder/Components.tsx | 2 +-
.../QueryBuilder/CreateRecordSet.tsx | 57 +++++++++++++++----
.../lib/components/QueryBuilder/Results.tsx | 4 +-
3 files changed, 48 insertions(+), 15 deletions(-)
diff --git a/specifyweb/frontend/js_src/lib/components/QueryBuilder/Components.tsx b/specifyweb/frontend/js_src/lib/components/QueryBuilder/Components.tsx
index a456b6cbce0..4ce729c0c89 100644
--- a/specifyweb/frontend/js_src/lib/components/QueryBuilder/Components.tsx
+++ b/specifyweb/frontend/js_src/lib/components/QueryBuilder/Components.tsx
@@ -182,7 +182,7 @@ export function QueryButton({
/**
* Create a Record Set from all query results.
- * See also `CreateRecordSet`
+ * See also `CreateRecordSetFromQuery`
*/
export function MakeRecordSetButton({
baseTableName,
diff --git a/specifyweb/frontend/js_src/lib/components/QueryBuilder/CreateRecordSet.tsx b/specifyweb/frontend/js_src/lib/components/QueryBuilder/CreateRecordSet.tsx
index 6f50d705fe2..fd3f6b969dc 100644
--- a/specifyweb/frontend/js_src/lib/components/QueryBuilder/CreateRecordSet.tsx
+++ b/specifyweb/frontend/js_src/lib/components/QueryBuilder/CreateRecordSet.tsx
@@ -16,20 +16,19 @@ import type { RecordSet, SpQuery, Tables } from '../DataModel/types';
import { raise } from '../Errors/Crash';
import { recordSetView } from '../FormParse/webOnlyViews';
import { ResourceView } from '../Forms/ResourceView';
+import { LoadingScreen } from '../Molecules/Dialog';
import { RecordSetCreated, recordSetFromQueryLoading } from './Components';
-/**
- * Create a record set from selected records.
- * See also `MakeRecordSetButton`
- */
export function CreateRecordSet({
- getIds,
+ recordIds,
baseTableName,
- queryResource,
+ saveComponent,
+ defaultRecordSetName,
}: {
- readonly getIds: () => RA;
+ readonly recordIds: RA | (() => RA);
readonly baseTableName: keyof Tables;
- readonly queryResource: SpecifyResource | undefined;
+ readonly saveComponent?: () => JSX.Element;
+ readonly defaultRecordSetName?: string;
}): JSX.Element {
const [state, setState] = React.useState<
| State<'Editing', { readonly recordSet: SpecifyResource }>
@@ -38,14 +37,19 @@ export function CreateRecordSet({
| State<'Saving'>
>({ type: 'Main' });
+ const resolvedRecordIds = React.useMemo(
+ () => (typeof recordIds === 'function' ? recordIds() : recordIds),
+ [recordIds]
+ );
+
return (
<>
{
const recordSet = new tables.RecordSet.Resource();
- if (queryResource !== undefined && !queryResource.isNew())
- recordSet.set('name', queryResource.get('name'));
+ if (defaultRecordSetName !== undefined)
+ recordSet.set('name', defaultRecordSetName);
setState({
type: 'Editing',
recordSet,
@@ -81,7 +85,7 @@ export function CreateRecordSet({
* duplicate IDs (when displaying a -to-many relationship)
*/
// @ts-expect-error
- recordSetItems: f.unique(getIds()).map((id) => ({
+ recordSetItems: f.unique(resolvedRecordIds).map((id) => ({
recordId: id,
})),
})
@@ -99,7 +103,9 @@ export function CreateRecordSet({
}}
/>
)}
- {state.type === 'Saving' && recordSetFromQueryLoading()}
+ {state.type === 'Saving' && typeof saveComponent === 'function'
+ ? saveComponent()
+ : LoadingScreen()}
{state.type === 'Saved' && (
);
}
+
+/**
+ * Create a record set from selected records.
+ * See also `MakeRecordSetButton`
+ */
+export function CreateRecordSetFromQuery({
+ getIds,
+ baseTableName,
+ queryResource,
+}: {
+ readonly getIds: () => RA;
+ readonly baseTableName: keyof Tables;
+ readonly queryResource: SpecifyResource | undefined;
+}): JSX.Element {
+ const recordSetName =
+ queryResource === undefined || queryResource.isNew()
+ ? undefined
+ : queryResource.get('name');
+ return (
+
+ );
+}
diff --git a/specifyweb/frontend/js_src/lib/components/QueryBuilder/Results.tsx b/specifyweb/frontend/js_src/lib/components/QueryBuilder/Results.tsx
index 7fbe27fcc8b..d821f0a3404 100644
--- a/specifyweb/frontend/js_src/lib/components/QueryBuilder/Results.tsx
+++ b/specifyweb/frontend/js_src/lib/components/QueryBuilder/Results.tsx
@@ -25,7 +25,7 @@ import {
import { fetchPickList } from '../PickLists/fetch';
import { userPreferences } from '../Preferences/userPreferences';
import { generateMappingPathPreview } from '../WbPlanView/mappingPreview';
-import { CreateRecordSet } from './CreateRecordSet';
+import { CreateRecordSetFromQuery } from './CreateRecordSet';
import type { QueryFieldSpec } from './fieldSpec';
import type { QueryField } from './helpers';
import { sortTypes } from './helpers';
@@ -225,7 +225,7 @@ export function QueryResults(props: QueryResultsProps): JSX.Element {
)}
{hasToolPermission('recordSets', 'create') && totalCount !== 0 ? (
selectedRows.size > 0 && !isDistinct ? (
-
Date: Wed, 13 Mar 2024 13:32:36 -0500
Subject: [PATCH 12/71] Add localization for backend parse errors
---
.../frontend/js_src/lib/localization/locality.ts | 16 ++++++++++++++++
1 file changed, 16 insertions(+)
diff --git a/specifyweb/frontend/js_src/lib/localization/locality.ts b/specifyweb/frontend/js_src/lib/localization/locality.ts
index 06559dc505e..1fdd46f2643 100644
--- a/specifyweb/frontend/js_src/lib/localization/locality.ts
+++ b/specifyweb/frontend/js_src/lib/localization/locality.ts
@@ -305,4 +305,20 @@ export const localityText = createDictionary({
localityImportedAcceptedHeaders: {
'en-us': 'Only the following headers are accepted',
},
+ localityImportErrorDialogHeader: {
+ 'en-us': 'Error(s) Occured while Parsing Dataset',
+ },
+ guidHeaderNotProvided: {
+ 'en-us': "The Dataset must contain a 'guid' header",
+ },
+ noLocalityMatchingGuid: {
+ 'en-us': "No Locality with guid: '{guid:string}'",
+ },
+ multipleLocalitiesWithGuid: {
+ 'en-us':
+ 'More than one Locality found with guid: {guid:string}. Locality IDs: {localityIds: string}',
+ },
+ rowNumber: {
+ 'en-us': 'Row Number',
+ },
} as const);
From af383e0c0c49f5d3d7489fae3faf30a95514c16e Mon Sep 17 00:00:00 2001
From: melton-jason
Date: Wed, 13 Mar 2024 13:40:43 -0500
Subject: [PATCH 13/71] Restore coordinateBadFormat ParseFailure
---
specifyweb/specify/parse.py | 3 ++-
1 file changed, 2 insertions(+), 1 deletion(-)
diff --git a/specifyweb/specify/parse.py b/specifyweb/specify/parse.py
index 82d47ef24b8..072af0e3889 100644
--- a/specifyweb/specify/parse.py
+++ b/specifyweb/specify/parse.py
@@ -23,6 +23,7 @@
'invalidYear',
'badDateFormat',
+'coordinateBadFormat',
'latitudeOutOfRange',
'longitudeOutOfRange'
]
@@ -209,7 +210,7 @@ def parse_latlong(field: Field, value: str) -> ParseResult:
parsed = parse_coord(value)
if parsed is None:
- return None
+ return ParseFailure('coordinateBadFormat', {'value': value})
coord, unit = parsed
if field.name.startswith('lat') and abs(coord) >= 90:
From fac170e5d4910912841cfa544c97c82fdd0525b3 Mon Sep 17 00:00:00 2001
From: melton-jason
Date: Wed, 13 Mar 2024 15:17:51 -0500
Subject: [PATCH 14/71] Finish frontend implementation
---
.../components/Header/ImportLocalitySet.tsx | 198 +++++++++++++++++-
.../js_src/lib/localization/locality.ts | 9 +
2 files changed, 197 insertions(+), 10 deletions(-)
diff --git a/specifyweb/frontend/js_src/lib/components/Header/ImportLocalitySet.tsx b/specifyweb/frontend/js_src/lib/components/Header/ImportLocalitySet.tsx
index b373a716002..7baa53351ae 100644
--- a/specifyweb/frontend/js_src/lib/components/Header/ImportLocalitySet.tsx
+++ b/specifyweb/frontend/js_src/lib/components/Header/ImportLocalitySet.tsx
@@ -1,17 +1,28 @@
import React from 'react';
+import type { LocalizedString } from 'typesafe-i18n';
import { commonText } from '../../localization/common';
import { headerText } from '../../localization/header';
import { localityText } from '../../localization/locality';
+import { mainText } from '../../localization/main';
+import { notificationsText } from '../../localization/notifications';
import { ajax } from '../../utils/ajax';
-import type { RA } from '../../utils/types';
+import { f } from '../../utils/functools';
+import type { IR, RA } from '../../utils/types';
import { H2 } from '../Atoms';
import { Button } from '../Atoms/Button';
import { Submit } from '../Atoms/Submit';
+import { LoadingContext } from '../Core/Contexts';
import { tables } from '../DataModel/tables';
import type { Tables } from '../DataModel/types';
+import { softFail } from '../Errors/Crash';
+import { RecordSelectorFromIds } from '../FormSliders/RecordSelectorFromIds';
import { Dialog } from '../Molecules/Dialog';
import { CsvFilePicker } from '../Molecules/FilePicker';
+import { ProtectedTool } from '../Permissions/PermissionDenied';
+import { CreateRecordSet } from '../QueryBuilder/CreateRecordSet';
+import { downloadDataSet } from '../WorkBench/helpers';
+import { resolveBackendParsingMessage } from '../WorkBench/resultsParser';
type Header = Exclude<
Lowercase<
@@ -34,6 +45,22 @@ const acceptedHeaders = new Set([
const requiredHeaders = new Set(['guid']);
+type LocalityImportParseError = {
+ readonly message: string;
+ readonly payload: IR;
+ readonly rowNumber: number;
+};
+
+type LocalityUploadResponse =
+ | {
+ readonly type: 'ParseError';
+ readonly data: RA;
+ }
+ | {
+ readonly type: 'Uploaded';
+ readonly data: RA;
+ };
+
export function ImportLocalitySet(): JSX.Element {
const [headerErrors, setHeaderErrors] = React.useState({
missingRequiredHeaders: [] as RA,
@@ -42,6 +69,11 @@ export function ImportLocalitySet(): JSX.Element {
const [headers, setHeaders] = React.useState>([]);
const [data, setData] = React.useState>>([]);
+ const [results, setResults] = React.useState<
+ LocalityUploadResponse | undefined
+ >(undefined);
+
+ const loading = React.useContext(LoadingContext);
return (
<>
@@ -81,15 +113,20 @@ export function ImportLocalitySet(): JSX.Element {
{commonText.close()}
{headerErrors.missingRequiredHeaders.length === 0 && (
- ajax('/api/import/locality_set/', {
- headers: { Accept: 'application/json' },
- body: {
- columnHeaders: headers,
- data,
- },
- method: 'POST',
- })
+ onClick={(): void =>
+ loading(
+ ajax(
+ '/api/import/locality_set/',
+ {
+ headers: { Accept: 'application/json' },
+ body: {
+ columnHeaders: headers,
+ data,
+ },
+ method: 'POST',
+ }
+ ).then(({ data }) => setResults(data))
+ )
}
>
{commonText.import()}
@@ -128,6 +165,147 @@ export function ImportLocalitySet(): JSX.Element {
>
)}
+ {results === undefined ? null : (
+ setResults(undefined)}
+ />
+ )}
+ >
+ );
+}
+
+function LocalityImportResults({
+ results,
+ onClose: handleClose,
+}: {
+ readonly results: LocalityUploadResponse;
+ readonly onClose: () => void;
+}): JSX.Element {
+ return (
+ <>
+ {results.type === 'ParseError' ? (
+
+ ) : results.type === 'Uploaded' ? (
+
+
+
+ }
+ ids={results.data}
+ isDependent={false}
+ newResource={undefined}
+ table={tables.Locality}
+ title={undefined}
+ totalCount={results.data.length}
+ onAdd={undefined}
+ onClone={undefined}
+ onClose={handleClose}
+ onDelete={undefined}
+ onSaved={f.void}
+ onSlide={undefined}
+ />
+ ) : null}
>
);
}
+
+function LocalityImportErrors({
+ results,
+ onClose: handleClose,
+}: {
+ readonly results: Extract<
+ LocalityUploadResponse,
+ { readonly type: 'ParseError' }
+ >;
+ readonly onClose: () => void;
+}): JSX.Element | null {
+ const loading = React.useContext(LoadingContext);
+
+ return (
+
+ );
+}
+
+function resolveImportLocalityErrorMessage(
+ key: string,
+ payload: IR
+): LocalizedString {
+ const baseParseResults = resolveBackendParsingMessage(key, payload);
+
+ if (baseParseResults !== undefined) {
+ return baseParseResults;
+ } else if (key === 'guidHeaderNotProvided') {
+ return localityText.guidHeaderNotProvided();
+ } else if (key === 'noLocalityMatchingGuid') {
+ return localityText.noLocalityMatchingGuid({
+ guid: payload.guid as string,
+ });
+ } else if (key === 'multipleLocalitiesWithGuid') {
+ return localityText.multipleLocalitiesWithGuid({
+ guid: payload.guid as string,
+ localityIds: (payload.localityIds as RA).join(', '),
+ });
+ } else {
+ return commonText.colonLine({
+ label: key,
+ value:
+ Object.keys(payload).length === 0 ? '' : `${JSON.stringify(payload)}`,
+ });
+ }
+}
diff --git a/specifyweb/frontend/js_src/lib/localization/locality.ts b/specifyweb/frontend/js_src/lib/localization/locality.ts
index 1fdd46f2643..c074d4ee772 100644
--- a/specifyweb/frontend/js_src/lib/localization/locality.ts
+++ b/specifyweb/frontend/js_src/lib/localization/locality.ts
@@ -308,6 +308,15 @@ export const localityText = createDictionary({
localityImportErrorDialogHeader: {
'en-us': 'Error(s) Occured while Parsing Dataset',
},
+ localityImportErrorDialogDetails: {
+ 'en-us':
+ 'The following Errors occured at the provided Row (Line) Numbers of the file while parsing the Dataset: ',
+ },
+ localityImportErrorFileName: {
+ comment:
+ 'The file name which is used when Parse Errors are exported. The .csv file extension is appended to the end of this string',
+ 'en-us': 'Locality Import Errors - {date:string}',
+ },
guidHeaderNotProvided: {
'en-us': "The Dataset must contain a 'guid' header",
},
From 09c58810c2b8a5bb2fd1905dd694437fa8deb08e Mon Sep 17 00:00:00 2001
From: melton-jason
Date: Fri, 15 Mar 2024 18:44:01 -0500
Subject: [PATCH 15/71] Cleanup frontend code
---
.../components/Header/ImportLocalitySet.tsx | 26 ++-
.../components/Header/userToolDefinitions.ts | 1 -
.../components/Molecules/CsvFilePicker.tsx | 210 +++++++++++++++++
.../lib/components/Molecules/FilePicker.tsx | 215 +-----------------
.../js_src/lib/components/WbImport/index.tsx | 3 +-
.../js_src/lib/utils/ajax/definitions.ts | 2 +
6 files changed, 237 insertions(+), 220 deletions(-)
create mode 100644 specifyweb/frontend/js_src/lib/components/Molecules/CsvFilePicker.tsx
diff --git a/specifyweb/frontend/js_src/lib/components/Header/ImportLocalitySet.tsx b/specifyweb/frontend/js_src/lib/components/Header/ImportLocalitySet.tsx
index 7baa53351ae..2330d6732b9 100644
--- a/specifyweb/frontend/js_src/lib/components/Header/ImportLocalitySet.tsx
+++ b/specifyweb/frontend/js_src/lib/components/Header/ImportLocalitySet.tsx
@@ -11,14 +11,14 @@ import { f } from '../../utils/functools';
import type { IR, RA } from '../../utils/types';
import { H2 } from '../Atoms';
import { Button } from '../Atoms/Button';
-import { Submit } from '../Atoms/Submit';
+import { formatConjunction } from '../Atoms/Internationalization';
import { LoadingContext } from '../Core/Contexts';
import { tables } from '../DataModel/tables';
import type { Tables } from '../DataModel/types';
import { softFail } from '../Errors/Crash';
import { RecordSelectorFromIds } from '../FormSliders/RecordSelectorFromIds';
+import { CsvFilePicker } from '../Molecules/CsvFilePicker';
import { Dialog } from '../Molecules/Dialog';
-import { CsvFilePicker } from '../Molecules/FilePicker';
import { ProtectedTool } from '../Permissions/PermissionDenied';
import { CreateRecordSet } from '../QueryBuilder/CreateRecordSet';
import { downloadDataSet } from '../WorkBench/helpers';
@@ -112,7 +112,7 @@ export function ImportLocalitySet(): JSX.Element {
<>
{commonText.close()}
{headerErrors.missingRequiredHeaders.length === 0 && (
-
loading(
ajax(
@@ -130,7 +130,7 @@ export function ImportLocalitySet(): JSX.Element {
}
>
{commonText.import()}
-
+
)}
>
}
@@ -151,17 +151,29 @@ export function ImportLocalitySet(): JSX.Element {
{headerErrors.missingRequiredHeaders.length > 0 && (
<>
{localityText.localityImportMissingHeader()}
- {headerErrors.missingRequiredHeaders.join(', ')}
+
+ {formatConjunction(
+ headerErrors.missingRequiredHeaders as RA
+ )}
+
>
)}
{headerErrors.unrecognizedHeaders.length > 0 && (
<>
{localityText.localityImportUnrecognizedHeaders()}
- {headerErrors.unrecognizedHeaders.join(', ')}
+
+ {formatConjunction(
+ headerErrors.unrecognizedHeaders as RA
+ )}
+
>
)}
{localityText.localityImportedAcceptedHeaders()}
- {Array.from(acceptedHeaders).join(', ')}
+
+ {formatConjunction(
+ Array.from(acceptedHeaders) as unknown as RA
+ )}
+
>
)}
diff --git a/specifyweb/frontend/js_src/lib/components/Header/userToolDefinitions.ts b/specifyweb/frontend/js_src/lib/components/Header/userToolDefinitions.ts
index 291df05a450..07b7cd1237b 100644
--- a/specifyweb/frontend/js_src/lib/components/Header/userToolDefinitions.ts
+++ b/specifyweb/frontend/js_src/lib/components/Header/userToolDefinitions.ts
@@ -102,7 +102,6 @@ const rawUserTools = ensure>>>()({
[commonText.import()]: {
coGeImport: {
title: headerText.coGeImportDataset(),
- enabled: () => true,
url: '/specify/import/from-coge/',
icon: icons.globe,
},
diff --git a/specifyweb/frontend/js_src/lib/components/Molecules/CsvFilePicker.tsx b/specifyweb/frontend/js_src/lib/components/Molecules/CsvFilePicker.tsx
new file mode 100644
index 00000000000..7669017a5c5
--- /dev/null
+++ b/specifyweb/frontend/js_src/lib/components/Molecules/CsvFilePicker.tsx
@@ -0,0 +1,210 @@
+import React from 'react';
+import type { LocalizedString } from 'typesafe-i18n';
+
+import { useAsyncState } from '../../hooks/useAsyncState';
+import { useStateForContext } from '../../hooks/useStateForContext';
+import { useTriggerState } from '../../hooks/useTriggerState';
+import { wbText } from '../../localization/workbench';
+import type { GetOrSet, GetSet, RA } from '../../utils/types';
+import { localized } from '../../utils/types';
+import { Container, H2 } from '../Atoms';
+import { Select } from '../Atoms/Form';
+import { parseCsv, wbImportPreviewSize } from '../WbImport/helpers';
+import { encodings } from '../WorkBench/encodings';
+import type { AutoCompleteItem } from './AutoComplete';
+import { AutoComplete } from './AutoComplete';
+import { FilePicker, Layout } from './FilePicker';
+
+export function CsvFilePicker({
+ header,
+ onFileImport: handleFileImport,
+}: {
+ readonly header: LocalizedString;
+ readonly onFileImport: ({
+ data,
+ hasHeader,
+ encoding,
+ getSetDelimiter,
+ }: {
+ readonly data: RA>;
+ readonly hasHeader: boolean;
+ readonly encoding: string;
+ readonly getSetDelimiter: GetOrSet;
+ }) => void;
+}): JSX.Element {
+ const [file, setFile] = React.useState();
+ const getSetHasHeader = useStateForContext(true);
+
+ return (
+
+ {header}
+
+
+
+ {typeof file === 'object' && (
+
+ )}
+
+ );
+}
+
+export function CsvFilePreview({
+ file,
+ getSetHasHeader,
+ children,
+ onFileImport: handleFileImport,
+}: {
+ readonly file: File;
+ readonly getSetHasHeader: GetOrSet;
+ readonly children?: JSX.Element | undefined;
+ readonly onFileImport: ({
+ data,
+ hasHeader,
+ encoding,
+ getSetDelimiter,
+ }: {
+ readonly data: RA>;
+ readonly hasHeader: boolean;
+ readonly encoding: string;
+ readonly getSetDelimiter: GetOrSet;
+ }) => void;
+}): JSX.Element {
+ const [encoding, setEncoding] = React.useState('utf-8');
+ const getSetDelimiter = useStateForContext(undefined);
+ const preview = useCsvPreview(file, encoding, getSetDelimiter);
+
+ return (
+ {
+ if (!Array.isArray(preview)) {
+ console.error('Failed to parse data for File ', file.name);
+ return;
+ }
+ handleFileImport({
+ data: preview,
+ hasHeader,
+ encoding,
+ getSetDelimiter,
+ });
+ }}
+ >
+ {children === undefined ? <>> : children}
+
+
+
+ );
+}
+
+export function useCsvPreview(
+ file: File,
+ encoding: string,
+ getSetDelimiter: GetSet
+): LocalizedString | RA> | undefined {
+ const [delimiter, setDelimiter] = getSetDelimiter;
+ const [preview] = useAsyncState>>(
+ React.useCallback(
+ async () =>
+ parseCsv(
+ file,
+ encoding,
+ [delimiter, setDelimiter],
+ wbImportPreviewSize
+ ).catch((error) => localized(error.message)),
+ [file, encoding, delimiter, setDelimiter]
+ ),
+ false
+ );
+ return preview;
+}
+
+function ChooseEncoding({
+ encoding = '',
+ isDisabled,
+ onChange: handleChange,
+}: {
+ readonly encoding: string;
+ readonly isDisabled: boolean;
+ readonly onChange: (encoding: string) => void;
+}): JSX.Element {
+ return (
+
+ );
+}
+
+export const delimiters: RA> = [
+ { label: wbText.comma(), searchValue: ',', data: ',' },
+ { label: wbText.tab(), searchValue: '\t', data: '\t' },
+ { label: wbText.semicolon(), searchValue: ';', data: ';' },
+ { label: wbText.space(), searchValue: ' ', data: ' ' },
+ { label: wbText.pipe(), searchValue: '|', data: '|' },
+];
+
+function ChooseDelimiter({
+ isDisabled,
+ getSetDelimiter: [delimiter, handleChange],
+}: {
+ readonly isDisabled: boolean;
+ readonly getSetDelimiter: GetSet;
+}): JSX.Element {
+ const [state, setState] = useTriggerState(delimiter);
+
+ /**
+ * Don't disable the component if it is currently focused, as disabling it
+ * would lead to focus loss, which is bad UX and an accessibility issue.
+ */
+ const inputRef = React.useRef(null);
+ const isFocused = inputRef.current === document.activeElement;
+ const disabled = isDisabled && !isFocused;
+
+ return (
+
+ );
+}
diff --git a/specifyweb/frontend/js_src/lib/components/Molecules/FilePicker.tsx b/specifyweb/frontend/js_src/lib/components/Molecules/FilePicker.tsx
index 11d52c52661..2a652ae3c1c 100644
--- a/specifyweb/frontend/js_src/lib/components/Molecules/FilePicker.tsx
+++ b/specifyweb/frontend/js_src/lib/components/Molecules/FilePicker.tsx
@@ -1,29 +1,18 @@
import React from 'react';
import type { LocalizedString } from 'typesafe-i18n';
-import { useAsyncState } from '../../hooks/useAsyncState';
import { useBooleanState } from '../../hooks/useBooleanState';
-import { useStateForContext } from '../../hooks/useStateForContext';
-import { useTriggerState } from '../../hooks/useTriggerState';
import { attachmentsText } from '../../localization/attachments';
import { commonText } from '../../localization/common';
import { wbText } from '../../localization/workbench';
-import type { GetOrSet, GetSet, RA } from '../../utils/types';
-import { localized } from '../../utils/types';
-import { Container, H2, H3 } from '../Atoms';
+import type { GetOrSet, RA } from '../../utils/types';
+import { H3 } from '../Atoms';
import { Button } from '../Atoms/Button';
import { className } from '../Atoms/className';
-import { Input, Select } from '../Atoms/Form';
+import { Input } from '../Atoms/Form';
import type { TagProps } from '../Atoms/wrapper';
-import {
- extractHeader,
- parseCsv,
- wbImportPreviewSize,
-} from '../WbImport/helpers';
-import { encodings } from '../WorkBench/encodings';
+import { extractHeader } from '../WbImport/helpers';
import { loadingGif } from '.';
-import type { AutoCompleteItem } from './AutoComplete';
-import { AutoComplete } from './AutoComplete';
import { useDragDropFiles } from './useDragDropFiles';
export function FilePicker({
@@ -194,202 +183,6 @@ export const fileToText = async (
fileReader.readAsText(file, encoding);
});
-export function CsvFilePicker({
- header,
- onFileImport: handleFileImport,
-}: {
- readonly header: LocalizedString;
- readonly onFileImport: ({
- data,
- hasHeader,
- encoding,
- getSetDelimiter,
- }: {
- readonly data: RA>;
- readonly hasHeader: boolean;
- readonly encoding: string;
- readonly getSetDelimiter: GetOrSet;
- }) => void;
-}): JSX.Element {
- const [file, setFile] = React.useState();
- const getSetHasHeader = useStateForContext(true);
-
- return (
-
- {header}
-
- setFile(file)}
- />
-
- {typeof file === 'object' && (
-
- )}
-
- );
-}
-
-export function CsvFilePreview({
- file,
- getSetHasHeader,
- children,
- onFileImport: handleFileImport,
-}: {
- readonly file: File;
- readonly getSetHasHeader: GetOrSet;
- readonly children?: JSX.Element | undefined;
- readonly onFileImport: ({
- data,
- hasHeader,
- encoding,
- getSetDelimiter,
- }: {
- readonly data: RA>;
- readonly hasHeader: boolean;
- readonly encoding: string;
- readonly getSetDelimiter: GetOrSet;
- }) => void;
-}): JSX.Element {
- const [encoding, setEncoding] = React.useState('utf-8');
- const getSetDelimiter = useStateForContext(undefined);
- const preview = useCsvPreview(file, encoding, getSetDelimiter);
-
- return (
- {
- if (!Array.isArray(preview)) {
- console.error('Failed to parse data for File ', file.name, preview);
- return;
- }
- handleFileImport({
- data: preview,
- hasHeader,
- encoding,
- getSetDelimiter,
- });
- }}
- >
- {children === undefined ? <>> : children}
-
-
-
- );
-}
-
-export function useCsvPreview(
- file: File,
- encoding: string,
- getSetDelimiter: GetSet
-): LocalizedString | RA> | undefined {
- const [delimiter, setDelimiter] = getSetDelimiter;
- const [preview] = useAsyncState>>(
- React.useCallback(
- async () =>
- parseCsv(
- file,
- encoding,
- [delimiter, setDelimiter],
- wbImportPreviewSize
- ).catch((error) => localized(error.message)),
- [file, encoding, delimiter, setDelimiter]
- ),
- false
- );
- return preview;
-}
-
-function ChooseEncoding({
- encoding = '',
- isDisabled,
- onChange: handleChange,
-}: {
- readonly encoding: string;
- readonly isDisabled: boolean;
- readonly onChange: (encoding: string) => void;
-}): JSX.Element {
- return (
-
- );
-}
-
-export const delimiters: RA> = [
- { label: wbText.comma(), searchValue: ',', data: ',' },
- { label: wbText.tab(), searchValue: '\t', data: '\t' },
- { label: wbText.semicolon(), searchValue: ';', data: ';' },
- { label: wbText.space(), searchValue: ' ', data: ' ' },
- { label: wbText.pipe(), searchValue: '|', data: '|' },
-];
-
-function ChooseDelimiter({
- isDisabled,
- getSetDelimiter: [delimiter, handleChange],
-}: {
- readonly isDisabled: boolean;
- readonly getSetDelimiter: GetSet;
-}): JSX.Element {
- const [state, setState] = useTriggerState(delimiter);
-
- /**
- * Don't disable the component if it is currently focused, as disabling it
- * would lead to focus loss, which is bad UX and an accessibility issue.
- */
- const inputRef = React.useRef(null);
- const isFocused = inputRef.current === document.activeElement;
- const disabled = isDisabled && !isFocused;
-
- return (
-
- );
-}
-
export function Layout({
preview,
getSetHasHeader: [hasHeader = true, setHasHeader],
diff --git a/specifyweb/frontend/js_src/lib/components/WbImport/index.tsx b/specifyweb/frontend/js_src/lib/components/WbImport/index.tsx
index 24f18e42ca3..a56e54ce612 100644
--- a/specifyweb/frontend/js_src/lib/components/WbImport/index.tsx
+++ b/specifyweb/frontend/js_src/lib/components/WbImport/index.tsx
@@ -18,7 +18,8 @@ import { Container, H2 } from '../Atoms';
import { Input } from '../Atoms/Form';
import { LoadingContext } from '../Core/Contexts';
import { useMenuItem } from '../Header/MenuContext';
-import { CsvFilePreview, FilePicker, Layout } from '../Molecules/FilePicker';
+import { CsvFilePreview } from '../Molecules/CsvFilePicker';
+import { FilePicker, Layout } from '../Molecules/FilePicker';
import {
createDataSet,
extractFileName,
diff --git a/specifyweb/frontend/js_src/lib/utils/ajax/definitions.ts b/specifyweb/frontend/js_src/lib/utils/ajax/definitions.ts
index 221d27eb77f..1e500be9c4f 100644
--- a/specifyweb/frontend/js_src/lib/utils/ajax/definitions.ts
+++ b/specifyweb/frontend/js_src/lib/utils/ajax/definitions.ts
@@ -85,4 +85,6 @@ export const httpCodeToErrorMessage: RR, string> = {
[Http.INSUFFICIENT_STORAGE]: `
This error likely happened because the server has run out of storage space.
`,
+ [Http.UNPROCESSABLE]:
+ 'This error is likely caused by a bug in Specify. Please report it.',
};
From f0e372fb81dd0a622c8c7410995e677237b0097f Mon Sep 17 00:00:00 2001
From: melton-jason
Date: Mon, 15 Apr 2024 09:08:06 -0500
Subject: [PATCH 16/71] Reorganize http error messages
---
.../js_src/lib/utils/ajax/definitions.ts | 24 +++++++++----------
1 file changed, 12 insertions(+), 12 deletions(-)
diff --git a/specifyweb/frontend/js_src/lib/utils/ajax/definitions.ts b/specifyweb/frontend/js_src/lib/utils/ajax/definitions.ts
index 1e500be9c4f..a2a4024bfd4 100644
--- a/specifyweb/frontend/js_src/lib/utils/ajax/definitions.ts
+++ b/specifyweb/frontend/js_src/lib/utils/ajax/definitions.ts
@@ -48,12 +48,6 @@ export const httpCodeToErrorMessage: RR, string> = {
access to, or your session has expired. Please try logging in again, or
repeat the action as a user with more permissions
`,
- // This error code is used by the front-end when request was aborted
- [Http.MISDIRECTED]: `
- This error happened because Specify failed to send a request to the server.
- Please try again, and if the problem persists, contact your system
- administrator.
- `,
[Http.CONFLICT]: `
This error happened because the resource you tried to update has already
been modified by someone else. Please refresh the page and try again.
@@ -63,21 +57,29 @@ export const httpCodeToErrorMessage: RR, string> = {
the configured server limit. Either contact your system administrator about
increasing the limit, or try uploading a smaller file.
`,
+ // This error code is used by the front-end when request was aborted
+ [Http.MISDIRECTED]: `
+ This error happened because Specify failed to send a request to the server.
+ Please try again, and if the problem persists, contact your system
+ administrator.
+ `,
+ [Http.UNPROCESSABLE]:
+ 'This error is likely caused by a bug in Specify. Please report it.',
[Http.HUGE_HEADER]:
'Please try clearing your cookies or using a different browser.',
[Http.SERVER_ERROR]: `
This error may indicate a misconfiguration or a bug in Specify. Please
double check your configuration and report this issue.
`,
- [Http.UNAVAILABLE]: `
- This error happened because the server is overloaded or this resource is
- currently unavailable. Please try logging in again later.
- `,
[Http.BAD_GATEWAY]: `
This error likely happened because the server is down, is not yet started,
or in a process of being restarted. If this issue does not resolve after a
few minutes, contact your system administrator.
`,
+ [Http.UNAVAILABLE]: `
+ This error happened because the server is overloaded or this resource is
+ currently unavailable. Please try logging in again later.
+ `,
[Http.GATEWAY_TIMEOUT]: `
This error likely happened because the server is overloaded or you sent a
large request. Please try again later.
@@ -85,6 +87,4 @@ export const httpCodeToErrorMessage: RR, string> = {
[Http.INSUFFICIENT_STORAGE]: `
This error likely happened because the server has run out of storage space.
`,
- [Http.UNPROCESSABLE]:
- 'This error is likely caused by a bug in Specify. Please report it.',
};
From b844b88dd40bfd6544237b010650775e653bebff Mon Sep 17 00:00:00 2001
From: melton-jason
Date: Mon, 15 Apr 2024 09:27:26 -0500
Subject: [PATCH 17/71] Remove CreateRecordSetFromQuery component
---
.../QueryBuilder/CreateRecordSet.tsx | 35 ++++---------------
.../lib/components/QueryBuilder/Results.tsx | 14 +++++---
2 files changed, 16 insertions(+), 33 deletions(-)
diff --git a/specifyweb/frontend/js_src/lib/components/QueryBuilder/CreateRecordSet.tsx b/specifyweb/frontend/js_src/lib/components/QueryBuilder/CreateRecordSet.tsx
index fd3f6b969dc..a1b3c3d3fbe 100644
--- a/specifyweb/frontend/js_src/lib/components/QueryBuilder/CreateRecordSet.tsx
+++ b/specifyweb/frontend/js_src/lib/components/QueryBuilder/CreateRecordSet.tsx
@@ -12,13 +12,17 @@ import {
serializeResource,
} from '../DataModel/serializers';
import { strictGetTable, tables } from '../DataModel/tables';
-import type { RecordSet, SpQuery, Tables } from '../DataModel/types';
+import type { RecordSet, Tables } from '../DataModel/types';
import { raise } from '../Errors/Crash';
import { recordSetView } from '../FormParse/webOnlyViews';
import { ResourceView } from '../Forms/ResourceView';
import { LoadingScreen } from '../Molecules/Dialog';
-import { RecordSetCreated, recordSetFromQueryLoading } from './Components';
+import { RecordSetCreated } from './Components';
+/**
+ * Renders a button to create a record set from a group of records.
+ * See also `MakeRecordSetButton`
+ */
export function CreateRecordSet({
recordIds,
baseTableName,
@@ -115,30 +119,3 @@ export function CreateRecordSet({
>
);
}
-
-/**
- * Create a record set from selected records.
- * See also `MakeRecordSetButton`
- */
-export function CreateRecordSetFromQuery({
- getIds,
- baseTableName,
- queryResource,
-}: {
- readonly getIds: () => RA;
- readonly baseTableName: keyof Tables;
- readonly queryResource: SpecifyResource | undefined;
-}): JSX.Element {
- const recordSetName =
- queryResource === undefined || queryResource.isNew()
- ? undefined
- : queryResource.get('name');
- return (
-
- );
-}
diff --git a/specifyweb/frontend/js_src/lib/components/QueryBuilder/Results.tsx b/specifyweb/frontend/js_src/lib/components/QueryBuilder/Results.tsx
index d821f0a3404..6bd5e78f913 100644
--- a/specifyweb/frontend/js_src/lib/components/QueryBuilder/Results.tsx
+++ b/specifyweb/frontend/js_src/lib/components/QueryBuilder/Results.tsx
@@ -25,7 +25,8 @@ import {
import { fetchPickList } from '../PickLists/fetch';
import { userPreferences } from '../Preferences/userPreferences';
import { generateMappingPathPreview } from '../WbPlanView/mappingPreview';
-import { CreateRecordSetFromQuery } from './CreateRecordSet';
+import { recordSetFromQueryLoading } from './Components';
+import { CreateRecordSet } from './CreateRecordSet';
import type { QueryFieldSpec } from './fieldSpec';
import type { QueryField } from './helpers';
import { sortTypes } from './helpers';
@@ -225,21 +226,26 @@ export function QueryResults(props: QueryResultsProps): JSX.Element {
)}
{hasToolPermission('recordSets', 'create') && totalCount !== 0 ? (
selectedRows.size > 0 && !isDistinct ? (
- =>
+ defaultRecordSetName={
+ queryResource?.isNew() ?? true
+ ? undefined
+ : queryResource?.get('name')
+ }
+ recordIds={(): RA =>
loadedResults
.filter((result) =>
selectedRows.has(result[queryIdField] as number)
)
.map((result) => result[queryIdField] as number)
}
- queryResource={queryResource}
+ saveComponent={recordSetFromQueryLoading}
/>
) : (
createRecordSet
From 37b71f1880467f14ad38ec2119cede6bd84acdb7 Mon Sep 17 00:00:00 2001
From: melton-jason
Date: Tue, 30 Apr 2024 11:32:08 -0500
Subject: [PATCH 18/71] Reset all context when closing dialogs
---
.../components/Header/ImportLocalitySet.tsx | 57 ++++++++++++++-----
.../js_src/lib/localization/locality.ts | 3 +
2 files changed, 47 insertions(+), 13 deletions(-)
diff --git a/specifyweb/frontend/js_src/lib/components/Header/ImportLocalitySet.tsx b/specifyweb/frontend/js_src/lib/components/Header/ImportLocalitySet.tsx
index 2330d6732b9..96cfabd4d40 100644
--- a/specifyweb/frontend/js_src/lib/components/Header/ImportLocalitySet.tsx
+++ b/specifyweb/frontend/js_src/lib/components/Header/ImportLocalitySet.tsx
@@ -75,6 +75,16 @@ export function ImportLocalitySet(): JSX.Element {
const loading = React.useContext(LoadingContext);
+ function resetContext(): void {
+ setHeaderErrors({
+ missingRequiredHeaders: [] as RA,
+ unrecognizedHeaders: [] as RA,
+ });
+ setHeaders([]);
+ setData([]);
+ setResults(undefined);
+ }
+
return (
<>
- {Object.values(headerErrors).some((errors) => errors.length > 0) && (
+ {Object.values(headerErrors).some((errors) => errors.length > 0) ? (
>
- )}
+ ) : data.length > 0 &&
+ !Object.values(headerErrors).some((errors) => errors.length > 0) ? (
+
+ ) : null}
{results === undefined ? null : (
- setResults(undefined)}
- />
+
)}
>
);
@@ -276,7 +307,7 @@ function LocalityImportErrors({
icon="error"
onClose={handleClose}
>
-
+
| {localityText.rowNumber()} |
diff --git a/specifyweb/frontend/js_src/lib/localization/locality.ts b/specifyweb/frontend/js_src/lib/localization/locality.ts
index c074d4ee772..e222a924d25 100644
--- a/specifyweb/frontend/js_src/lib/localization/locality.ts
+++ b/specifyweb/frontend/js_src/lib/localization/locality.ts
@@ -292,6 +292,9 @@ export const localityText = createDictionary({
'uk-ua': 'DD MM SS.SS N/S/E/W (32 45 42.84 N)',
'de-ch': 'DD MM SS.SS N/S/O/W (32 45 42.84 N)',
},
+ localityimportHeader: {
+ 'en-us': 'Import Locality Set',
+ },
localityImportHeaderError: {
'en-us': 'Errors Found in Column Headers',
},
From 07a8393e1eabd0572337766a1dc1b0f98d0d3782 Mon Sep 17 00:00:00 2001
From: melton-jason
Date: Wed, 8 May 2024 14:55:45 -0500
Subject: [PATCH 19/71] Return 422 status when Parse Error occurs
---
.../js_src/lib/components/Header/ImportLocalitySet.tsx | 10 ++++++++--
specifyweb/specify/views.py | 4 ++--
2 files changed, 10 insertions(+), 4 deletions(-)
diff --git a/specifyweb/frontend/js_src/lib/components/Header/ImportLocalitySet.tsx b/specifyweb/frontend/js_src/lib/components/Header/ImportLocalitySet.tsx
index 96cfabd4d40..f559808dca6 100644
--- a/specifyweb/frontend/js_src/lib/components/Header/ImportLocalitySet.tsx
+++ b/specifyweb/frontend/js_src/lib/components/Header/ImportLocalitySet.tsx
@@ -7,6 +7,7 @@ import { localityText } from '../../localization/locality';
import { mainText } from '../../localization/main';
import { notificationsText } from '../../localization/notifications';
import { ajax } from '../../utils/ajax';
+import { Http } from '../../utils/ajax/definitions';
import { f } from '../../utils/functools';
import type { IR, RA } from '../../utils/types';
import { H2 } from '../Atoms';
@@ -195,12 +196,17 @@ export function ImportLocalitySet(): JSX.Element {
loading(
ajax('/api/import/locality_set/', {
headers: { Accept: 'application/json' },
+ expectedErrors: [Http.UNPROCESSABLE],
+ method: 'POST',
body: {
columnHeaders: headers,
data,
},
- method: 'POST',
- }).then(({ data }) => {
+ }).then(({ data: rawData, status }) => {
+ const data =
+ status === 422 && typeof rawData === 'string'
+ ? (JSON.parse(rawData) as LocalityUploadResponse)
+ : rawData;
setData([]);
setResults(data);
})
diff --git a/specifyweb/specify/views.py b/specifyweb/specify/views.py
index c05bdb3567c..7a85d2f0835 100644
--- a/specifyweb/specify/views.py
+++ b/specifyweb/specify/views.py
@@ -880,7 +880,7 @@ def upload_locality_set(request: http.HttpRequest):
if len(errors) > 0:
result["type"] = "ParseError"
result["data"] = [error.to_json() for error in errors]
- return http.JsonResponse(result, safe=False)
+ return http.JsonResponse(result, status=422, safe=False)
result["type"] = "Uploaded"
with transaction.atomic():
@@ -909,4 +909,4 @@ def upload_locality_set(request: http.HttpRequest):
result["data"].append(locality_id)
- return http.JsonResponse(result)
+ return http.JsonResponse(result, safe=False)
From 0bcea7fb27d2d3b4ebdaa93166a84a7a941bc66d Mon Sep 17 00:00:00 2001
From: melton-jason
Date: Wed, 8 May 2024 21:57:00 -0500
Subject: [PATCH 20/71] Include geocoorddetail ids in response
---
specifyweb/specify/import_locality.py | 30 ++++++++++++++++++++-------
specifyweb/specify/views.py | 17 ++++++++++++---
2 files changed, 37 insertions(+), 10 deletions(-)
diff --git a/specifyweb/specify/import_locality.py b/specifyweb/specify/import_locality.py
index ba4c026dd0a..e30a8261654 100644
--- a/specifyweb/specify/import_locality.py
+++ b/specifyweb/specify/import_locality.py
@@ -87,13 +87,19 @@ def parse_locality_set(collection, raw_headers: List[str], data: List[List[str]]
parsed_geocoorddetail_fields = [parse_field(
collection, 'Geocoorddetail', dict["field"], dict['value'], locality_id, row_mumber) for dict in geocoorddetail_values]
-
- for parsed in [*parsed_locality_fields, *parsed_geocoorddetail_fields]:
- if isinstance(parsed, ParseError):
- errors.append(parsed)
- else:
- to_upload.append(parsed)
-
+
+ merged_locality_result, locality_errors = merge_parse_results('Locality', parsed_locality_fields, locality_id, row_mumber)
+
+ merged_geocoorddetail_result, geocoord_errors = merge_parse_results('Geocoorddetail', parsed_geocoorddetail_fields, locality_id, row_mumber)
+
+ errors.extend([*locality_errors, *geocoord_errors])
+
+ if merged_locality_result is not None:
+ to_upload.append(merged_locality_result)
+
+ if merged_geocoorddetail_result is not None:
+ to_upload.append(merged_geocoorddetail_result)
+
return to_upload, errors
@@ -104,3 +110,13 @@ def parse_field(collection, table_name: ImportModel, field_name: str, field_valu
return ParseError.from_parse_failure(parsed, row_number)
else:
return ParseSuccess.from_base_parse_success(parsed, table_name, locality_id, row_number)
+
+def merge_parse_results(table_name: ImportModel, results: List[Union[ParseSuccess, ParseError]], locality_id: int, row_number: int) -> Tuple[Optional[ParseSuccess], List[ParseError]]:
+ to_upload = {}
+ errors = []
+ for result in results:
+ if isinstance(result, ParseError):
+ errors.append(result)
+ else:
+ to_upload.update(result.to_upload)
+ return None if len(to_upload) == 0 else ParseSuccess(to_upload, table_name, locality_id, row_number), errors
diff --git a/specifyweb/specify/views.py b/specifyweb/specify/views.py
index 7a85d2f0835..0ef2fadb98c 100644
--- a/specifyweb/specify/views.py
+++ b/specifyweb/specify/views.py
@@ -810,13 +810,21 @@ def abort_merge_task(request, merge_id: int) -> http.HttpResponse:
"type": {
"enum": ["Uploaded"]
},
- "data": {
+ "localities": {
"description": "An array of updated Locality IDs",
"type": "array",
"items": {
"type": "integer",
"minimum": 0
}
+ },
+ "geocoorddetails": {
+ "description": "An array of created geocoorddetail IDs",
+ "type": "array",
+ "items": {
+ "type": "integer",
+ "minimum": 0
+ }
}
},
}
@@ -883,6 +891,9 @@ def upload_locality_set(request: http.HttpRequest):
return http.JsonResponse(result, status=422, safe=False)
result["type"] = "Uploaded"
+ result["localities"] = []
+ result["geocoorddetails"] = []
+
with transaction.atomic():
for parse_success in to_upload:
uploadable = parse_success.to_upload
@@ -901,12 +912,12 @@ def upload_locality_set(request: http.HttpRequest):
geoCoordDetail = model.objects.create(**uploadable)
geoCoordDetail.locality = locality
geoCoordDetail.save()
+ result["geocoorddetails"].append(geoCoordDetail.id)
elif model_name == 'Locality':
# Queryset.update() is not used here as it does not send pre/post save signals
for field, value in uploadable.items():
setattr(locality, field, value)
locality.save()
-
- result["data"].append(locality_id)
+ result["localities"].append(locality_id)
return http.JsonResponse(result, safe=False)
From 2baf5c2c79ab6834acbe010f93fb292d0707091b Mon Sep 17 00:00:00 2001
From: melton-jason
Date: Wed, 8 May 2024 21:59:15 -0500
Subject: [PATCH 21/71] Show number of affected records after import
---
.../components/Header/ImportLocalitySet.tsx | 51 ++++++++++--------
.../QueryBuilder/CreateRecordSet.tsx | 4 +-
.../lib/components/WorkBench/Results.tsx | 52 ++++++++++++++-----
.../js_src/lib/localization/locality.ts | 4 ++
4 files changed, 73 insertions(+), 38 deletions(-)
diff --git a/specifyweb/frontend/js_src/lib/components/Header/ImportLocalitySet.tsx b/specifyweb/frontend/js_src/lib/components/Header/ImportLocalitySet.tsx
index f559808dca6..4513877f5ad 100644
--- a/specifyweb/frontend/js_src/lib/components/Header/ImportLocalitySet.tsx
+++ b/specifyweb/frontend/js_src/lib/components/Header/ImportLocalitySet.tsx
@@ -6,9 +6,9 @@ import { headerText } from '../../localization/header';
import { localityText } from '../../localization/locality';
import { mainText } from '../../localization/main';
import { notificationsText } from '../../localization/notifications';
+import { wbText } from '../../localization/workbench';
import { ajax } from '../../utils/ajax';
import { Http } from '../../utils/ajax/definitions';
-import { f } from '../../utils/functools';
import type { IR, RA } from '../../utils/types';
import { H2 } from '../Atoms';
import { Button } from '../Atoms/Button';
@@ -17,12 +17,12 @@ import { LoadingContext } from '../Core/Contexts';
import { tables } from '../DataModel/tables';
import type { Tables } from '../DataModel/types';
import { softFail } from '../Errors/Crash';
-import { RecordSelectorFromIds } from '../FormSliders/RecordSelectorFromIds';
import { CsvFilePicker } from '../Molecules/CsvFilePicker';
import { Dialog } from '../Molecules/Dialog';
import { ProtectedTool } from '../Permissions/PermissionDenied';
import { CreateRecordSet } from '../QueryBuilder/CreateRecordSet';
import { downloadDataSet } from '../WorkBench/helpers';
+import { TableRecordCounts } from '../WorkBench/Results';
import { resolveBackendParsingMessage } from '../WorkBench/resultsParser';
type Header = Exclude<
@@ -59,7 +59,8 @@ type LocalityUploadResponse =
}
| {
readonly type: 'Uploaded';
- readonly data: RA;
+ readonly localities: RA;
+ readonly geocoorddetails: RA;
};
export function ImportLocalitySet(): JSX.Element {
@@ -236,30 +237,36 @@ function LocalityImportResults({
{results.type === 'ParseError' ? (
) : results.type === 'Uploaded' ? (
- {commonText.close()}
+ }
+ header={wbText.uploadResults()}
+ modal={false}
+ onClose={handleClose}
+ >
+
+
+ {localityText.localityUploadedDescription({
+ localityTabelLabel: tables.Locality.label,
+ geoCoordDetailTableLabel: tables.GeoCoordDetail.label,
+ })}
+
+
+
- }
- ids={results.data}
- isDependent={false}
- newResource={undefined}
- table={tables.Locality}
- title={undefined}
- totalCount={results.data.length}
- onAdd={undefined}
- onClone={undefined}
- onClose={handleClose}
- onDelete={undefined}
- onSaved={f.void}
- onSlide={undefined}
- />
+
+
) : null}
>
);
diff --git a/specifyweb/frontend/js_src/lib/components/QueryBuilder/CreateRecordSet.tsx b/specifyweb/frontend/js_src/lib/components/QueryBuilder/CreateRecordSet.tsx
index a1b3c3d3fbe..6855a448dd0 100644
--- a/specifyweb/frontend/js_src/lib/components/QueryBuilder/CreateRecordSet.tsx
+++ b/specifyweb/frontend/js_src/lib/components/QueryBuilder/CreateRecordSet.tsx
@@ -26,13 +26,13 @@ import { RecordSetCreated } from './Components';
export function CreateRecordSet({
recordIds,
baseTableName,
- saveComponent,
defaultRecordSetName,
+ saveComponent,
}: {
readonly recordIds: RA | (() => RA);
readonly baseTableName: keyof Tables;
- readonly saveComponent?: () => JSX.Element;
readonly defaultRecordSetName?: string;
+ readonly saveComponent?: () => JSX.Element;
}): JSX.Element {
const [state, setState] = React.useState<
| State<'Editing', { readonly recordSet: SpecifyResource }>
diff --git a/specifyweb/frontend/js_src/lib/components/WorkBench/Results.tsx b/specifyweb/frontend/js_src/lib/components/WorkBench/Results.tsx
index 59ec3b8a6fd..86920d6b8ca 100644
--- a/specifyweb/frontend/js_src/lib/components/WorkBench/Results.tsx
+++ b/specifyweb/frontend/js_src/lib/components/WorkBench/Results.tsx
@@ -9,6 +9,7 @@ import React from 'react';
import { commonText } from '../../localization/common';
import { wbText } from '../../localization/workbench';
import { f } from '../../utils/functools';
+import type { RR, ValueOf } from '../../utils/types';
import { sortFunction } from '../../utils/utils';
import { H2, Ul } from '../Atoms';
import { Button } from '../Atoms/Button';
@@ -25,7 +26,7 @@ export function WbUploaded({
isUploaded,
onClose: handleClose,
}: {
- readonly recordCounts: Partial, number>>;
+ readonly recordCounts: Partial, number>>;
readonly dataSetId: number;
readonly dataSetName: string;
readonly isUploaded: boolean;
@@ -45,19 +46,10 @@ export function WbUploaded({
: wbText.wbUploadedPotentialDescription()}
-
- {Object.entries(recordCounts)
- .sort(sortFunction(([_tableName, recordCount]) => recordCount, false))
- .map(([tableName, recordCount], index) =>
- typeof recordCount === 'number' ? (
-
- ) : null
- )}
-
+ recordCount}
+ />
{isUploaded && (
, number>>;
+ readonly sortFunction?: (
+ value: readonly [
+ Lowercase,
+ ValueOf, number>>>
+ ]
+ ) => ValueOf, number>>>;
+}): JSX.Element {
+ const resolvedRecords =
+ typeof rawSortFunction === 'function'
+ ? Object.entries(recordCounts).sort(sortFunction(rawSortFunction))
+ : Object.entries(recordCounts);
+
+ return (
+
+ {resolvedRecords.map(([tableName, recordCount], index) =>
+ typeof recordCount === 'number' ? (
+
+ ) : null
+ )}
+
+ );
+}
+
function TableResults({
tableName,
recordCount,
diff --git a/specifyweb/frontend/js_src/lib/localization/locality.ts b/specifyweb/frontend/js_src/lib/localization/locality.ts
index e222a924d25..de27b21799e 100644
--- a/specifyweb/frontend/js_src/lib/localization/locality.ts
+++ b/specifyweb/frontend/js_src/lib/localization/locality.ts
@@ -333,4 +333,8 @@ export const localityText = createDictionary({
rowNumber: {
'en-us': 'Row Number',
},
+ localityUploadedDescription: {
+ 'en-us':
+ 'The following number of {localityTabelLabel: string} records were updated and {geoCoordDetailTableLabel: string} records were created:',
+ },
} as const);
From c53c3961a3ca3204b68d17d1fed4d8896c3418e9 Mon Sep 17 00:00:00 2001
From: melton-jason
Date: Wed, 8 May 2024 22:05:18 -0500
Subject: [PATCH 22/71] Remove redundant initial import dialog
---
.../components/Header/ImportLocalitySet.tsx | 87 ++++++++-----------
1 file changed, 34 insertions(+), 53 deletions(-)
diff --git a/specifyweb/frontend/js_src/lib/components/Header/ImportLocalitySet.tsx b/specifyweb/frontend/js_src/lib/components/Header/ImportLocalitySet.tsx
index 4513877f5ad..3f7242befcf 100644
--- a/specifyweb/frontend/js_src/lib/components/Header/ImportLocalitySet.tsx
+++ b/specifyweb/frontend/js_src/lib/components/Header/ImportLocalitySet.tsx
@@ -87,6 +87,30 @@ export function ImportLocalitySet(): JSX.Element {
setResults(undefined);
}
+ const handleImport = (
+ columnHeaders: RA,
+ data: RA>
+ ): void => {
+ loading(
+ ajax('/api/import/locality_set/', {
+ headers: { Accept: 'application/json' },
+ expectedErrors: [Http.UNPROCESSABLE],
+ method: 'POST',
+ body: {
+ columnHeaders,
+ data,
+ },
+ }).then(({ data: rawData, status }) => {
+ const data =
+ status === 422 && typeof rawData === 'string'
+ ? (JSON.parse(rawData) as LocalityUploadResponse)
+ : rawData;
+ setData([]);
+ setResults(data);
+ })
+ );
+ };
+
return (
<>
errors.length > 0
+ )
+ )
+ handleImport(headers, data.slice(1));
}}
/>
- {Object.values(headerErrors).some((errors) => errors.length > 0) ? (
+ {Object.values(headerErrors).some((errors) => errors.length > 0) && (
- ) : data.length > 0 &&
- !Object.values(headerErrors).some((errors) => errors.length > 0) ? (
-
- ) : null}
+ )}
{results === undefined ? null : (
)}
From fd1be30660d9b20306e545f1ce16377a41b5ef0a Mon Sep 17 00:00:00 2001
From: melton-jason
Date: Wed, 8 May 2024 22:09:28 -0500
Subject: [PATCH 23/71] Fix infinite loading bug in query results
---
.../QueryBuilder/CreateRecordSet.tsx | 25 ++++++++++++++++---
1 file changed, 21 insertions(+), 4 deletions(-)
diff --git a/specifyweb/frontend/js_src/lib/components/QueryBuilder/CreateRecordSet.tsx b/specifyweb/frontend/js_src/lib/components/QueryBuilder/CreateRecordSet.tsx
index 6855a448dd0..ddacf7392e1 100644
--- a/specifyweb/frontend/js_src/lib/components/QueryBuilder/CreateRecordSet.tsx
+++ b/specifyweb/frontend/js_src/lib/components/QueryBuilder/CreateRecordSet.tsx
@@ -16,7 +16,8 @@ import type { RecordSet, Tables } from '../DataModel/types';
import { raise } from '../Errors/Crash';
import { recordSetView } from '../FormParse/webOnlyViews';
import { ResourceView } from '../Forms/ResourceView';
-import { LoadingScreen } from '../Molecules/Dialog';
+import { loadingBar } from '../Molecules';
+import { Dialog } from '../Molecules/Dialog';
import { RecordSetCreated } from './Components';
/**
@@ -107,9 +108,11 @@ export function CreateRecordSet({
}}
/>
)}
- {state.type === 'Saving' && typeof saveComponent === 'function'
- ? saveComponent()
- : LoadingScreen()}
+ {state.type === 'Saving'
+ ? typeof saveComponent === 'function'
+ ? saveComponent()
+ : LoadingDialog()
+ : null}
{state.type === 'Saved' && (
);
}
+
+function LoadingDialog(): JSX.Element {
+ return (
+
+ );
+}
From 1f2183523ce7802d375840252d39e844b1e50ddd Mon Sep 17 00:00:00 2001
From: melton-jason
Date: Wed, 8 May 2024 22:45:58 -0500
Subject: [PATCH 24/71] Simplify onFileImport for CSV file picker
---
.../components/Header/ImportLocalitySet.tsx | 15 ++++----
.../components/Molecules/CsvFilePicker.tsx | 34 ++++++++++++-------
2 files changed, 28 insertions(+), 21 deletions(-)
diff --git a/specifyweb/frontend/js_src/lib/components/Header/ImportLocalitySet.tsx b/specifyweb/frontend/js_src/lib/components/Header/ImportLocalitySet.tsx
index 3f7242befcf..fe11ceb0483 100644
--- a/specifyweb/frontend/js_src/lib/components/Header/ImportLocalitySet.tsx
+++ b/specifyweb/frontend/js_src/lib/components/Header/ImportLocalitySet.tsx
@@ -70,7 +70,7 @@ export function ImportLocalitySet(): JSX.Element {
});
const [headers, setHeaders] = React.useState>([]);
- const [data, setData] = React.useState>>([]);
+ const [data, setData] = React.useState>>([]);
const [results, setResults] = React.useState<
LocalityUploadResponse | undefined
>(undefined);
@@ -88,8 +88,8 @@ export function ImportLocalitySet(): JSX.Element {
}
const handleImport = (
- columnHeaders: RA,
- data: RA>
+ columnHeaders: typeof headers,
+ rows: typeof data
): void => {
loading(
ajax('/api/import/locality_set/', {
@@ -98,7 +98,7 @@ export function ImportLocalitySet(): JSX.Element {
method: 'POST',
body: {
columnHeaders,
- data,
+ data: rows,
},
}).then(({ data: rawData, status }) => {
const data =
@@ -115,8 +115,7 @@ export function ImportLocalitySet(): JSX.Element {
<>
{
- const headers = data[0];
+ onFileImport={(headers, data): void => {
const foundHeaderErrors = headers.reduce(
(accumulator, currentHeader) => {
const parsedHeader = currentHeader.toLowerCase().trim() as Header;
@@ -139,14 +138,14 @@ export function ImportLocalitySet(): JSX.Element {
);
setHeaderErrors(foundHeaderErrors);
setHeaders(headers);
- setData(data.slice(1));
+ setData(data);
if (
!Object.values(foundHeaderErrors).some(
(errors) => errors.length > 0
)
)
- handleImport(headers, data.slice(1));
+ handleImport(headers, data);
}}
/>
{Object.values(headerErrors).some((errors) => errors.length > 0) && (
diff --git a/specifyweb/frontend/js_src/lib/components/Molecules/CsvFilePicker.tsx b/specifyweb/frontend/js_src/lib/components/Molecules/CsvFilePicker.tsx
index 7669017a5c5..f2ecc377add 100644
--- a/specifyweb/frontend/js_src/lib/components/Molecules/CsvFilePicker.tsx
+++ b/specifyweb/frontend/js_src/lib/components/Molecules/CsvFilePicker.tsx
@@ -9,7 +9,12 @@ import type { GetOrSet, GetSet, RA } from '../../utils/types';
import { localized } from '../../utils/types';
import { Container, H2 } from '../Atoms';
import { Select } from '../Atoms/Form';
-import { parseCsv, wbImportPreviewSize } from '../WbImport/helpers';
+import { LoadingContext } from '../Core/Contexts';
+import {
+ extractHeader,
+ parseCsv,
+ wbImportPreviewSize,
+} from '../WbImport/helpers';
import { encodings } from '../WorkBench/encodings';
import type { AutoCompleteItem } from './AutoComplete';
import { AutoComplete } from './AutoComplete';
@@ -20,21 +25,16 @@ export function CsvFilePicker({
onFileImport: handleFileImport,
}: {
readonly header: LocalizedString;
- readonly onFileImport: ({
- data,
- hasHeader,
- encoding,
- getSetDelimiter,
- }: {
- readonly data: RA>;
- readonly hasHeader: boolean;
- readonly encoding: string;
- readonly getSetDelimiter: GetOrSet;
- }) => void;
+ readonly onFileImport: (
+ headers: RA,
+ data: RA>
+ ) => void;
}): JSX.Element {
const [file, setFile] = React.useState();
const getSetHasHeader = useStateForContext(true);
+ const loading = React.useContext(LoadingContext);
+
return (
{header}
@@ -48,7 +48,15 @@ export function CsvFilePicker({
{
+ loading(
+ parseCsv(file, encoding, getSetDelimiter).then((data) => {
+ const { header, rows } = extractHeader(data, hasHeader);
+
+ handleFileImport(header, rows);
+ })
+ );
+ }}
/>
)}
From 0a863293cc8aa57d9ea1f90120f9c0534664b6d6 Mon Sep 17 00:00:00 2001
From: melton-jason
Date: Wed, 8 May 2024 22:51:39 -0500
Subject: [PATCH 25/71] Return in onFileImport promise
---
.../frontend/js_src/lib/components/Molecules/CsvFilePicker.tsx | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/specifyweb/frontend/js_src/lib/components/Molecules/CsvFilePicker.tsx b/specifyweb/frontend/js_src/lib/components/Molecules/CsvFilePicker.tsx
index f2ecc377add..183b1a8eaf5 100644
--- a/specifyweb/frontend/js_src/lib/components/Molecules/CsvFilePicker.tsx
+++ b/specifyweb/frontend/js_src/lib/components/Molecules/CsvFilePicker.tsx
@@ -53,7 +53,7 @@ export function CsvFilePicker({
parseCsv(file, encoding, getSetDelimiter).then((data) => {
const { header, rows } = extractHeader(data, hasHeader);
- handleFileImport(header, rows);
+ return void handleFileImport(header, rows);
})
);
}}
From f45b66ce902181ca547d2bf1f4b545312c8367e7 Mon Sep 17 00:00:00 2001
From: melton-jason
Date: Wed, 8 May 2024 23:36:04 -0500
Subject: [PATCH 26/71] Fix misc. parsing bugs
---
specifyweb/specify/import_locality.py | 11 ++++++-----
1 file changed, 6 insertions(+), 5 deletions(-)
diff --git a/specifyweb/specify/import_locality.py b/specifyweb/specify/import_locality.py
index e30a8261654..af212a00fc5 100644
--- a/specifyweb/specify/import_locality.py
+++ b/specifyweb/specify/import_locality.py
@@ -17,7 +17,7 @@
updatable_locality_fields = ['latitude1', 'longitude1', 'datum']
updatable_geocoorddetail_fields = [
- field.name for field in datamodel.get_table_strict('Geocoorddetail').fields]
+ field.name.lower() for field in datamodel.get_table_strict('Geocoorddetail').fields]
ImportModel = Literal['Locality', 'Geocoorddetail']
@@ -50,10 +50,11 @@ def parse_locality_set(collection, raw_headers: List[str], data: List[List[str]]
errors: List[ParseError] = []
to_upload: List[ParseSuccess] = []
- headers = [header.strip() for header in raw_headers]
+ headers = [header.strip().lower() for header in raw_headers]
if 'guid' not in headers:
- errors.append(ParseError('guidHeaderNotProvided'))
+ errors.append(ParseError('guidHeaderNotProvided', None, None))
+ return to_upload, errors
guid_index = headers.index('guid')
updatable_locality_fields_index = [{'field': field, 'index': headers.index(
@@ -83,10 +84,10 @@ def parse_locality_set(collection, raw_headers: List[str], data: List[List[str]]
locality_query) != 1 else locality_query[0].id
parsed_locality_fields = [parse_field(
- collection, 'Locality', dict['field'], dict['value'], locality_id, row_mumber) for dict in locality_values]
+ collection, 'Locality', dict['field'], dict['value'], locality_id, row_mumber) for dict in locality_values if dict['value'].strip() != ""]
parsed_geocoorddetail_fields = [parse_field(
- collection, 'Geocoorddetail', dict["field"], dict['value'], locality_id, row_mumber) for dict in geocoorddetail_values]
+ collection, 'Geocoorddetail', dict["field"], dict['value'], locality_id, row_mumber) for dict in geocoorddetail_values if dict['value'].strip() != ""]
merged_locality_result, locality_errors = merge_parse_results('Locality', parsed_locality_fields, locality_id, row_mumber)
From 1498f42126e7afe14055923fea68524cbe1e3926 Mon Sep 17 00:00:00 2001
From: melton-jason
Date: Thu, 9 May 2024 11:48:48 -0500
Subject: [PATCH 27/71] Add field to valueTooLong payload in upload test
---
specifyweb/workbench/upload/tests/testparsing.py | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/specifyweb/workbench/upload/tests/testparsing.py b/specifyweb/workbench/upload/tests/testparsing.py
index 23235f1cde0..a3a4f775c4e 100644
--- a/specifyweb/workbench/upload/tests/testparsing.py
+++ b/specifyweb/workbench/upload/tests/testparsing.py
@@ -432,7 +432,7 @@ def test_value_too_long(self) -> None:
self.assertIsInstance(results[0].record_result, Uploaded)
self.assertIsInstance(results[1].record_result, Uploaded)
- self.assertEqual(results[2].record_result, ParseFailures(failures=[WorkBenchParseFailure(message='valueTooLong', payload={'maxLength': 128}, column='Species Author')]))
+ self.assertEqual(results[2].record_result, ParseFailures(failures=[WorkBenchParseFailure(message='valueTooLong', payload={'field': 'author', 'maxLength': 128}, column='Species Author')]))
class MatchingBehaviorTests(UploadTestsBase):
From fb6a5cebb7225478c75ab1fcf9a4ca62d9a56b3e Mon Sep 17 00:00:00 2001
From: melton-jason
Date: Tue, 14 May 2024 10:01:13 -0500
Subject: [PATCH 28/71] Add separate parsing endpoint
---
specifyweb/specify/views.py | 121 ++++++++++++++++++++++++++----------
1 file changed, 89 insertions(+), 32 deletions(-)
diff --git a/specifyweb/specify/views.py b/specifyweb/specify/views.py
index 0ef2fadb98c..24aebcdd661 100644
--- a/specifyweb/specify/views.py
+++ b/specifyweb/specify/views.py
@@ -22,7 +22,7 @@
PermissionTargetAction, PermissionsException, check_permission_targets, table_permissions_checker
from specifyweb.celery_tasks import app
from specifyweb.specify.record_merging import record_merge_fx, record_merge_task, resolve_record_merge_response
-from specifyweb.specify.import_locality import localityParseErrorMessages, parse_locality_set
+from specifyweb.specify.import_locality import localityParseErrorMessages, parse_locality_set as _parse_locality_set
from . import api, models as spmodels
from .specify_jar import specify_jar
from celery.utils.log import get_task_logger # type: ignore
@@ -770,14 +770,12 @@ def abort_merge_task(request, merge_id: int) -> http.HttpResponse:
return http.HttpResponse(f'Task {merge.taskid} is not running and cannot be aborted.')
-@openapi(schema={
- "post": {
- "requestBody": {
- "required": True,
- "content": {
- "application/json": {
- "schema": {
- "type": "object",
+locality_set_body = {
+ "required": True,
+ "content": {
+ "application/json": {
+ "schema": {
+ "type": "object",
"properties": {
"columnHeaders": {
"type": "array",
@@ -795,10 +793,36 @@ def abort_merge_task(request, merge_id: int) -> http.HttpResponse:
}
}
}
- }
+ }
+ }
+ }
+}
+
+locality_set_parse_error_data = {
+ "type": "array",
+ "items": {
+ "type": "object",
+ "properties": {
+ "message": {
+ "description": "Keys for errors which occured during parsing",
+ "type": "string",
+ "enum": localityParseErrorMessages
+ },
+ "payload": {
+ "type": "object"
+ },
+ "rowNumber": {
+ "type": "integer",
+ "minimum": 0
}
}
- },
+ }
+}
+
+
+@openapi(schema={
+ "post": {
+ "requestBody": locality_set_body,
"responses": {
"200": {
"description": "The Locality records were updated and GeocoordDetails uploaded successfully ",
@@ -842,26 +866,7 @@ def abort_merge_task(request, merge_id: int) -> http.HttpResponse:
"type": "string",
"enum": ["ParseError"]
},
- "data": {
- "type": "array",
- "items": {
- "type": "object",
- "properties": {
- "message": {
- "description": "Keys for errors which occured during parsing",
- "type": "string",
- "enum": localityParseErrorMessages
- },
- "payload": {
- "type": "object"
- },
- "rowNumber": {
- "type": "integer",
- "minimum": 0
- }
- }
- }
- }
+ "data": locality_set_parse_error_data
}
}
}
@@ -873,11 +878,13 @@ def abort_merge_task(request, merge_id: int) -> http.HttpResponse:
@login_maybe_required
@require_POST
def upload_locality_set(request: http.HttpRequest):
+ """Parse and upload a locality set
+ """
request_data = json.loads(request.body)
column_headers = request_data["columnHeaders"]
data = request_data["data"]
- to_upload, errors = parse_locality_set(
+ to_upload, errors = _parse_locality_set(
request.specify_collection, column_headers, data)
result = {
@@ -921,3 +928,53 @@ def upload_locality_set(request: http.HttpRequest):
result["localities"].append(locality_id)
return http.JsonResponse(result, safe=False)
+
+
+@openapi(schema={
+ "post": {
+ "requestBody": locality_set_body,
+ "responses": {
+ "200": {
+ "description": "Locality Import Set parsed successfully",
+ "content": {
+ "application/json": {
+ "schema": {
+ "type": "array",
+ "items": {
+ "type": "integer",
+ "minimum": 0
+ }
+ }
+ }
+ }
+ },
+ "422": {
+ "description": "Locality Import Set not parsed successfully",
+ "content": {
+ "application/json": {
+ "schema": locality_set_parse_error_data
+ }
+ }
+ }
+ }
+ }
+})
+@login_maybe_required
+@require_POST
+def parse_locality_set(request: http.HttpRequest):
+ """Parse a locality set without making any database changes and return the results
+ """
+ request_data = json.loads(request.body)
+ column_headers = request_data["columnHeaders"]
+ data = request_data["data"]
+
+ parsed, errors = _parse_locality_set(
+ request.specify_collection, column_headers, data)
+
+ if len(errors) > 0:
+ result = [error.to_json() for error in errors]
+ return http.JsonResponse(result, status=422, safe=False)
+
+ result = [ps.locality_id for ps in parsed]
+
+ return http.JsonResponse(result, safe=False)
From 618c80530448ada77008feae4e0a96c1a4f7f396 Mon Sep 17 00:00:00 2001
From: melton-jason
Date: Thu, 16 May 2024 08:35:58 -0500
Subject: [PATCH 29/71] Hide the create record set button if the user has no permission
---
.../js_src/lib/components/Header/ImportLocalitySet.tsx | 5 +++--
1 file changed, 3 insertions(+), 2 deletions(-)
diff --git a/specifyweb/frontend/js_src/lib/components/Header/ImportLocalitySet.tsx b/specifyweb/frontend/js_src/lib/components/Header/ImportLocalitySet.tsx
index fe11ceb0483..04137708aa4 100644
--- a/specifyweb/frontend/js_src/lib/components/Header/ImportLocalitySet.tsx
+++ b/specifyweb/frontend/js_src/lib/components/Header/ImportLocalitySet.tsx
@@ -19,6 +19,7 @@ import type { Tables } from '../DataModel/types';
import { softFail } from '../Errors/Crash';
import { CsvFilePicker } from '../Molecules/CsvFilePicker';
import { Dialog } from '../Molecules/Dialog';
+import { hasPermission, hasToolPermission } from '../Permissions/helpers';
import { ProtectedTool } from '../Permissions/PermissionDenied';
import { CreateRecordSet } from '../QueryBuilder/CreateRecordSet';
import { downloadDataSet } from '../WorkBench/helpers';
@@ -239,12 +240,12 @@ function LocalityImportResults({
geocoorddetail: results.geocoorddetails.length,
}}
/>
-
+ {hasToolPermission('recordSets', 'create') && (
-
+ )}
) : null}
From f532854aeff02d4ac73249b679839fd1dd395507 Mon Sep 17 00:00:00 2001
From: melton-jason
Date: Thu, 16 May 2024 09:19:06 -0500
Subject: [PATCH 30/71] Allow specifying button type for CreateRecordSet button
---
.../components/Header/ImportLocalitySet.tsx | 18 +++++++++++-------
.../QueryBuilder/CreateRecordSet.tsx | 11 ++++++++---
2 files changed, 19 insertions(+), 10 deletions(-)
diff --git a/specifyweb/frontend/js_src/lib/components/Header/ImportLocalitySet.tsx b/specifyweb/frontend/js_src/lib/components/Header/ImportLocalitySet.tsx
index 04137708aa4..40041b25b6c 100644
--- a/specifyweb/frontend/js_src/lib/components/Header/ImportLocalitySet.tsx
+++ b/specifyweb/frontend/js_src/lib/components/Header/ImportLocalitySet.tsx
@@ -220,7 +220,17 @@ function LocalityImportResults({
) : results.type === 'Uploaded' ? (
) : null}
diff --git a/specifyweb/frontend/js_src/lib/components/QueryBuilder/CreateRecordSet.tsx b/specifyweb/frontend/js_src/lib/components/QueryBuilder/CreateRecordSet.tsx
index ddacf7392e1..1387715b5f0 100644
--- a/specifyweb/frontend/js_src/lib/components/QueryBuilder/CreateRecordSet.tsx
+++ b/specifyweb/frontend/js_src/lib/components/QueryBuilder/CreateRecordSet.tsx
@@ -1,9 +1,10 @@
import React from 'react';
+import { LocalizedString } from 'typesafe-i18n';
import type { State } from 'typesafe-reducer';
import { queryText } from '../../localization/query';
import { f } from '../../utils/functools';
-import type { RA } from '../../utils/types';
+import type { RA, ValueOf } from '../../utils/types';
import { Button } from '../Atoms/Button';
import type { SpecifyResource } from '../DataModel/legacyTypes';
import { createResource } from '../DataModel/resource';
@@ -28,11 +29,13 @@ export function CreateRecordSet({
recordIds,
baseTableName,
defaultRecordSetName,
+ buttonType = 'Small',
saveComponent,
}: {
readonly recordIds: RA | (() => RA);
readonly baseTableName: keyof Tables;
readonly defaultRecordSetName?: string;
+ readonly buttonType: Exclude;
readonly saveComponent?: () => JSX.Element;
}): JSX.Element {
const [state, setState] = React.useState<
@@ -47,9 +50,11 @@ export function CreateRecordSet({
[recordIds]
);
+ const ResolvedButton = Button[buttonType];
+
return (
<>
- {
const recordSet = new tables.RecordSet.Resource();
@@ -64,7 +69,7 @@ export function CreateRecordSet({
{queryText.createRecordSet({
recordSetTable: tables.RecordSet.label,
})}
-
+
{state.type === 'Editing' && (
Date: Thu, 16 May 2024 09:49:24 -0500
Subject: [PATCH 31/71] Remove unused imports
---
.../js_src/lib/components/Header/ImportLocalitySet.tsx | 3 +--
.../js_src/lib/components/QueryBuilder/CreateRecordSet.tsx | 3 +--
2 files changed, 2 insertions(+), 4 deletions(-)
diff --git a/specifyweb/frontend/js_src/lib/components/Header/ImportLocalitySet.tsx b/specifyweb/frontend/js_src/lib/components/Header/ImportLocalitySet.tsx
index 40041b25b6c..c671845281f 100644
--- a/specifyweb/frontend/js_src/lib/components/Header/ImportLocalitySet.tsx
+++ b/specifyweb/frontend/js_src/lib/components/Header/ImportLocalitySet.tsx
@@ -19,8 +19,7 @@ import type { Tables } from '../DataModel/types';
import { softFail } from '../Errors/Crash';
import { CsvFilePicker } from '../Molecules/CsvFilePicker';
import { Dialog } from '../Molecules/Dialog';
-import { hasPermission, hasToolPermission } from '../Permissions/helpers';
-import { ProtectedTool } from '../Permissions/PermissionDenied';
+import { hasToolPermission } from '../Permissions/helpers';
import { CreateRecordSet } from '../QueryBuilder/CreateRecordSet';
import { downloadDataSet } from '../WorkBench/helpers';
import { TableRecordCounts } from '../WorkBench/Results';
diff --git a/specifyweb/frontend/js_src/lib/components/QueryBuilder/CreateRecordSet.tsx b/specifyweb/frontend/js_src/lib/components/QueryBuilder/CreateRecordSet.tsx
index 1387715b5f0..8753f53f78f 100644
--- a/specifyweb/frontend/js_src/lib/components/QueryBuilder/CreateRecordSet.tsx
+++ b/specifyweb/frontend/js_src/lib/components/QueryBuilder/CreateRecordSet.tsx
@@ -1,10 +1,9 @@
import React from 'react';
-import { LocalizedString } from 'typesafe-i18n';
import type { State } from 'typesafe-reducer';
import { queryText } from '../../localization/query';
import { f } from '../../utils/functools';
-import type { RA, ValueOf } from '../../utils/types';
+import type { RA } from '../../utils/types';
import { Button } from '../Atoms/Button';
import type { SpecifyResource } from '../DataModel/legacyTypes';
import { createResource } from '../DataModel/resource';
From 0e64f0cb5d6f4c1f464a260c463006959d18aee8 Mon Sep 17 00:00:00 2001
From: melton-jason
Date: Thu, 16 May 2024 09:50:33 -0500
Subject: [PATCH 32/71] Make buttonType optional for CreateRecordSet button
---
.../js_src/lib/components/QueryBuilder/CreateRecordSet.tsx | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/specifyweb/frontend/js_src/lib/components/QueryBuilder/CreateRecordSet.tsx b/specifyweb/frontend/js_src/lib/components/QueryBuilder/CreateRecordSet.tsx
index 8753f53f78f..07bcaa8da99 100644
--- a/specifyweb/frontend/js_src/lib/components/QueryBuilder/CreateRecordSet.tsx
+++ b/specifyweb/frontend/js_src/lib/components/QueryBuilder/CreateRecordSet.tsx
@@ -34,7 +34,7 @@ export function CreateRecordSet({
readonly recordIds: RA | (() => RA);
readonly baseTableName: keyof Tables;
readonly defaultRecordSetName?: string;
- readonly buttonType: Exclude;
+ readonly buttonType?: Exclude;
readonly saveComponent?: () => JSX.Element;
}): JSX.Element {
const [state, setState] = React.useState<
From b6abff0f3ef1eba6a79f035c870fc390935da07e Mon Sep 17 00:00:00 2001
From: melton-jason
Date: Thu, 23 May 2024 15:58:22 -0500
Subject: [PATCH 33/71] Automatically create record set on import
---
.../components/Header/ImportLocalitySet.tsx | 78 ++++++++++++++-----
.../js_src/lib/localization/header.ts | 2 +-
2 files changed, 58 insertions(+), 22 deletions(-)
diff --git a/specifyweb/frontend/js_src/lib/components/Header/ImportLocalitySet.tsx b/specifyweb/frontend/js_src/lib/components/Header/ImportLocalitySet.tsx
index c671845281f..7cceeb615c6 100644
--- a/specifyweb/frontend/js_src/lib/components/Header/ImportLocalitySet.tsx
+++ b/specifyweb/frontend/js_src/lib/components/Header/ImportLocalitySet.tsx
@@ -3,6 +3,7 @@ import type { LocalizedString } from 'typesafe-i18n';
import { commonText } from '../../localization/common';
import { headerText } from '../../localization/header';
+import { queryText } from '../../localization/query';
import { localityText } from '../../localization/locality';
import { mainText } from '../../localization/main';
import { notificationsText } from '../../localization/notifications';
@@ -10,17 +11,21 @@ import { wbText } from '../../localization/workbench';
import { ajax } from '../../utils/ajax';
import { Http } from '../../utils/ajax/definitions';
import type { IR, RA } from '../../utils/types';
+import { localized } from '../../utils/types';
import { H2 } from '../Atoms';
import { Button } from '../Atoms/Button';
import { formatConjunction } from '../Atoms/Internationalization';
+import { Link } from '../Atoms/Link';
import { LoadingContext } from '../Core/Contexts';
+import type { SerializedResource } from '../DataModel/helperTypes';
+import { createResource } from '../DataModel/resource';
import { tables } from '../DataModel/tables';
-import type { Tables } from '../DataModel/types';
+import type { RecordSet, Tables } from '../DataModel/types';
import { softFail } from '../Errors/Crash';
import { CsvFilePicker } from '../Molecules/CsvFilePicker';
import { Dialog } from '../Molecules/Dialog';
+import { TableIcon } from '../Molecules/TableIcon';
import { hasToolPermission } from '../Permissions/helpers';
-import { CreateRecordSet } from '../QueryBuilder/CreateRecordSet';
import { downloadDataSet } from '../WorkBench/helpers';
import { TableRecordCounts } from '../WorkBench/Results';
import { resolveBackendParsingMessage } from '../WorkBench/resultsParser';
@@ -74,6 +79,9 @@ export function ImportLocalitySet(): JSX.Element {
const [results, setResults] = React.useState<
LocalityUploadResponse | undefined
>(undefined);
+ const [recordSet, setRecordSet] = React.useState<
+ SerializedResource | undefined
+ >(undefined);
const loading = React.useContext(LoadingContext);
@@ -100,14 +108,34 @@ export function ImportLocalitySet(): JSX.Element {
columnHeaders,
data: rows,
},
- }).then(({ data: rawData, status }) => {
- const data =
- status === 422 && typeof rawData === 'string'
- ? (JSON.parse(rawData) as LocalityUploadResponse)
- : rawData;
- setData([]);
- setResults(data);
})
+ .then(async ({ data: rawData, status }) => {
+ const data =
+ status === 422 && typeof rawData === 'string'
+ ? (JSON.parse(rawData) as LocalityUploadResponse)
+ : rawData;
+
+ return data.type === 'Uploaded'
+ ? ([
+ data,
+ await createResource('RecordSet', {
+ name: `${new Date().toDateString()} Locality Repatriation Import`,
+ version: 1,
+ type: 0,
+ dbTableId: tables.Locality.tableId,
+ // @ts-expect-error
+ recordSetItems: data.localities.map((id) => ({
+ recordId: id,
+ })),
+ }),
+ ] as const)
+ : ([data, undefined] as const);
+ })
+ .then(([data, recordSet]) => {
+ setData([]);
+ setResults(data);
+ setRecordSet(recordSet);
+ })
);
};
@@ -199,7 +227,11 @@ export function ImportLocalitySet(): JSX.Element {
)}
{results === undefined ? null : (
-
+
)}
>
);
@@ -207,9 +239,11 @@ export function ImportLocalitySet(): JSX.Element {
function LocalityImportResults({
results,
+ recordSet,
onClose: handleClose,
}: {
readonly results: LocalityUploadResponse;
+ readonly recordSet: SerializedResource | undefined;
readonly onClose: () => void;
}): JSX.Element {
return (
@@ -219,17 +253,7 @@ function LocalityImportResults({
) : results.type === 'Uploaded' ? (
) : null}
>
diff --git a/specifyweb/frontend/js_src/lib/localization/header.ts b/specifyweb/frontend/js_src/lib/localization/header.ts
index d8725d0f67f..e500bad9624 100644
--- a/specifyweb/frontend/js_src/lib/localization/header.ts
+++ b/specifyweb/frontend/js_src/lib/localization/header.ts
@@ -144,7 +144,7 @@ export const headerText = createDictionary({
`,
},
coGeImportDataset: {
- 'en-us': 'Import CoGe Dataset'
+ 'en-us': 'Import Locality Repatriation Dataset',
},
labelName: {
'en-us': 'Label Name',
From 050f38b4604767aea897f53a5dbcc974698f1863 Mon Sep 17 00:00:00 2001
From: melton-jason
Date: Mon, 27 May 2024 10:56:38 -0500
Subject: [PATCH 34/71] Always include field which caused error
---
.../components/Header/ImportLocalitySet.tsx | 12 +++++--
specifyweb/specify/import_locality.py | 35 +++++++++++--------
specifyweb/specify/views.py | 8 ++++-
3 files changed, 36 insertions(+), 19 deletions(-)
diff --git a/specifyweb/frontend/js_src/lib/components/Header/ImportLocalitySet.tsx b/specifyweb/frontend/js_src/lib/components/Header/ImportLocalitySet.tsx
index 7cceeb615c6..38302ed6309 100644
--- a/specifyweb/frontend/js_src/lib/components/Header/ImportLocalitySet.tsx
+++ b/specifyweb/frontend/js_src/lib/components/Header/ImportLocalitySet.tsx
@@ -29,6 +29,7 @@ import { hasToolPermission } from '../Permissions/helpers';
import { downloadDataSet } from '../WorkBench/helpers';
import { TableRecordCounts } from '../WorkBench/Results';
import { resolveBackendParsingMessage } from '../WorkBench/resultsParser';
+import { schemaText } from '../../localization/schema';
type Header = Exclude<
Lowercase<
@@ -53,6 +54,7 @@ const requiredHeaders = new Set(['guid']);
type LocalityImportParseError = {
readonly message: string;
+ readonly field: string;
readonly payload: IR;
readonly rowNumber: number;
};
@@ -317,12 +319,14 @@ function LocalityImportErrors({
const columns = [
localityText.rowNumber(),
+ schemaText.field(),
mainText.errorMessage(),
];
const data = results.data.map(
- ({ message, payload, rowNumber }) => [
+ ({ message, payload, field, rowNumber }) => [
rowNumber.toString(),
+ field,
resolveImportLocalityErrorMessage(message, payload),
]
);
@@ -340,16 +344,18 @@ function LocalityImportErrors({
icon="error"
onClose={handleClose}
>
-
+
| {localityText.rowNumber()} |
+ {schemaText.field()} |
{mainText.errorMessage()} |
- {results.data.map(({ rowNumber, message, payload }, index) => (
+ {results.data.map(({ rowNumber, field, message, payload }, index) => (
| {rowNumber} |
+ {field} |
{resolveImportLocalityErrorMessage(message, payload)} |
))}
diff --git a/specifyweb/specify/import_locality.py b/specifyweb/specify/import_locality.py
index af212a00fc5..4c8dbba774d 100644
--- a/specifyweb/specify/import_locality.py
+++ b/specifyweb/specify/import_locality.py
@@ -24,15 +24,16 @@
class ParseError(NamedTuple):
message: Union[ParseFailureKey, LocalityParseErrorMessageKey]
+ field: Optional[str]
payload: Optional[Dict[str, Any]]
row_number: Optional[int]
@classmethod
- def from_parse_failure(cls, parse_failure: BaseParseFailure, row_number: int):
- return cls(parse_failure.message, parse_failure.payload, row_number)
+ def from_parse_failure(cls, parse_failure: BaseParseFailure, field: str, row_number: int):
+ return cls(message=parse_failure.message, field=field, payload=parse_failure.payload, row_number=row_number)
def to_json(self):
- return {"message": self.message, "payload": self.payload, "rowNumber": self.row_number}
+ return {"message": self.message, "field": self.field, "payload": self.payload, "rowNumber": self.row_number}
class ParseSuccess(NamedTuple):
@@ -53,7 +54,8 @@ def parse_locality_set(collection, raw_headers: List[str], data: List[List[str]]
headers = [header.strip().lower() for header in raw_headers]
if 'guid' not in headers:
- errors.append(ParseError('guidHeaderNotProvided', None, None))
+ errors.append(ParseError(message='guidHeaderNotProvided',
+ field=None, payload=None, row_number=None))
return to_upload, errors
guid_index = headers.index('guid')
@@ -67,12 +69,12 @@ def parse_locality_set(collection, raw_headers: List[str], data: List[List[str]]
guid = row[guid_index]
locality_query = spmodels.Locality.objects.filter(guid=guid)
if len(locality_query) == 0:
- errors.append(ParseError('noLocalityMatchingGuid',
- {'guid': guid}, row_mumber))
+ errors.append(ParseError(message='noLocalityMatchingGuid', field='guid',
+ payload={'guid': guid}, row_number=row_mumber))
if len(locality_query) > 1:
- errors.append(ParseError('multipleLocalitiesWithGuid', {'guid': guid, 'localityIds': list(
- locality.id for locality in locality_query)}, row_mumber))
+ errors.append(ParseError(message='multipleLocalitiesWithGuid', field=None, payload={'guid': guid, 'localityIds': list(
+ locality.id for locality in locality_query)}, row_number=row_mumber))
locality_values = [{'field': dict['field'], 'value': row[dict['index']].strip()}
for dict in updatable_locality_fields_index]
@@ -88,19 +90,21 @@ def parse_locality_set(collection, raw_headers: List[str], data: List[List[str]]
parsed_geocoorddetail_fields = [parse_field(
collection, 'Geocoorddetail', dict["field"], dict['value'], locality_id, row_mumber) for dict in geocoorddetail_values if dict['value'].strip() != ""]
-
- merged_locality_result, locality_errors = merge_parse_results('Locality', parsed_locality_fields, locality_id, row_mumber)
-
- merged_geocoorddetail_result, geocoord_errors = merge_parse_results('Geocoorddetail', parsed_geocoorddetail_fields, locality_id, row_mumber)
+
+ merged_locality_result, locality_errors = merge_parse_results(
+ 'Locality', parsed_locality_fields, locality_id, row_mumber)
+
+ merged_geocoorddetail_result, geocoord_errors = merge_parse_results(
+ 'Geocoorddetail', parsed_geocoorddetail_fields, locality_id, row_mumber)
errors.extend([*locality_errors, *geocoord_errors])
if merged_locality_result is not None:
to_upload.append(merged_locality_result)
-
+
if merged_geocoorddetail_result is not None:
to_upload.append(merged_geocoorddetail_result)
-
+
return to_upload, errors
@@ -108,10 +112,11 @@ def parse_field(collection, table_name: ImportModel, field_name: str, field_valu
parsed = _parse_field(collection, table_name, field_name, field_value)
if isinstance(parsed, BaseParseFailure):
- return ParseError.from_parse_failure(parsed, row_number)
+ return ParseError.from_parse_failure(parsed, field_name, row_number)
else:
return ParseSuccess.from_base_parse_success(parsed, table_name, locality_id, row_number)
+
def merge_parse_results(table_name: ImportModel, results: List[Union[ParseSuccess, ParseError]], locality_id: int, row_number: int) -> Tuple[Optional[ParseSuccess], List[ParseError]]:
to_upload = {}
errors = []
diff --git a/specifyweb/specify/views.py b/specifyweb/specify/views.py
index 5e7f0f3010e..f6bdba86071 100644
--- a/specifyweb/specify/views.py
+++ b/specifyweb/specify/views.py
@@ -807,8 +807,14 @@ def abort_merge_task(request, merge_id: int) -> http.HttpResponse:
"type": "string",
"enum": localityParseErrorMessages
},
+ "field": {
+ "description": "The field name which had the parsing error",
+ "type": "string"
+ },
"payload": {
- "type": "object"
+ "description": "An object containing data relating to the error",
+ "type": "object",
+ "example": {'badType': 'Preson', 'validTypes': ['Organization', 'Person', 'Other', 'Group',]}
},
"rowNumber": {
"type": "integer",
From f746ee534cc297fc6d00ef65081b2fd85e8c5121 Mon Sep 17 00:00:00 2001
From: melton-jason
Date: Mon, 27 May 2024 10:58:26 -0500
Subject: [PATCH 35/71] Rename generic instances of 'coge' to locality dataset
---
.../js_src/lib/components/Header/userToolDefinitions.ts | 2 +-
specifyweb/frontend/js_src/lib/components/Router/Routes.tsx | 2 +-
2 files changed, 2 insertions(+), 2 deletions(-)
diff --git a/specifyweb/frontend/js_src/lib/components/Header/userToolDefinitions.ts b/specifyweb/frontend/js_src/lib/components/Header/userToolDefinitions.ts
index 07b7cd1237b..94bf4de63b3 100644
--- a/specifyweb/frontend/js_src/lib/components/Header/userToolDefinitions.ts
+++ b/specifyweb/frontend/js_src/lib/components/Header/userToolDefinitions.ts
@@ -102,7 +102,7 @@ const rawUserTools = ensure>>>()({
[commonText.import()]: {
coGeImport: {
title: headerText.coGeImportDataset(),
- url: '/specify/import/from-coge/',
+ url: '/specify/import/locality-dataset/',
icon: icons.globe,
},
},
diff --git a/specifyweb/frontend/js_src/lib/components/Router/Routes.tsx b/specifyweb/frontend/js_src/lib/components/Router/Routes.tsx
index 839c5e25b37..6e7ea11f56b 100644
--- a/specifyweb/frontend/js_src/lib/components/Router/Routes.tsx
+++ b/specifyweb/frontend/js_src/lib/components/Router/Routes.tsx
@@ -229,7 +229,7 @@ export const routes: RA = [
path: 'import',
children: [
{
- path: 'from-coge',
+ path: 'locality-dataset',
element: () =>
import('../Header/ImportLocalitySet').then(
({ ImportLocalitySet }) => ImportLocalitySet
From a9a3975355d83a26b26a0dd2a03fb563e85b3f41 Mon Sep 17 00:00:00 2001
From: melton-jason
Date: Mon, 27 May 2024 11:18:57 -0500
Subject: [PATCH 36/71] Close columnHeaders error dialog on import
---
.../js_src/lib/components/Header/ImportLocalitySet.tsx | 9 ++++++++-
1 file changed, 8 insertions(+), 1 deletion(-)
diff --git a/specifyweb/frontend/js_src/lib/components/Header/ImportLocalitySet.tsx b/specifyweb/frontend/js_src/lib/components/Header/ImportLocalitySet.tsx
index 38302ed6309..8c3831618f9 100644
--- a/specifyweb/frontend/js_src/lib/components/Header/ImportLocalitySet.tsx
+++ b/specifyweb/frontend/js_src/lib/components/Header/ImportLocalitySet.tsx
@@ -184,7 +184,14 @@ export function ImportLocalitySet(): JSX.Element {
<>
{commonText.close()}
{headerErrors.missingRequiredHeaders.length === 0 && (
- handleImport(headers, data)}>
+ {
+ const storedHeaders = headers;
+ const storedData = data;
+ handleImport(storedHeaders, storedData);
+ resetContext();
+ }}
+ >
{commonText.import()}
)}
From 56f521b70edf78d445a17c7bfc42ffa70cb7d2af Mon Sep 17 00:00:00 2001
From: melton-jason
Date: Wed, 29 May 2024 12:15:15 -0500
Subject: [PATCH 37/71] Define generic AsyncTask abstract model
---
...5_rename_mergingstatus_spmerging_status.py | 18 ++++++++++
specifyweb/notifications/models.py | 35 +++++++++++--------
specifyweb/specify/record_merging.py | 4 +--
specifyweb/specify/views.py | 14 ++++----
4 files changed, 47 insertions(+), 24 deletions(-)
create mode 100644 specifyweb/notifications/migrations/0005_rename_mergingstatus_spmerging_status.py
diff --git a/specifyweb/notifications/migrations/0005_rename_mergingstatus_spmerging_status.py b/specifyweb/notifications/migrations/0005_rename_mergingstatus_spmerging_status.py
new file mode 100644
index 00000000000..fc087d9ca12
--- /dev/null
+++ b/specifyweb/notifications/migrations/0005_rename_mergingstatus_spmerging_status.py
@@ -0,0 +1,18 @@
+# Generated by Django 3.2.15 on 2024-05-29 17:06
+
+from django.db import migrations
+
+
+class Migration(migrations.Migration):
+
+ dependencies = [
+ ('notifications', '0004_rename_merge_policy'),
+ ]
+
+ operations = [
+ migrations.RenameField(
+ model_name='spmerging',
+ old_name='mergingstatus',
+ new_name='status',
+ ),
+ ]
diff --git a/specifyweb/notifications/models.py b/specifyweb/notifications/models.py
index 7188838dddb..d0e0fbffff2 100644
--- a/specifyweb/notifications/models.py
+++ b/specifyweb/notifications/models.py
@@ -1,7 +1,7 @@
from django.db import models
from django.utils import timezone
-from specifyweb.specify import models as spmodels
-from ..specify.models import Specifyuser
+from specifyweb.specify.models import Specifyuser, Collection, Agent
+
class Message(models.Model):
user = models.ForeignKey(Specifyuser, on_delete=models.CASCADE)
@@ -9,25 +9,30 @@ class Message(models.Model):
content = models.TextField()
read = models.BooleanField(default=False)
-Collection = getattr(spmodels, 'Collection')
-Specifyuser = getattr(spmodels, 'Specifyuser')
-Agent = getattr(spmodels, 'Agent')
-class Spmerging(models.Model):
- name = models.CharField(max_length=256)
- taskid = models.CharField(max_length=256)
- mergingstatus = models.CharField(max_length=256)
+class AsyncTask(models.Model):
+ taskid = models.CharField(max_length=256)
+ status = models.CharField(max_length=256)
+ timestampcreated = models.DateTimeField(default=timezone.now)
+ timestampmodified = models.DateTimeField(auto_now=True)
+ specifyuser = models.ForeignKey(Specifyuser, on_delete=models.CASCADE)
+ collection = models.ForeignKey(Collection, on_delete=models.CASCADE)
+ createdbyagent = models.ForeignKey(
+ Agent, null=True, on_delete=models.SET_NULL, related_name="+")
+ modifiedbyagent = models.ForeignKey(
+ Agent, null=True, on_delete=models.SET_NULL, related_name="+")
+
+ class Meta:
+ abstract = True
+
+
+class Spmerging(AsyncTask):
+ name = models.CharField(max_length=256)
response = models.TextField()
table = models.CharField(max_length=256)
newrecordid = models.IntegerField(null=True)
newrecordata = models.JSONField(null=True)
oldrecordids = models.JSONField(null=True)
- collection = models.ForeignKey(Collection, on_delete=models.CASCADE)
- specifyuser = models.ForeignKey(Specifyuser, on_delete=models.CASCADE)
- timestampcreated = models.DateTimeField(default=timezone.now)
- timestampmodified = models.DateTimeField(auto_now=True)
- createdbyagent = models.ForeignKey(Agent, null=True, on_delete=models.SET_NULL, related_name="+")
- modifiedbyagent = models.ForeignKey(Agent, null=True, on_delete=models.SET_NULL, related_name="+")
class Meta:
db_table = 'spmerging'
diff --git a/specifyweb/specify/record_merging.py b/specifyweb/specify/record_merging.py
index ed4c2fee106..71ed8511112 100644
--- a/specifyweb/specify/record_merging.py
+++ b/specifyweb/specify/record_merging.py
@@ -394,10 +394,10 @@ def progress(cur: int, additional_total: int=0) -> None:
merge_record = Spmerging.objects.get(id=merge_id)
if response.status_code != 204:
self.update_state(state='FAILED', meta={'current': current, 'total': total})
- merge_record.mergingstatus = 'FAILED'
+ merge_record.status = 'FAILED'
else:
self.update_state(state='SUCCEEDED', meta={'current': total, 'total': total})
- merge_record.mergingstatus = 'SUCCEEDED'
+ merge_record.status = 'SUCCEEDED'
merge_record.response = response.content.decode()
merge_record.save()
diff --git a/specifyweb/specify/views.py b/specifyweb/specify/views.py
index f6bdba86071..aac82fdb4ef 100644
--- a/specifyweb/specify/views.py
+++ b/specifyweb/specify/views.py
@@ -521,18 +521,18 @@ def record_merge(
if background:
# Check if another merge is still in progress
- cur_merges = Spmerging.objects.filter(mergingstatus='MERGING')
+ cur_merges = Spmerging.objects.filter(status='MERGING')
for cur_merge in cur_merges:
cur_task_id = cur_merge.taskid
cur_result = record_merge_task.AsyncResult(cur_task_id)
if cur_result is not None:
- cur_merge.mergingstatus = 'FAILED'
+ cur_merge.status = 'FAILED'
cur_merge.save()
elif cur_result.state == 'MERGING':
return http.HttpResponseNotAllowed(
'Another merge process is still running on the system, please try again later.')
else:
- cur_merge.mergingstatus = cur_result.state
+ cur_merge.status = cur_result.state
cur_merge.save()
# Create task id and a Spmerging record
@@ -540,7 +540,7 @@ def record_merge(
merge = Spmerging.objects.create(
name="Merge_" + model_name + "_" + new_model_id,
taskid=task_id,
- mergingstatus="MERGING",
+ status="MERGING",
table=model_name.title(),
newrecordid=new_model_id,
newrecordata=json.dumps(new_record_data),
@@ -679,7 +679,7 @@ def merging_status(request, merge_id: int) -> http.HttpResponse:
except Spmerging.DoesNotExist:
return http.HttpResponseNotFound(f'The merge task id is not found: {merge_id}')
- task_status = merge.mergingstatus
+ task_status = merge.status
task_progress = None
try:
@@ -695,7 +695,7 @@ def merging_status(request, merge_id: int) -> http.HttpResponse:
pass
status = {
- 'taskstatus': merge.mergingstatus,
+ 'taskstatus': merge.status,
'response': merge.response,
'taskprogress': result.info if isinstance(result.info, dict) else repr(result.info),
'taskid': merge.taskid
@@ -750,7 +750,7 @@ def abort_merge_task(request, merge_id: int) -> http.HttpResponse:
app.control.revoke(merge.taskid, terminate=True)
# Updating the merging status
- merge.mergingstatus = 'ABORTED'
+ merge.status = 'ABORTED'
merge.save()
# Send notification the the megre task has been aborted
From 958196c432cb1ad875ecbb78d261adcd15fe154c Mon Sep 17 00:00:00 2001
From: melton-jason
Date: Thu, 30 May 2024 10:31:12 -0500
Subject: [PATCH 38/71] Ensure consistency with AsyncTask models and other
tables
---
.../migrations/0005_auto_20240530_1512.py | 40 +++++++++++++++++++
...5_rename_mergingstatus_spmerging_status.py | 18 ---------
specifyweb/notifications/models.py | 10 ++---
3 files changed, 45 insertions(+), 23 deletions(-)
create mode 100644 specifyweb/notifications/migrations/0005_auto_20240530_1512.py
delete mode 100644 specifyweb/notifications/migrations/0005_rename_mergingstatus_spmerging_status.py
diff --git a/specifyweb/notifications/migrations/0005_auto_20240530_1512.py b/specifyweb/notifications/migrations/0005_auto_20240530_1512.py
new file mode 100644
index 00000000000..d9d2b980418
--- /dev/null
+++ b/specifyweb/notifications/migrations/0005_auto_20240530_1512.py
@@ -0,0 +1,40 @@
+# Generated by Django 3.2.15 on 2024-05-29 17:06
+
+from django.conf import settings
+from django.db import migrations, models
+import django.db.models.deletion
+
+
+class Migration(migrations.Migration):
+
+ dependencies = [
+ ('notifications', '0004_rename_merge_policy'),
+ ]
+
+ operations = [
+ migrations.RenameField(
+ model_name='spmerging',
+ old_name='mergingstatus',
+ new_name='status',
+ ),
+ migrations.AlterField(
+ model_name='spmerging',
+ name='collection',
+ field=models.ForeignKey(db_column='CollectionID', on_delete=django.db.models.deletion.CASCADE, to='specify.collection'),
+ ),
+ migrations.AlterField(
+ model_name='spmerging',
+ name='specifyuser',
+ field=models.ForeignKey(db_column='SpecifyUserID', on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL),
+ ),
+ migrations.AlterField(
+ model_name='spmerging',
+ name="createdbyagent",
+ field=models.ForeignKey(null=True, db_column="CreatedByAgentID", on_delete=models.SET_NULL, to='specify.agent', related_name="+")
+ ),
+ migrations.AlterField(
+ model_name='spmerging',
+ name="modifiedbyagent",
+ field=models.ForeignKey(null=True, db_column="ModifiedByAgentID", on_delete=models.SET_NULL, to='specify.agent', related_name="+")
+ )
+ ]
diff --git a/specifyweb/notifications/migrations/0005_rename_mergingstatus_spmerging_status.py b/specifyweb/notifications/migrations/0005_rename_mergingstatus_spmerging_status.py
deleted file mode 100644
index fc087d9ca12..00000000000
--- a/specifyweb/notifications/migrations/0005_rename_mergingstatus_spmerging_status.py
+++ /dev/null
@@ -1,18 +0,0 @@
-# Generated by Django 3.2.15 on 2024-05-29 17:06
-
-from django.db import migrations
-
-
-class Migration(migrations.Migration):
-
- dependencies = [
- ('notifications', '0004_rename_merge_policy'),
- ]
-
- operations = [
- migrations.RenameField(
- model_name='spmerging',
- old_name='mergingstatus',
- new_name='status',
- ),
- ]
diff --git a/specifyweb/notifications/models.py b/specifyweb/notifications/models.py
index d0e0fbffff2..704ad270130 100644
--- a/specifyweb/notifications/models.py
+++ b/specifyweb/notifications/models.py
@@ -1,6 +1,6 @@
from django.db import models
from django.utils import timezone
-from specifyweb.specify.models import Specifyuser, Collection, Agent
+from specifyweb.specify.models import Specifyuser, Collection, Agent, Recordset
class Message(models.Model):
@@ -15,12 +15,12 @@ class AsyncTask(models.Model):
status = models.CharField(max_length=256)
timestampcreated = models.DateTimeField(default=timezone.now)
timestampmodified = models.DateTimeField(auto_now=True)
- specifyuser = models.ForeignKey(Specifyuser, on_delete=models.CASCADE)
- collection = models.ForeignKey(Collection, on_delete=models.CASCADE)
+ specifyuser = models.ForeignKey(Specifyuser, db_column='SpecifyUserID', on_delete=models.CASCADE)
+ collection = models.ForeignKey(Collection, db_column="CollectionID", on_delete=models.CASCADE)
createdbyagent = models.ForeignKey(
- Agent, null=True, on_delete=models.SET_NULL, related_name="+")
+ Agent, null=True, db_column="CreatedByAgentID", on_delete=models.SET_NULL, related_name="+")
modifiedbyagent = models.ForeignKey(
- Agent, null=True, on_delete=models.SET_NULL, related_name="+")
+ Agent, null=True, db_column="ModifiedByAgentID", on_delete=models.SET_NULL, related_name="+")
class Meta:
abstract = True
From 29f8f186e25970a05bd4c7cb01deeeaa87de2f86 Mon Sep 17 00:00:00 2001
From: melton-jason
Date: Thu, 30 May 2024 10:32:57 -0500
Subject: [PATCH 39/71] Add LocalityImport AsyncTask model
---
.../migrations/0006_localityimport.py | 37 +++++++++++++++++++
specifyweb/notifications/models.py | 9 +++++
2 files changed, 46 insertions(+)
create mode 100644 specifyweb/notifications/migrations/0006_localityimport.py
diff --git a/specifyweb/notifications/migrations/0006_localityimport.py b/specifyweb/notifications/migrations/0006_localityimport.py
new file mode 100644
index 00000000000..08bc6f27577
--- /dev/null
+++ b/specifyweb/notifications/migrations/0006_localityimport.py
@@ -0,0 +1,37 @@
+# Generated by Django 3.2.15 on 2024-05-30 15:26
+
+from django.conf import settings
+from django.db import migrations, models
+import django.db.models.deletion
+import django.utils.timezone
+
+
+class Migration(migrations.Migration):
+
+ dependencies = [
+ migrations.swappable_dependency(settings.AUTH_USER_MODEL),
+ ('specify', '__first__'),
+ ('notifications', '0005_auto_20240530_1512'),
+ ]
+
+ operations = [
+ migrations.CreateModel(
+ name='LocalityImport',
+ fields=[
+ ('id', models.AutoField(db_column='localityimportid', primary_key=True, serialize=False, verbose_name='localityimportid')),
+ ('taskid', models.CharField(max_length=256)),
+ ('status', models.CharField(max_length=256)),
+ ('result', models.JSONField(null=True)),
+ ('collection', models.ForeignKey(db_column='CollectionID', on_delete=django.db.models.deletion.CASCADE, to='specify.collection')),
+ ('recordset', models.ForeignKey(blank=True, db_column='RecordSetID', null=True, on_delete=django.db.models.deletion.CASCADE, to='specify.recordset')),
+ ('specifyuser', models.ForeignKey(db_column='SpecifyUserID', on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
+ ('createdbyagent', models.ForeignKey(db_column='CreatedByAgentID', null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to='specify.agent')),
+ ('modifiedbyagent', models.ForeignKey(db_column='ModifiedByAgentID', null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to='specify.agent')),
+ ('timestampcreated', models.DateTimeField(default=django.utils.timezone.now)),
+ ('timestampmodified', models.DateTimeField(auto_now=True)),
+ ],
+ options={
+ 'db_table': 'localityimport',
+ },
+ ),
+ ]
diff --git a/specifyweb/notifications/models.py b/specifyweb/notifications/models.py
index 704ad270130..8010d096aa4 100644
--- a/specifyweb/notifications/models.py
+++ b/specifyweb/notifications/models.py
@@ -37,3 +37,12 @@ class Spmerging(AsyncTask):
class Meta:
db_table = 'spmerging'
# managed = False
+
+class LocalityImport(AsyncTask):
+ id = models.AutoField('localityimportid', primary_key=True, db_column='localityimportid')
+ result = models.JSONField(null=True)
+ recordset = models.ForeignKey(
+ Recordset, null=True, blank=True, db_column="RecordSetID", on_delete=models.CASCADE)
+
+ class Meta:
+ db_table = 'localityimport'
\ No newline at end of file
From 2a8e33dcf8b185bdaa1b90a5d60dbc81f997ebfb Mon Sep 17 00:00:00 2001
From: melton-jason
Date: Thu, 30 May 2024 11:34:39 -0500
Subject: [PATCH 40/71] backend: Integrate Import Locality with worker and
notifications
---
.../components/Header/ImportLocalitySet.tsx | 12 +-
specifyweb/specify/import_locality.py | 149 ++++++++++-
specifyweb/specify/urls.py | 8 +-
specifyweb/specify/views.py | 233 ++++++++++++------
4 files changed, 312 insertions(+), 90 deletions(-)
diff --git a/specifyweb/frontend/js_src/lib/components/Header/ImportLocalitySet.tsx b/specifyweb/frontend/js_src/lib/components/Header/ImportLocalitySet.tsx
index 8c3831618f9..5f3259d4571 100644
--- a/specifyweb/frontend/js_src/lib/components/Header/ImportLocalitySet.tsx
+++ b/specifyweb/frontend/js_src/lib/components/Header/ImportLocalitySet.tsx
@@ -3,10 +3,11 @@ import type { LocalizedString } from 'typesafe-i18n';
import { commonText } from '../../localization/common';
import { headerText } from '../../localization/header';
-import { queryText } from '../../localization/query';
import { localityText } from '../../localization/locality';
import { mainText } from '../../localization/main';
import { notificationsText } from '../../localization/notifications';
+import { queryText } from '../../localization/query';
+import { schemaText } from '../../localization/schema';
import { wbText } from '../../localization/workbench';
import { ajax } from '../../utils/ajax';
import { Http } from '../../utils/ajax/definitions';
@@ -29,7 +30,6 @@ import { hasToolPermission } from '../Permissions/helpers';
import { downloadDataSet } from '../WorkBench/helpers';
import { TableRecordCounts } from '../WorkBench/Results';
import { resolveBackendParsingMessage } from '../WorkBench/resultsParser';
-import { schemaText } from '../../localization/schema';
type Header = Exclude<
Lowercase<
@@ -62,7 +62,7 @@ type LocalityImportParseError = {
type LocalityUploadResponse =
| {
readonly type: 'ParseError';
- readonly data: RA;
+ readonly errors: RA;
}
| {
readonly type: 'Uploaded';
@@ -102,7 +102,7 @@ export function ImportLocalitySet(): JSX.Element {
rows: typeof data
): void => {
loading(
- ajax('/api/import/locality_set/', {
+ ajax('/api/locality_set/import/', {
headers: { Accept: 'application/json' },
expectedErrors: [Http.UNPROCESSABLE],
method: 'POST',
@@ -330,7 +330,7 @@ function LocalityImportErrors({
mainText.errorMessage(),
];
- const data = results.data.map(
+ const data = results.errors.map(
({ message, payload, field, rowNumber }) => [
rowNumber.toString(),
field,
@@ -359,7 +359,7 @@ function LocalityImportErrors({
{mainText.errorMessage()} |
- {results.data.map(({ rowNumber, field, message, payload }, index) => (
+ {results.errors.map(({ rowNumber, field, message, payload }, index) => (
| {rowNumber} |
{field} |
diff --git a/specifyweb/specify/import_locality.py b/specifyweb/specify/import_locality.py
index 4c8dbba774d..67b86109474 100644
--- a/specifyweb/specify/import_locality.py
+++ b/specifyweb/specify/import_locality.py
@@ -1,7 +1,14 @@
-from typing import get_args as get_typing_args, Any, Dict, List, Tuple, Literal, Optional, NamedTuple, Union
+import json
+
+from typing import get_args as get_typing_args, Any, Dict, List, Tuple, Literal, Optional, NamedTuple, Union, Callable, TypedDict
+from datetime import datetime
+from django.db import transaction
import specifyweb.specify.models as spmodels
+
+from specifyweb.celery_tasks import LogErrorsTask, app
from specifyweb.specify.datamodel import datamodel
+from specifyweb.notifications.models import LocalityImport, Message
from specifyweb.specify.parse import ParseFailureKey, parse_field as _parse_field, ParseFailure as BaseParseFailure, ParseSucess as BaseParseSuccess
LocalityParseErrorMessageKey = Literal[
@@ -21,6 +28,58 @@
ImportModel = Literal['Locality', 'Geocoorddetail']
+Progress = Callable[[int, Optional[int]], None]
+
+
+class LocalityImportStatus:
+ PENDING = 'PENDING'
+ PROGRESS = 'PROGRESS'
+ SUCCEEDED = 'SUCCEEDED'
+ ABORTED = 'ABORTED'
+ FAILED = 'FAILED'
+
+
+@app.task(base=LogErrorsTask, bind=True)
+def import_locality_task(self, collection_id: int, column_headers: List[str], data: List[List[str]]) -> None:
+
+ def progress(current: int, total: int):
+ if not self.request.called_directly:
+ self.update_state(state=LocalityImportStatus.PROGRESS, meta={
+ 'current': current, 'total': total})
+ collection = spmodels.Collection.objects.get(id=collection_id)
+ with transaction.atomic():
+ results = upload_locality_set(
+ collection, column_headers, data, progress)
+
+ li = LocalityImport.objects.get(taskid=self.request.id)
+
+ if results['type'] == 'ParseError':
+ self.update_state(state=LocalityImportStatus.FAILED)
+ li.status = LocalityImportStatus.FAILED
+ li.result = json.dumps(results['errors'])
+ Message.objects.create(user=li.specifyuser, content=json.dumps({
+ 'type': 'localityimport-failed',
+ 'taskid': li.taskid,
+ 'errors': json.dumps(results['errors'])
+ }))
+ elif results['type'] == 'Uploaded':
+ li.result = json.dumps({
+ 'localities': json.dumps(results['localities']),
+ 'geocoorddetails': json.dumps(results['geocoorddetails'])
+ })
+ li.recordset = create_localityimport_recordset(
+ collection, li.specifyuser, results['localities'])
+ self.update_state(state=LocalityImportStatus.SUCCEEDED)
+ li.status = LocalityImportStatus.SUCCEEDED
+ Message.objects.create(user=li.specifyuser, content=json.dumps({
+ 'type': 'localityimport-succeeded',
+ 'taskid': li.taskid,
+ 'recordset': li.recordset.pk,
+ 'localities': json.dumps(results['localities'])
+ }))
+
+ li.save()
+
class ParseError(NamedTuple):
message: Union[ParseFailureKey, LocalityParseErrorMessageKey]
@@ -126,3 +185,91 @@ def merge_parse_results(table_name: ImportModel, results: List[Union[ParseSucces
else:
to_upload.update(result.to_upload)
return None if len(to_upload) == 0 else ParseSuccess(to_upload, table_name, locality_id, row_number), errors
+
+
+class UploadSuccess(TypedDict):
+ type: Literal["Uploaded"]
+ localities: List[int]
+ geocoorddetails: List[int]
+
+
+class UploadParseError(TypedDict):
+ type: Literal["ParseError"]
+ data: List[ParseError]
+
+
+def upload_locality_set(collection, column_headers: List[str], data: List[List[str]], progress: Optional[Progress] = None) -> Union[UploadSuccess, UploadParseError]:
+ to_upload, errors = parse_locality_set(collection, column_headers, data)
+ total = len(data)
+ processed = 0
+ result = {
+ "type": None,
+ }
+
+ if len(errors) > 0:
+ result["type"] = "ParseError"
+ result["errors"] = [error.to_json() for error in errors]
+ return result
+
+ result["type"] = "Uploaded"
+ result["localities"] = []
+ result["geocoorddetails"] = []
+
+ with transaction.atomic():
+ for parse_success in to_upload:
+ uploadable = parse_success.to_upload
+ model_name = parse_success.model
+ locality_id = parse_success.locality_id
+
+ if locality_id is None:
+ raise ValueError(
+ f"No matching Locality found on row {parse_success.row_number}")
+
+ model = getattr(spmodels, model_name)
+ locality = spmodels.Locality.objects.get(id=locality_id)
+
+ if model_name == 'Geocoorddetail':
+ locality.geocoorddetails.get_queryset().delete()
+ geoCoordDetail = model.objects.create(**uploadable)
+ geoCoordDetail.locality = locality
+ geoCoordDetail.save()
+ result["geocoorddetails"].append(geoCoordDetail.id)
+ elif model_name == 'Locality':
+ # Queryset.update() is not used here as it does not send pre/post save signals
+ for field, value in uploadable.items():
+ setattr(locality, field, value)
+ locality.save()
+ result["localities"].append(locality_id)
+ if progress is not None:
+ processed += 1
+ progress(processed, total)
+
+ return result
+
+
+# Example: Wed Jun 07 2023
+DATE_FORMAT = r"%a %b %d %Y"
+
+
+def create_localityimport_recordset(collection, specifyuser, locality_ids: List[int]):
+
+ locality_table_id = datamodel.get_table_strict('Locality').tableId
+
+ date_as_string = datetime.now().strftime(DATE_FORMAT)
+
+ with transaction.atomic():
+ rs = spmodels.Recordset.objects.create(
+ collectionmemberid=collection.id,
+ dbtableid=locality_table_id,
+ name=f"{date_as_string} Locality Import",
+ specifyuser=specifyuser,
+ type=0,
+ version=0
+ )
+ for locality_id in locality_ids:
+ spmodels.Recordsetitem.objects.create(
+ recordid=locality_id,
+ recordset=rs
+ )
+
+ return rs
diff --git a/specifyweb/specify/urls.py b/specifyweb/specify/urls.py
index e88466a48b9..0d255f286a3 100644
--- a/specifyweb/specify/urls.py
+++ b/specifyweb/specify/urls.py
@@ -40,8 +40,12 @@
url(r'^repair/$', tree_views.repair_tree),
])),
- url(r'^import/', include([
- url(r'^locality_set/$', views.upload_locality_set)
+ # locality set import endpoints
+ url(r'^locality_set/', include([
+ url(r'^parse/$', views.parse_locality_set),
+ url(r'^import/$', views.upload_locality_set),
+ url(r'^status/(?P[0-9a-fA-F-]+)$', views.localityimport_status),
+ url(r'^abort/(?P[0-9a-fA-F-]+)$', views.abort_localityimport_task),
])),
# generates Sp6 master key
diff --git a/specifyweb/specify/views.py b/specifyweb/specify/views.py
index aac82fdb4ef..e2fde829d86 100644
--- a/specifyweb/specify/views.py
+++ b/specifyweb/specify/views.py
@@ -5,14 +5,13 @@
import json
import mimetypes
from functools import wraps
-from typing import Callable, Union
+from typing import Union
from uuid import uuid4
from django import http
from django.conf import settings
from django.db import router, transaction, connection
-from specifyweb.notifications.models import Message, Spmerging
-from django.db.models import Q
+from specifyweb.notifications.models import Message, Spmerging, LocalityImport
from django.db.models.deletion import Collector
from django.views.decorators.cache import cache_control
from django.views.decorators.http import require_POST
@@ -22,7 +21,7 @@
PermissionTargetAction, PermissionsException, check_permission_targets, table_permissions_checker
from specifyweb.celery_tasks import app
from specifyweb.specify.record_merging import record_merge_fx, record_merge_task, resolve_record_merge_response
-from specifyweb.specify.import_locality import localityParseErrorMessages, parse_locality_set as _parse_locality_set
+from specifyweb.specify.import_locality import localityParseErrorMessages, parse_locality_set as _parse_locality_set, import_locality_task, LocalityImportStatus
from . import api, models as spmodels
from .specify_jar import specify_jar
from celery.utils.log import get_task_logger # type: ignore
@@ -91,6 +90,7 @@ def raise_error(request):
raise Exception('This error is a test. You may now return to your regularly '
'scheduled hacking.')
+
@login_maybe_required
@require_http_methods(['GET', 'HEAD'])
def delete_blockers(request, model, id):
@@ -826,111 +826,181 @@ def abort_merge_task(request, merge_id: int) -> http.HttpResponse:
@openapi(schema={
- "post": {
+ 'post': {
"requestBody": locality_set_body,
"responses": {
"200": {
- "description": "The Locality records were updated and GeocoordDetails uploaded successfully ",
+ "description": "Returns a GUID (job ID)",
"content": {
- "application/json": {
+ "text/plain": {
"schema": {
- "type": "object",
- "properties": {
- "type": {
- "enum": ["Uploaded"]
- },
- "localities": {
- "description": "An array of updated Locality IDs",
- "type": "array",
- "items": {
- "type": "integer",
- "minimum": 0
- }
- },
- "geocoorddetails": {
- "description": "An array of created geocoorddetail IDs",
- "type": "array",
- "items": {
- "type": "integer",
- "minimum": 0
- }
- }
- },
+ "type": "string",
+ "maxLength": 36,
+ "example": "7d34dbb2-6e57-4c4b-9546-1fe7bec1acca",
}
}
}
},
- "422": {
- "description": "Some values could not be successfully parsed",
+ }
+ },
+})
+@login_maybe_required
+@require_POST
+def upload_locality_set(request: http.HttpRequest):
+ request_data = json.loads(request.body)
+ column_headers = request_data["columnHeaders"]
+ data = request_data["data"]
+
+ task_id = str(uuid4())
+ task = import_locality_task.apply_async(
+ [request.specify_collection.id, column_headers, data], task_id=task_id)
+
+ LocalityImport.objects.create(
+ result=None,
+ taskid=task.id,
+ status=LocalityImportStatus.PENDING,
+ collection=request.specify_collection,
+ specifyuser=request.specify_user,
+ createdbyagent=request.specify_user_agent,
+ modifiedbyagent=request.specify_user_agent,
+ )
+
+ Message.objects.create(user=request.specify_user, content=json.dumps({
+ 'type': 'localityimport-starting',
+ 'taskid': task.id
+ }))
+
+ return http.JsonResponse(task.id, safe=False)
+
+
+@openapi(schema={
+ 'get': {
+ "responses": {
+ "200": {
+ "description": "Data fetched successfully",
"content": {
- "application/json": {
+ "text/plain": {
"schema": {
- "type": "object",
- "properties": {
- "type": {
+ "oneOf": [
+ {
"type": "string",
- "enum": ["ParseError"]
+ "example": "null",
+ "description": "Nothing to report"
},
- "data": locality_set_parse_error_data
- }
+ {
+ "type": "object",
+ "properties": {
+ "taskinfo": {
+ "type": "object",
+ "properties": {
+ "current": {
+ "type": "number",
+ "example": 4,
+ },
+ "total": {
+ "type": "number",
+ "example": 20,
+ }
+ }
+ },
+ "taskstatus": {
+ "type": "string",
+ "enum": [LocalityImportStatus.PENDING, LocalityImportStatus.PROGRESS]
+ }
+ }
+ }
+ ]
}
}
}
- }
+ },
}
+ },
+})
+@require_GET
+def localityimport_status(request: http.HttpRequest, task_id: str):
+ try:
+ locality_import = LocalityImport.objects.get(taskid=task_id)
+ except LocalityImport.DoesNotExist:
+ return http.HttpResponseNotFound(f"The localityimport with task id '{task_id}' was not found")
+
+ if not (locality_import.status in [LocalityImportStatus.PENDING, LocalityImportStatus.PROGRESS]):
+ return http.JsonResponse(None, safe=False)
+
+ result = import_locality_task.AsyncResult(locality_import.taskid)
+
+ status = {
+ 'taskstatus': locality_import.status,
+ 'taskinfo': result.info if isinstance(result.info, dict) else repr(result.info)
}
+ return status
+
+
+@openapi(schema={
+ 'post': {
+ 'responses': {
+ '200': {
+ 'description': 'The task has been successfully aborted or it is not running and cannot be aborted',
+ 'content': {
+ 'application/json': {
+ 'schema': {
+ 'type': 'object',
+ 'properties': {
+ 'type': {
+ 'type': 'string',
+ 'enum': ["ABORTED", "NOT_RUNNING"]
+ },
+ 'message': {
+ 'type': 'string',
+ 'description': 'Response message about the status of the task'
+ },
+ },
+ },
+ },
+ },
+ },
+ '404': {
+ 'description': 'The localityimport with task id is not found',
+ },
+ },
+ },
})
-@login_maybe_required
@require_POST
-def upload_locality_set(request: http.HttpRequest):
- """Parse and upload a locality set
- """
- request_data = json.loads(request.body)
- column_headers = request_data["columnHeaders"]
- data = request_data["data"]
+@login_maybe_required
+def abort_localityimport_task(request: http.HttpRequest, taskid: str):
+ "Aborts the merge task currently running and matching the given merge/task ID"
- to_upload, errors = _parse_locality_set(
- request.specify_collection, column_headers, data)
+ try:
+ locality_import = LocalityImport.objects.get(taskid=taskid)
+ except LocalityImport.DoesNotExist:
+ return http.HttpResponseNotFound(f"The localityimport with taskid: {taskid} is not found")
+
+ task = record_merge_task.AsyncResult(locality_import.taskid)
result = {
"type": None,
- "data": []
+ "message": None
}
- if len(errors) > 0:
- result["type"] = "ParseError"
- result["data"] = [error.to_json() for error in errors]
- return http.JsonResponse(result, status=422, safe=False)
+ if task.state in [LocalityImportStatus.PENDING, LocalityImportStatus.PROGRESS]:
+ # Revoking and terminating the task
+ app.control.revoke(locality_import.taskid, terminate=True)
- result["type"] = "Uploaded"
- result["localities"] = []
- result["geocoorddetails"] = []
+ # Updating the merging status
+ locality_import.status = LocalityImportStatus.ABORTED
+ locality_import.save()
- with transaction.atomic():
- for parse_success in to_upload:
- uploadable = parse_success.to_upload
- model_name = parse_success.model
- locality_id = parse_success.locality_id
-
- if locality_id is None:
- raise ValueError(
- f"No matching Locality found on row {parse_success.row_number}")
-
- model = getattr(spmodels, model_name)
- locality = spmodels.Locality.objects.get(id=locality_id)
-
- if model_name == 'Geocoorddetail':
- locality.geocoorddetails.get_queryset().delete()
- geoCoordDetail = model.objects.create(**uploadable)
- geoCoordDetail.locality = locality
- geoCoordDetail.save()
- result["geocoorddetails"].append(geoCoordDetail.id)
- elif model_name == 'Locality':
- # Queryset.update() is not used here as it does not send pre/post save signals
- for field, value in uploadable.items():
- setattr(locality, field, value)
- locality.save()
- result["localities"].append(locality_id)
+ # Send notification the the megre task has been aborted
+ Message.objects.create(user=request.specify_user, content=json.dumps({
+ 'type': 'localityimport-aborted',
+ 'task_id': taskid
+ }))
+ result["type"] = "ABORTED"
+ result["message"] = f'Task {locality_import.taskid} has been aborted.'
+
+ else:
+ result["type"] = "NOT_RUNNING"
+ result["message"] = 'Task %s is not running and cannot be aborted' % locality_import.taskid
return http.JsonResponse(result, safe=False)
@@ -945,6 +1015,7 @@ def upload_locality_set(request: http.HttpRequest):
"application/json": {
"schema": {
"type": "array",
+ "description": "An array of matched Locality IDs",
"items": {
"type": "integer",
"minimum": 0
From f0edc669d26f69f325fd56d01b0b3382fbca68a9 Mon Sep 17 00:00:00 2001
From: melton-jason
Date: Tue, 4 Jun 2024 12:00:28 -0500
Subject: [PATCH 41/71] frontend: Implement Loading/Progress bar
---
specifyweb/celery_tasks.py | 15 +-
.../components/Header/ImportLocalitySet.tsx | 406 +++++++++++++-----
.../components/Header/userToolDefinitions.ts | 2 +-
.../js_src/lib/components/Merging/Status.tsx | 21 +-
.../js_src/lib/components/Merging/index.tsx | 4 +-
.../js_src/lib/components/Merging/types.ts | 4 +-
.../lib/components/WorkBench/Status.tsx | 3 +-
.../js_src/lib/localization/header.ts | 4 +-
.../js_src/lib/localization/locality.ts | 25 +-
specifyweb/specify/import_locality.py | 49 ++-
specifyweb/specify/urls.py | 2 +-
specifyweb/specify/views.py | 221 ++++++----
12 files changed, 523 insertions(+), 233 deletions(-)
diff --git a/specifyweb/celery_tasks.py b/specifyweb/celery_tasks.py
index d298f87cb33..217f471d9f8 100644
--- a/specifyweb/celery_tasks.py
+++ b/specifyweb/celery_tasks.py
@@ -17,15 +17,28 @@
# Load task modules from all registered Django app configs.
app.autodiscover_tasks()
+class CELERY_TASK_STATE:
+ """ Built-In Celery Task States
+ See https://docs.celeryq.dev/en/stable/userguide/tasks.html#built-in-states
+ """
+ FAILURE = 'FAILURE'
+ PENDING = 'PENDING'
+ RECEIVED = 'RECEIVED'
+ RETRY = 'RETRY'
+ REVOKED = 'REVOKED'
+ STARTED = 'STARTED'
+ SUCCESS = 'SUCCESS'
@app.task(bind=True)
def debug_task(self):
print(f'Request: {self.request!r}')
+
logger = get_task_logger(__name__)
class LogErrorsTask(Task):
def on_failure(self, exc, task_id, args, kwargs, einfo):
logger.exception('Celery task failure!!!1', exc_info=exc)
- super(LogErrorsTask, self).on_failure(exc, task_id, args, kwargs, einfo)
+ super(LogErrorsTask, self).on_failure(
+ exc, task_id, args, kwargs, einfo)
diff --git a/specifyweb/frontend/js_src/lib/components/Header/ImportLocalitySet.tsx b/specifyweb/frontend/js_src/lib/components/Header/ImportLocalitySet.tsx
index 5f3259d4571..90dff5eff52 100644
--- a/specifyweb/frontend/js_src/lib/components/Header/ImportLocalitySet.tsx
+++ b/specifyweb/frontend/js_src/lib/components/Header/ImportLocalitySet.tsx
@@ -1,5 +1,6 @@
import React from 'react';
import type { LocalizedString } from 'typesafe-i18n';
+import type { State } from 'typesafe-reducer';
import { commonText } from '../../localization/common';
import { headerText } from '../../localization/header';
@@ -8,26 +9,29 @@ import { mainText } from '../../localization/main';
import { notificationsText } from '../../localization/notifications';
import { queryText } from '../../localization/query';
import { schemaText } from '../../localization/schema';
-import { wbText } from '../../localization/workbench';
import { ajax } from '../../utils/ajax';
-import { Http } from '../../utils/ajax/definitions';
+import { ping } from '../../utils/ajax/ping';
import type { IR, RA } from '../../utils/types';
import { localized } from '../../utils/types';
-import { H2 } from '../Atoms';
+import { H2, Progress } from '../Atoms';
import { Button } from '../Atoms/Button';
+import { Label } from '../Atoms/Form';
import { formatConjunction } from '../Atoms/Internationalization';
import { Link } from '../Atoms/Link';
+import { SECOND } from '../Atoms/timeUnits';
import { LoadingContext } from '../Core/Contexts';
import type { SerializedResource } from '../DataModel/helperTypes';
-import { createResource } from '../DataModel/resource';
+import { fetchResource } from '../DataModel/resource';
import { tables } from '../DataModel/tables';
import type { RecordSet, Tables } from '../DataModel/types';
import { softFail } from '../Errors/Crash';
+import { useTitle } from '../Molecules/AppTitle';
import { CsvFilePicker } from '../Molecules/CsvFilePicker';
import { Dialog } from '../Molecules/Dialog';
import { TableIcon } from '../Molecules/TableIcon';
import { hasToolPermission } from '../Permissions/helpers';
import { downloadDataSet } from '../WorkBench/helpers';
+import { RemainingLoadingTime } from '../WorkBench/RemainingLoadingTime';
import { TableRecordCounts } from '../WorkBench/Results';
import { resolveBackendParsingMessage } from '../WorkBench/resultsParser';
@@ -59,17 +63,6 @@ type LocalityImportParseError = {
readonly rowNumber: number;
};
-type LocalityUploadResponse =
- | {
- readonly type: 'ParseError';
- readonly errors: RA;
- }
- | {
- readonly type: 'Uploaded';
- readonly localities: RA;
- readonly geocoorddetails: RA;
- };
-
export function ImportLocalitySet(): JSX.Element {
const [headerErrors, setHeaderErrors] = React.useState({
missingRequiredHeaders: [] as RA,
@@ -77,13 +70,8 @@ export function ImportLocalitySet(): JSX.Element {
});
const [headers, setHeaders] = React.useState>([]);
+ const [taskId, setTaskId] = React.useState(undefined);
const [data, setData] = React.useState>>([]);
- const [results, setResults] = React.useState<
- LocalityUploadResponse | undefined
- >(undefined);
- const [recordSet, setRecordSet] = React.useState<
- SerializedResource | undefined
- >(undefined);
const loading = React.useContext(LoadingContext);
@@ -93,58 +81,29 @@ export function ImportLocalitySet(): JSX.Element {
unrecognizedHeaders: [] as RA,
});
setHeaders([]);
- setData([]);
- setResults(undefined);
}
- const handleImport = (
- columnHeaders: typeof headers,
- rows: typeof data
- ): void => {
+ function handleImport(
+ columnHeaders: RA,
+ data: RA>
+ ): void {
loading(
- ajax('/api/locality_set/import/', {
- headers: { Accept: 'application/json' },
- expectedErrors: [Http.UNPROCESSABLE],
+ ajax('/api/localityset/import/', {
method: 'POST',
+ headers: { Accept: 'application/json' },
body: {
columnHeaders,
- data: rows,
+ data,
+ createRecordSet: true,
},
- })
- .then(async ({ data: rawData, status }) => {
- const data =
- status === 422 && typeof rawData === 'string'
- ? (JSON.parse(rawData) as LocalityUploadResponse)
- : rawData;
-
- return data.type === 'Uploaded'
- ? ([
- data,
- await createResource('RecordSet', {
- name: `${new Date().toDateString()} Locality Repatriation Import`,
- version: 1,
- type: 0,
- dbTableId: tables.Locality.tableId,
- // @ts-expect-error
- recordSetItems: data.localities.map((id) => ({
- recordId: id,
- })),
- }),
- ] as const)
- : ([data, undefined] as const);
- })
- .then(([data, recordSet]) => {
- setData([]);
- setResults(data);
- setRecordSet(recordSet);
- })
+ }).then(({ data }) => setTaskId(data))
);
- };
+ }
return (
<>
{
const foundHeaderErrors = headers.reduce(
(accumulator, currentHeader) => {
@@ -162,7 +121,7 @@ export function ImportLocalitySet(): JSX.Element {
};
},
{
- missingRequiredHeaders: Array.from(requiredHeaders) as RA,
+ missingRequiredHeaders: Array.from(requiredHeaders),
unrecognizedHeaders: [] as RA,
}
);
@@ -235,80 +194,290 @@ export function ImportLocalitySet(): JSX.Element {
>
)}
- {results === undefined ? null : (
- setTaskId(undefined)}
/>
)}
>
);
}
+type Status =
+ | 'ABORTED'
+ | 'FAILED'
+ | 'PARSING'
+ | 'PENDING'
+ | 'PROGRESS'
+ | 'SUCCEEDED';
+
+const statusLocalization: { readonly [STATE in Status]: LocalizedString } = {
+ PENDING: localityText.localityImportStarting(),
+ PARSING: localityText.localityImportParsing(),
+ PROGRESS: localityText.localityImportProgressing(),
+ FAILED: localityText.localityImportFailed(),
+ ABORTED: localityText.localityImportCancelled(),
+ SUCCEEDED: localityText.localityImportSucceeded(),
+};
+
+type LocalityStatus =
+ | State<
+ 'ABORTED',
+ { readonly taskstatus: 'ABORTED'; readonly taskinfo: string }
+ >
+ | State<
+ 'FAILED',
+ {
+ readonly taskstatus: 'FAILED';
+ readonly taskinfo: {
+ readonly errors: RA;
+ };
+ }
+ >
+ | State<
+ 'PARSING',
+ {
+ readonly taskstatus: 'PARSING';
+ readonly taskinfo: {
+ readonly current: number;
+ readonly total: number;
+ };
+ }
+ >
+ | State<
+ 'PENDING',
+ { readonly taskstatus: 'PENDING'; readonly taskinfo: 'None' }
+ >
+ | State<
+ 'PROGRESS',
+ {
+ readonly taskstatus: 'PROGRESS';
+ readonly taskinfo: {
+ readonly current: number;
+ readonly total: number;
+ };
+ }
+ >
+ | State<
+ 'SUCCEEDED',
+ {
+ readonly taskstatus: 'SUCCEEDED';
+ readonly taskinfo: {
+ readonly recordsetid: number;
+ readonly localities: RA;
+ readonly geocoorddetails: RA;
+ };
+ }
+ >;
-function LocalityImportResults({
- results,
+const statusDimensionKey = 'localityimport-status';
+
+function LocalityImportStatus({
+ taskId,
+ onClose: handleClose,
+}: {
+ readonly taskId: string;
+ readonly onClose: () => void;
+}): JSX.Element {
+ const [state, setState] = React.useState({
+ taskstatus: 'PENDING',
+ type: 'PENDING',
+ taskinfo: 'None',
+ });
+
+ const [recordSet, setRecordSet] = React.useState<
+ SerializedResource | undefined
+ >(undefined);
+
+ React.useEffect(() => {
+ let destructorCalled = false;
+ const fetchStatus = () =>
+ void ajax(`/api/localityset/status/${taskId}`, {
+ headers: { Accept: 'application/json' },
+ })
+ .then(async ({ data }) => {
+ setState(data);
+ if (data.taskstatus === 'SUCCEEDED') {
+ await fetchResource('RecordSet', data.taskinfo.recordsetid).then(
+ setRecordSet
+ );
+ }
+ if (
+ !destructorCalled &&
+ (['PROGRESS', 'PARSING', 'PENDING'] as RA).includes(
+ data.taskstatus
+ )
+ )
+ globalThis.setTimeout(fetchStatus, SECOND);
+ })
+ .catch(softFail);
+
+ fetchStatus();
+ return (): void => {
+ destructorCalled = true;
+ };
+ }, [taskId]);
+
+ const loading = React.useContext(LoadingContext);
+
+ const title = statusLocalization[state.taskstatus];
+ useTitle(title);
+
+ return (['PARSING', 'PROGRESS'] as RA).includes(state.taskstatus) ? (
+
+ ) : state.taskstatus === 'SUCCEEDED' ? (
+
+ ) : state.taskstatus === 'FAILED' ? (
+
+ ) : state.taskstatus === 'PENDING' ? (
+
- {results.errors.map(({ rowNumber, field, message, payload }, index) => (
+ {errors.map(({ rowNumber, field, message, payload }, index) => (
| {rowNumber} |
{field} |
diff --git a/specifyweb/frontend/js_src/lib/components/Header/userToolDefinitions.ts b/specifyweb/frontend/js_src/lib/components/Header/userToolDefinitions.ts
index 94bf4de63b3..24abb14f585 100644
--- a/specifyweb/frontend/js_src/lib/components/Header/userToolDefinitions.ts
+++ b/specifyweb/frontend/js_src/lib/components/Header/userToolDefinitions.ts
@@ -101,7 +101,7 @@ const rawUserTools = ensure>>>()({
},
[commonText.import()]: {
coGeImport: {
- title: headerText.coGeImportDataset(),
+ title: headerText.importLocalityDataset(),
url: '/specify/import/locality-dataset/',
icon: icons.globe,
},
diff --git a/specifyweb/frontend/js_src/lib/components/Merging/Status.tsx b/specifyweb/frontend/js_src/lib/components/Merging/Status.tsx
index b87dff262e6..56982b84852 100644
--- a/specifyweb/frontend/js_src/lib/components/Merging/Status.tsx
+++ b/specifyweb/frontend/js_src/lib/components/Merging/Status.tsx
@@ -15,18 +15,19 @@ import { softFail } from '../Errors/Crash';
import { produceStackTrace } from '../Errors/stackTrace';
import { Dialog, dialogClassNames } from '../Molecules/Dialog';
import { downloadFile } from '../Molecules/FilePicker';
-import type { MergeStatus, StatusState } from './types';
+import type { MergingStatus, StatusState } from './types';
import { initialStatusState } from './types';
-const statusLocalization: { readonly [STATE in MergeStatus]: LocalizedString } =
- {
- MERGING: mergingText.merging(),
- ABORTED: mergingText.mergeFailed(),
- FAILED: mergingText.mergeFailed(),
- SUCCEEDED: mergingText.mergeSucceeded(),
- };
+const statusLocalization: {
+ readonly [STATE in MergingStatus]: LocalizedString;
+} = {
+ MERGING: mergingText.merging(),
+ ABORTED: mergingText.mergeFailed(),
+ FAILED: mergingText.mergeFailed(),
+ SUCCEEDED: mergingText.mergeSucceeded(),
+};
-export function Status({
+export function MergeStatus({
mergingId,
handleClose,
}: {
@@ -41,7 +42,7 @@ export function Status({
let destructorCalled = false;
const fetchStatus = () =>
void ajax<{
- readonly taskstatus: MergeStatus;
+ readonly taskstatus: MergingStatus;
readonly taskprogress: {
readonly total: number;
readonly current: number;
diff --git a/specifyweb/frontend/js_src/lib/components/Merging/index.tsx b/specifyweb/frontend/js_src/lib/components/Merging/index.tsx
index ec9876da944..9c52edb22da 100644
--- a/specifyweb/frontend/js_src/lib/components/Merging/index.tsx
+++ b/specifyweb/frontend/js_src/lib/components/Merging/index.tsx
@@ -38,7 +38,7 @@ import { CompareRecords } from './Compare';
import { recordMergingTableSpec } from './definitions';
import { InvalidMergeRecordsDialog } from './InvalidMergeRecords';
import { mergingQueryParameter } from './queryString';
-import { Status } from './Status';
+import { MergeStatus } from './Status';
export function RecordMergingLink({
table,
@@ -272,7 +272,7 @@ function Merging({
onClose={handleClose}
>
{mergeId === undefined ? undefined : (
- {
/*
* Because we can not pass down anything from the Query Builder
diff --git a/specifyweb/frontend/js_src/lib/components/Merging/types.ts b/specifyweb/frontend/js_src/lib/components/Merging/types.ts
index eecb40adeea..f1660a64569 100644
--- a/specifyweb/frontend/js_src/lib/components/Merging/types.ts
+++ b/specifyweb/frontend/js_src/lib/components/Merging/types.ts
@@ -1,6 +1,6 @@
-export type MergeStatus = 'ABORTED' | 'FAILED' | 'MERGING' | 'SUCCEEDED';
+export type MergingStatus = 'ABORTED' | 'FAILED' | 'MERGING' | 'SUCCEEDED';
export type StatusState = {
- readonly status: MergeStatus;
+ readonly status: MergingStatus;
readonly total: number;
readonly current: number;
};
diff --git a/specifyweb/frontend/js_src/lib/components/WorkBench/Status.tsx b/specifyweb/frontend/js_src/lib/components/WorkBench/Status.tsx
index cc5091f30bf..1d29d167489 100644
--- a/specifyweb/frontend/js_src/lib/components/WorkBench/Status.tsx
+++ b/specifyweb/frontend/js_src/lib/components/WorkBench/Status.tsx
@@ -12,6 +12,7 @@ import { Http } from '../../utils/ajax/definitions';
import { Progress } from '../Atoms';
import { Button } from '../Atoms/Button';
import { Label } from '../Atoms/Form';
+import { SECOND } from '../Atoms/timeUnits';
import { error } from '../Errors/assert';
import { softFail } from '../Errors/Crash';
import { useTitle } from '../Molecules/AppTitle';
@@ -20,7 +21,7 @@ import type { Dataset, Status } from '../WbPlanView/Wrapped';
import { RemainingLoadingTime } from './RemainingLoadingTime';
// How often to query back-end
-const REFRESH_RATE = 2000;
+const REFRESH_RATE = 2 * SECOND;
export function WbStatus({
dataset,
diff --git a/specifyweb/frontend/js_src/lib/localization/header.ts b/specifyweb/frontend/js_src/lib/localization/header.ts
index e500bad9624..7a063dab14a 100644
--- a/specifyweb/frontend/js_src/lib/localization/header.ts
+++ b/specifyweb/frontend/js_src/lib/localization/header.ts
@@ -143,8 +143,8 @@ export const headerText = createDictionary({
abgeschlossen ist.
`,
},
- coGeImportDataset: {
- 'en-us': 'Import Locality Repatriation Dataset',
+ importLocalityDataset: {
+ 'en-us': 'Import Locality Data Set',
},
labelName: {
'en-us': 'Label Name',
diff --git a/specifyweb/frontend/js_src/lib/localization/locality.ts b/specifyweb/frontend/js_src/lib/localization/locality.ts
index ac0b05daf6a..3b76459c589 100644
--- a/specifyweb/frontend/js_src/lib/localization/locality.ts
+++ b/specifyweb/frontend/js_src/lib/localization/locality.ts
@@ -292,9 +292,6 @@ export const localityText = createDictionary({
'uk-ua': 'DD MM SS.SS N/S/E/W (32 45 42.84 N)',
'de-ch': 'DD MM SS.SS N/S/O/W (32 45 42.84 N)',
},
- localityimportHeader: {
- 'en-us': 'Import Locality Set',
- },
localityImportHeaderError: {
'en-us': 'Errors Found in Column Headers',
},
@@ -308,13 +305,27 @@ export const localityText = createDictionary({
localityImportedAcceptedHeaders: {
'en-us': 'Only the following headers are accepted',
},
+ localityImportStarting: {
+ 'en-us': 'Starting Locality Data Set Import',
+ },
+ localityImportParsing: {
+ 'en-us': 'Parsing Locality Data Set',
+ },
+ localityImportProgressing: {
+ 'en-us': 'Importing Locality Data Set',
+ },
+ localityImportFailed: {
+ 'en-us': 'Import Locality Data Set Failed',
+ },
+ localityImportCancelled: {
+ 'en-us': 'Import Locality Data Set Cancelled',
+ },
+ localityImportSucceeded: {
+ 'en-us': 'Locality Data Set Import Succeeded',
+ },
localityImportErrorDialogHeader: {
'en-us': 'Error(s) Occured while Parsing Dataset',
},
- localityImportErrorDialogDetails: {
- 'en-us':
- 'The following Errors occured at the provided Row (Line) Numbers of the file while parsing the Dataset: ',
- },
localityImportErrorFileName: {
comment:
'The file name which is used when Parse Errors are exported. The .csv file extension is appended to the end of this string',
diff --git a/specifyweb/specify/import_locality.py b/specifyweb/specify/import_locality.py
index 67b86109474..0daf2d91319 100644
--- a/specifyweb/specify/import_locality.py
+++ b/specifyweb/specify/import_locality.py
@@ -3,6 +3,7 @@
from typing import get_args as get_typing_args, Any, Dict, List, Tuple, Literal, Optional, NamedTuple, Union, Callable, TypedDict
from datetime import datetime
from django.db import transaction
+from celery.exceptions import Ignore, TaskRevokedError
import specifyweb.specify.models as spmodels
@@ -28,11 +29,12 @@
ImportModel = Literal['Locality', 'Geocoorddetail']
-Progress = Callable[[int, Optional[int]], None]
+Progress = Callable[[str, int, int], None]
class LocalityImportStatus:
PENDING = 'PENDING'
+ PARSING = 'PARSING'
PROGRESS = 'PROGRESS'
SUCCEEDED = 'SUCCEEDED'
ABORTED = 'ABORTED'
@@ -40,13 +42,14 @@ class LocalityImportStatus:
@app.task(base=LogErrorsTask, bind=True)
-def import_locality_task(self, collection_id: int, column_headers: List[str], data: List[List[str]]) -> None:
+def import_locality_task(self, collection_id: int, column_headers: List[str], data: List[List[str]], create_recordset: bool) -> None:
- def progress(current: int, total: int):
+ def progress(state, current: int, total: int):
if not self.request.called_directly:
- self.update_state(state=LocalityImportStatus.PROGRESS, meta={
+ self.update_state(state=state, meta={
'current': current, 'total': total})
collection = spmodels.Collection.objects.get(id=collection_id)
+
with transaction.atomic():
results = upload_locality_set(
collection, column_headers, data, progress)
@@ -54,7 +57,7 @@ def progress(current: int, total: int):
li = LocalityImport.objects.get(taskid=self.request.id)
if results['type'] == 'ParseError':
- self.update_state(state=LocalityImportStatus.FAILED)
+ self.update_state(LocalityImportStatus.FAILED, meta={"errors": results['errors']})
li.status = LocalityImportStatus.FAILED
li.result = json.dumps(results['errors'])
Message.objects.create(user=li.specifyuser, content=json.dumps({
@@ -63,23 +66,31 @@ def progress(current: int, total: int):
'errors': json.dumps(results['errors'])
}))
elif results['type'] == 'Uploaded':
+ li.recordset = create_localityimport_recordset(
+ collection, li.specifyuser, results['localities']) if create_recordset else None
+
+ recordset_id = None if li.recordset is None else li.recordset.pk
+
+ self.update_state(state=LocalityImportStatus.SUCCEEDED, meta={
+ "recordsetid": recordset_id, "localities": results['localities'], "geocoorddetails": results['geocoorddetails']})
li.result = json.dumps({
+ 'recordsetid': recordset_id,
'localities': json.dumps(results['localities']),
'geocoorddetails': json.dumps(results['geocoorddetails'])
})
- li.recordset = create_localityimport_recordset(
- collection, li.specifyuser, results['localities'])
- self.update_state(state=LocalityImportStatus.SUCCEEDED)
li.status = LocalityImportStatus.SUCCEEDED
Message.objects.create(user=li.specifyuser, content=json.dumps({
'type': 'localityimport-succeeded',
'taskid': li.taskid,
- 'recordset': li.recordset.pk,
+ 'recordsetid': recordset_id,
'localities': json.dumps(results['localities'])
}))
li.save()
+ # prevent Celery from overriding the State of the Task
+ raise Ignore()
+
class ParseError(NamedTuple):
message: Union[ParseFailureKey, LocalityParseErrorMessageKey]
@@ -106,7 +117,7 @@ def from_base_parse_success(cls, parse_success: BaseParseSuccess, model: ImportM
return cls(parse_success.to_upload, model, locality_id, row_number)
-def parse_locality_set(collection, raw_headers: List[str], data: List[List[str]]) -> Tuple[List[ParseSuccess], List[ParseError]]:
+def parse_locality_set(collection, raw_headers: List[str], data: List[List[str]], progress: Optional[Progress] = None) -> Tuple[List[ParseSuccess], List[ParseError]]:
errors: List[ParseError] = []
to_upload: List[ParseSuccess] = []
@@ -124,6 +135,9 @@ def parse_locality_set(collection, raw_headers: List[str], data: List[List[str]]
geocoorddetail_fields_index = [{'field': field, 'index': headers.index(
field)} for field in headers if field.lower() in updatable_geocoorddetail_fields]
+ processed = 0
+ total = len(data)
+
for row_mumber, row in enumerate(data):
guid = row[guid_index]
locality_query = spmodels.Locality.objects.filter(guid=guid)
@@ -164,6 +178,10 @@ def parse_locality_set(collection, raw_headers: List[str], data: List[List[str]]
if merged_geocoorddetail_result is not None:
to_upload.append(merged_geocoorddetail_result)
+ if progress is not None:
+ processed += 1
+ progress(LocalityImportStatus.PARSING, processed, total)
+
return to_upload, errors
@@ -195,13 +213,11 @@ class UploadSuccess(TypedDict):
class UploadParseError(TypedDict):
type: Literal["ParseError"]
- data: List[ParseError]
+ errors: List[ParseError]
def upload_locality_set(collection, column_headers: List[str], data: List[List[str]], progress: Optional[Progress] = None) -> Union[UploadSuccess, UploadParseError]:
- to_upload, errors = parse_locality_set(collection, column_headers, data)
- total = len(data)
- processed = 0
+ to_upload, errors = parse_locality_set(collection, column_headers, data, progress)
result = {
"type": None,
}
@@ -215,6 +231,9 @@ def upload_locality_set(collection, column_headers: List[str], data: List[List[s
result["localities"] = []
result["geocoorddetails"] = []
+ processed = 0
+ total = len(to_upload)
+
with transaction.atomic():
for parse_success in to_upload:
uploadable = parse_success.to_upload
@@ -242,7 +261,7 @@ def upload_locality_set(collection, column_headers: List[str], data: List[List[s
result["localities"].append(locality_id)
if progress is not None:
processed += 1
- progress(processed, total)
+ progress(LocalityImportStatus.PROGRESS, processed, total)
return result
diff --git a/specifyweb/specify/urls.py b/specifyweb/specify/urls.py
index 0d255f286a3..bd6285e1d87 100644
--- a/specifyweb/specify/urls.py
+++ b/specifyweb/specify/urls.py
@@ -41,7 +41,7 @@
])),
# locality set import endpoints
- url(r'^locality_set/', include([
+ url(r'^localityset/', include([
url(r'^parse/$', views.parse_locality_set),
url(r'^import/$', views.upload_locality_set),
url(r'^status/(?P[0-9a-fA-F-]+)$', views.localityimport_status),
diff --git a/specifyweb/specify/views.py b/specifyweb/specify/views.py
index e2fde829d86..51c7729223f 100644
--- a/specifyweb/specify/views.py
+++ b/specifyweb/specify/views.py
@@ -19,7 +19,7 @@
from specifyweb.middleware.general import require_GET, require_http_methods
from specifyweb.permissions.permissions import PermissionTarget, \
PermissionTargetAction, PermissionsException, check_permission_targets, table_permissions_checker
-from specifyweb.celery_tasks import app
+from specifyweb.celery_tasks import app, CELERY_TASK_STATE
from specifyweb.specify.record_merging import record_merge_fx, record_merge_task, resolve_record_merge_response
from specifyweb.specify.import_locality import localityParseErrorMessages, parse_locality_set as _parse_locality_set, import_locality_task, LocalityImportStatus
from . import api, models as spmodels
@@ -603,17 +603,6 @@ def record_merge(
return response
-CELERY_MERGE_STATUS_MAP = {
- 'PENDING': 'PENDING',
- 'STARTED': 'MERGING',
- 'SUCCESS': 'SUCCEEDED',
- 'FAILURE': 'FAILED',
- 'RETRY': 'MERGING',
- 'REVOKED': 'FAILED',
- 'REJECTED': 'FAILED'
-}
-
-
@openapi(schema={
'get': {
"responses": {
@@ -666,6 +655,9 @@ def record_merge(
}
}
},
+ '404': {
+ 'description': 'The spmerging object with task id was not found',
+ },
}
},
})
@@ -679,20 +671,7 @@ def merging_status(request, merge_id: int) -> http.HttpResponse:
except Spmerging.DoesNotExist:
return http.HttpResponseNotFound(f'The merge task id is not found: {merge_id}')
- task_status = merge.status
- task_progress = None
-
- try:
- result = record_merge_task.AsyncResult(merge.taskid)
- task_progress = result.info if isinstance(
- result.info, dict) else repr(result.info)
-
- # Update task status if necessary
- if result.state not in ['PENDING', 'STARTED', 'SUCCESS', 'RETRY']:
- task_status = CELERY_MERGE_STATUS_MAP.get(
- result.state, task_status)
- except Exception:
- pass
+ result = record_merge_task.AsyncResult(merge.taskid)
status = {
'taskstatus': merge.status,
@@ -769,34 +748,6 @@ def abort_merge_task(request, merge_id: int) -> http.HttpResponse:
return http.HttpResponse(f'Task {merge.taskid} is not running and cannot be aborted.')
-locality_set_body = {
- "required": True,
- "content": {
- "application/json": {
- "schema": {
- "type": "object",
- "properties": {
- "columnHeaders": {
- "type": "array",
- "items": {
- "type": "string"
- }
- },
- "data": {
- "type": "array",
- "items": {
- "type": "array",
- "items": {
- "type": "string"
- }
- }
- }
- }
- }
- }
- }
-}
-
locality_set_parse_error_data = {
"type": "array",
"items": {
@@ -814,7 +765,7 @@ def abort_merge_task(request, merge_id: int) -> http.HttpResponse:
"payload": {
"description": "An object containing data relating to the error",
"type": "object",
- "example": {'badType': 'Preson', 'validTypes': ['Organization', 'Person', 'Other', 'Group',]}
+ "example": {'badType': 'Preson', 'validTypes': ['Organization', 'Person', 'Other', 'Group']}
},
"rowNumber": {
"type": "integer",
@@ -827,7 +778,38 @@ def abort_merge_task(request, merge_id: int) -> http.HttpResponse:
@openapi(schema={
'post': {
- "requestBody": locality_set_body,
+ "requestBody": {
+ "required": True,
+ "content": {
+ "application/json": {
+ "schema": {
+ "type": "object",
+ "properties": {
+ "columnHeaders": {
+ "type": "array",
+ "items": {
+ "type": "string"
+ }
+ },
+ "data": {
+ "type": "array",
+ "items": {
+ "type": "array",
+ "items": {
+ "type": "string"
+ }
+ }
+ },
+ "createRecordSet": {
+ "type": "boolean",
+ "description": "When True, creates a recordset in the logged-in collection for the logged-in user with the matched/updated localities if the upload succeeds",
+ "default": True
+ }
+ }
+ }
+ }
+ }
+ },
"responses": {
"200": {
"description": "Returns a GUID (job ID)",
@@ -848,12 +830,14 @@ def abort_merge_task(request, merge_id: int) -> http.HttpResponse:
@require_POST
def upload_locality_set(request: http.HttpRequest):
request_data = json.loads(request.body)
+
column_headers = request_data["columnHeaders"]
data = request_data["data"]
+ create_recordset = request_data.get("createRecordSet", True)
task_id = str(uuid4())
task = import_locality_task.apply_async(
- [request.specify_collection.id, column_headers, data], task_id=task_id)
+ [request.specify_collection.id, column_headers, data, create_recordset], task_id=task_id)
LocalityImport.objects.create(
result=None,
@@ -883,13 +867,24 @@ def upload_locality_set(request: http.HttpRequest):
"schema": {
"oneOf": [
{
- "type": "string",
- "example": "null",
- "description": "Nothing to report"
+ "type": "object",
+ "properties": {
+ "taskstatus": {
+ "type": "string",
+ "enum": [LocalityImportStatus.PENDING, LocalityImportStatus.ABORTED]
+ },
+ "taskinfo": {
+ "type": "string",
+ },
+ }
},
{
"type": "object",
"properties": {
+ "taskstatus": {
+ "type": "string",
+ "enum": [LocalityImportStatus.PARSING, LocalityImportStatus.PROGRESS]
+ },
"taskinfo": {
"type": "object",
"properties": {
@@ -903,9 +898,51 @@ def upload_locality_set(request: http.HttpRequest):
}
}
},
+ }
+ },
+ {
+ "type": "object",
+ "properties": {
"taskstatus": {
"type": "string",
- "enum": [LocalityImportStatus.PENDING, LocalityImportStatus.PROGRESS]
+ "enum": [LocalityImportStatus.SUCCEEDED]
+ },
+ "taskinfo": {
+ "type": "object",
+ "properties": {
+ "recordsetid": {
+ "type": "number"
+ },
+ "localities": {
+ "type": "array",
+ "description": "An array of matched/updated Locality IDs",
+ "items": {
+ "type": "number"
+ }
+ },
+ "geocoorddetails": {
+ "type": "array",
+ "description": "An array of created GeoCoordDetail IDs",
+ "items": {
+ "type": "number"
+ }
+ }
+ }
+ }
+ }
+ },
+ {
+ "type": "object",
+ "properties": {
+ "taskstatus": {
+ "type": "string",
+ "enum": [LocalityImportStatus.FAILED]
+ },
+ "taskinfo": {
+ "type": "object",
+ "properties": {
+ "errors": locality_set_parse_error_data
+ }
}
}
}
@@ -914,27 +951,42 @@ def upload_locality_set(request: http.HttpRequest):
}
}
},
+ '404': {
+ 'description': 'The localityimport object with task id was not found',
+ },
}
},
})
@require_GET
-def localityimport_status(request: http.HttpRequest, task_id: str):
+def localityimport_status(request: http.HttpRequest, taskid: str):
try:
- locality_import = LocalityImport.objects.get(taskid=task_id)
+ locality_import = LocalityImport.objects.get(taskid=taskid)
except LocalityImport.DoesNotExist:
- return http.HttpResponseNotFound(f"The localityimport with task id '{task_id}' was not found")
-
- if not (locality_import.status in [LocalityImportStatus.PENDING, LocalityImportStatus.PROGRESS]):
- return http.JsonResponse(None, safe=False)
+ return http.HttpResponseNotFound(f"The localityimport with task id '{taskid}' was not found")
result = import_locality_task.AsyncResult(locality_import.taskid)
+ resolved_state = LocalityImportStatus.ABORTED if result.state == CELERY_TASK_STATE.REVOKED else result.state
+
+
status = {
- 'taskstatus': locality_import.status,
+ 'taskstatus': resolved_state,
'taskinfo': result.info if isinstance(result.info, dict) else repr(result.info)
}
- return status
+ if locality_import.status == LocalityImportStatus.FAILED:
+ status["taskstatus"] = LocalityImportStatus.FAILED
+ status["taskinfo"] = {"errors": json.loads(locality_import.result)}
+ elif locality_import.status == LocalityImportStatus.SUCCEEDED:
+ status["taskstatus"] = LocalityImportStatus.SUCCEEDED
+ success_result = json.loads(locality_import.result)
+ status["taskinfo"] = {
+ "recordsetid": success_result["recordsetid"],
+ "localities": success_result["localities"],
+ "geocoorddetails": success_result["geocoorddetails"]
+ }
+
+ return http.JsonResponse(status, safe=False)
@openapi(schema={
'post': {
@@ -983,14 +1035,11 @@ def abort_localityimport_task(request: http.HttpRequest, taskid: str):
}
if task.state in [LocalityImportStatus.PENDING, LocalityImportStatus.PROGRESS]:
- # Revoking and terminating the task
app.control.revoke(locality_import.taskid, terminate=True)
- # Updating the merging status
locality_import.status = LocalityImportStatus.ABORTED
locality_import.save()
- # Send notification the the megre task has been aborted
Message.objects.create(user=request.specify_user, content=json.dumps({
'type': 'localityimport-aborted',
'task_id': taskid
@@ -1007,7 +1056,33 @@ def abort_localityimport_task(request: http.HttpRequest, taskid: str):
@openapi(schema={
"post": {
- "requestBody": locality_set_body,
+ "requestBody": {
+ "required": True,
+ "content": {
+ "application/json": {
+ "schema": {
+ "type": "object",
+ "properties": {
+ "columnHeaders": {
+ "type": "array",
+ "items": {
+ "type": "string"
+ }
+ },
+ "data": {
+ "type": "array",
+ "items": {
+ "type": "array",
+ "items": {
+ "type": "string"
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ },
"responses": {
"200": {
"description": "Locality Import Set parsed successfully",
From 1f640c340ec7eb9ca9069912367f0f8c48327c7a Mon Sep 17 00:00:00 2001
From: melton-jason
Date: Tue, 4 Jun 2024 12:19:55 -0500
Subject: [PATCH 42/71] Reorganize Locality Import Data Set Code
---
.../Status.tsx} | 299 ++----------------
.../lib/components/LocalityImport/index.tsx | 166 ++++++++++
.../lib/components/LocalityImport/types.ts | 77 +++++
.../lib/components/LocalityImport/utils.ts | 63 ++++
.../js_src/lib/components/Router/Routes.tsx | 4 +-
5 files changed, 329 insertions(+), 280 deletions(-)
rename specifyweb/frontend/js_src/lib/components/{Header/ImportLocalitySet.tsx => LocalityImport/Status.tsx} (50%)
create mode 100644 specifyweb/frontend/js_src/lib/components/LocalityImport/index.tsx
create mode 100644 specifyweb/frontend/js_src/lib/components/LocalityImport/types.ts
create mode 100644 specifyweb/frontend/js_src/lib/components/LocalityImport/utils.ts
diff --git a/specifyweb/frontend/js_src/lib/components/Header/ImportLocalitySet.tsx b/specifyweb/frontend/js_src/lib/components/LocalityImport/Status.tsx
similarity index 50%
rename from specifyweb/frontend/js_src/lib/components/Header/ImportLocalitySet.tsx
rename to specifyweb/frontend/js_src/lib/components/LocalityImport/Status.tsx
index 90dff5eff52..c83fabc30c0 100644
--- a/specifyweb/frontend/js_src/lib/components/Header/ImportLocalitySet.tsx
+++ b/specifyweb/frontend/js_src/lib/components/LocalityImport/Status.tsx
@@ -1,9 +1,7 @@
import React from 'react';
import type { LocalizedString } from 'typesafe-i18n';
-import type { State } from 'typesafe-reducer';
import { commonText } from '../../localization/common';
-import { headerText } from '../../localization/header';
import { localityText } from '../../localization/locality';
import { mainText } from '../../localization/main';
import { notificationsText } from '../../localization/notifications';
@@ -11,275 +9,46 @@ import { queryText } from '../../localization/query';
import { schemaText } from '../../localization/schema';
import { ajax } from '../../utils/ajax';
import { ping } from '../../utils/ajax/ping';
-import type { IR, RA } from '../../utils/types';
+import type { RA } from '../../utils/types';
import { localized } from '../../utils/types';
import { H2, Progress } from '../Atoms';
import { Button } from '../Atoms/Button';
import { Label } from '../Atoms/Form';
-import { formatConjunction } from '../Atoms/Internationalization';
import { Link } from '../Atoms/Link';
import { SECOND } from '../Atoms/timeUnits';
import { LoadingContext } from '../Core/Contexts';
import type { SerializedResource } from '../DataModel/helperTypes';
import { fetchResource } from '../DataModel/resource';
import { tables } from '../DataModel/tables';
-import type { RecordSet, Tables } from '../DataModel/types';
+import type { RecordSet } from '../DataModel/types';
import { softFail } from '../Errors/Crash';
import { useTitle } from '../Molecules/AppTitle';
-import { CsvFilePicker } from '../Molecules/CsvFilePicker';
import { Dialog } from '../Molecules/Dialog';
import { TableIcon } from '../Molecules/TableIcon';
import { hasToolPermission } from '../Permissions/helpers';
import { downloadDataSet } from '../WorkBench/helpers';
import { RemainingLoadingTime } from '../WorkBench/RemainingLoadingTime';
import { TableRecordCounts } from '../WorkBench/Results';
-import { resolveBackendParsingMessage } from '../WorkBench/resultsParser';
-
-type Header = Exclude<
- Lowercase<
- | keyof Tables['GeoCoordDetail']['fields']
- | keyof Tables['Locality']['fields']
- >,
- 'locality'
->;
-
-const acceptedLocalityFields: RA<
- Lowercase<keyof Tables['Locality']['fields']>
-> = ['guid', 'datum', 'latitude1', 'longitude1'];
-
-const acceptedHeaders = new Set([
- ...acceptedLocalityFields,
- ...tables.GeoCoordDetail.literalFields
- .map(({ name }) => name.toLowerCase())
- .filter((header) => header !== 'locality'),
-]);
-
-const requiredHeaders = new Set(['guid']);
-
-type LocalityImportParseError = {
- readonly message: string;
- readonly field: string;
- readonly payload: IR<unknown>;
- readonly rowNumber: number;
-};
-
-export function ImportLocalitySet(): JSX.Element {
- const [headerErrors, setHeaderErrors] = React.useState({
- missingRequiredHeaders: [] as RA,
- unrecognizedHeaders: [] as RA,
- });
-
- const [headers, setHeaders] = React.useState<RA<string>>([]);
- const [taskId, setTaskId] = React.useState<string | undefined>(undefined);
- const [data, setData] = React.useState<RA<RA<string>>>([]);
-
- const loading = React.useContext(LoadingContext);
-
- function resetContext(): void {
- setHeaderErrors({
- missingRequiredHeaders: [] as RA,
- unrecognizedHeaders: [] as RA,
- });
- setHeaders([]);
- }
-
- function handleImport(
- columnHeaders: RA,
- data: RA>
- ): void {
- loading(
- ajax('/api/localityset/import/', {
- method: 'POST',
- headers: { Accept: 'application/json' },
- body: {
- columnHeaders,
- data,
- createRecordSet: true,
- },
- }).then(({ data }) => setTaskId(data))
- );
- }
-
- return (
- <>
- {
- const foundHeaderErrors = headers.reduce(
- (accumulator, currentHeader) => {
- const parsedHeader = currentHeader.toLowerCase().trim() as Header;
- const isUnknown = !acceptedHeaders.has(parsedHeader);
-
- return {
- missingRequiredHeaders:
- accumulator.missingRequiredHeaders.filter(
- (header) => header !== parsedHeader
- ),
- unrecognizedHeaders: isUnknown
- ? [...accumulator.unrecognizedHeaders, currentHeader]
- : accumulator.unrecognizedHeaders,
- };
- },
- {
- missingRequiredHeaders: Array.from(requiredHeaders),
- unrecognizedHeaders: [] as RA,
- }
- );
- setHeaderErrors(foundHeaderErrors);
- setHeaders(headers);
- setData(data);
-
- if (
- !Object.values(foundHeaderErrors).some(
- (errors) => errors.length > 0
- )
- )
- handleImport(headers, data);
- }}
- />
- {Object.values(headerErrors).some((errors) => errors.length > 0) && (
-
- {commonText.close()}
- {headerErrors.missingRequiredHeaders.length === 0 && (
- {
- const storedHeaders = headers;
- const storedData = data;
- handleImport(storedHeaders, storedData);
- resetContext();
- }}
- >
- {commonText.import()}
-
- )}
- >
- }
- header={localityText.localityImportHeaderError()}
- icon={
- headerErrors.missingRequiredHeaders.length === 0
- ? 'warning'
- : 'error'
- }
- onClose={resetContext}
- >
- <>
- {headerErrors.missingRequiredHeaders.length > 0 && (
- <>
- {localityText.localityImportMissingHeader()}
-
- {formatConjunction(
- headerErrors.missingRequiredHeaders as RA
- )}
-
- >
- )}
- {headerErrors.unrecognizedHeaders.length > 0 && (
- <>
- {localityText.localityImportUnrecognizedHeaders()}
-
- {formatConjunction(
- headerErrors.unrecognizedHeaders as RA
- )}
-
- >
- )}
- {localityText.localityImportedAcceptedHeaders()}
-
- {formatConjunction(
- Array.from(acceptedHeaders) as unknown as RA
- )}
-
- >
-
- )}
- {taskId === undefined ? undefined : (
- setTaskId(undefined)}
- />
- )}
- >
- );
-}
-type Status =
- | 'ABORTED'
- | 'FAILED'
- | 'PARSING'
- | 'PENDING'
- | 'PROGRESS'
- | 'SUCCEEDED';
-
-const statusLocalization: { readonly [STATE in Status]: LocalizedString } = {
- PENDING: localityText.localityImportStarting(),
- PARSING: localityText.localityImportParsing(),
- PROGRESS: localityText.localityImportProgressing(),
- FAILED: localityText.localityImportFailed(),
- ABORTED: localityText.localityImportCancelled(),
- SUCCEEDED: localityText.localityImportSucceeded(),
-};
-
-type LocalityStatus =
- | State<
- 'ABORTED',
- { readonly taskstatus: 'ABORTED'; readonly taskinfo: string }
- >
- | State<
- 'FAILED',
- {
- readonly taskstatus: 'FAILED';
- readonly taskinfo: {
- readonly errors: RA<LocalityImportParseError>;
- };
- }
- >
- | State<
- 'PARSING',
- {
- readonly taskstatus: 'PARSING';
- readonly taskinfo: {
- readonly current: number;
- readonly total: number;
- };
- }
- >
- | State<
- 'PENDING',
- { readonly taskstatus: 'PENDING'; readonly taskinfo: 'None' }
- >
- | State<
- 'PROGRESS',
- {
- readonly taskstatus: 'PROGRESS';
- readonly taskinfo: {
- readonly current: number;
- readonly total: number;
- };
- }
- >
- | State<
- 'SUCCEEDED',
- {
- readonly taskstatus: 'SUCCEEDED';
- readonly taskinfo: {
- readonly recordsetid: number;
- readonly localities: RA<number>;
- readonly geocoorddetails: RA<number>;
- };
- }
- >;
+import type {
+ LocalityImportParseError,
+ LocalityImportState,
+ LocalityImportTaskStatus,
+} from './types';
+import {
+ localityImportStatusLocalization,
+ resolveImportLocalityErrorMessage,
+} from './utils';
const statusDimensionKey = 'localityimport-status';
-function LocalityImportStatus({
+export function LocalityImportStatus({
taskId,
onClose: handleClose,
}: {
readonly taskId: string;
readonly onClose: () => void;
}): JSX.Element {
- const [state, setState] = React.useState<LocalityStatus>({
+ const [state, setState] = React.useState<LocalityImportState>({
taskstatus: 'PENDING',
type: 'PENDING',
taskinfo: 'None',
@@ -292,7 +61,7 @@ function LocalityImportStatus({
React.useEffect(() => {
let destructorCalled = false;
const fetchStatus = () =>
- void ajax<LocalityStatus>(`/api/localityset/status/${taskId}`, {
+ void ajax<LocalityImportState>(`/api/localityset/status/${taskId}`, {
headers: { Accept: 'application/json' },
})
.then(async ({ data }) => {
@@ -304,9 +73,9 @@ function LocalityImportStatus({
}
if (
!destructorCalled &&
- (['PROGRESS', 'PARSING', 'PENDING'] as RA<Status>).includes(
- data.taskstatus
- )
+ (
+ ['PROGRESS', 'PARSING', 'PENDING'] as RA<LocalityImportTaskStatus>
+ ).includes(data.taskstatus)
)
globalThis.setTimeout(fetchStatus, SECOND);
})
@@ -320,10 +89,12 @@ function LocalityImportStatus({
const loading = React.useContext(LoadingContext);
- const title = statusLocalization[state.taskstatus];
+ const title = localityImportStatusLocalization[state.taskstatus];
useTitle(title);
- return (['PARSING', 'PROGRESS'] as RA<Status>).includes(state.taskstatus) ? (
+ return (['PARSING', 'PROGRESS'] as RA<LocalityImportTaskStatus>).includes(
+ state.taskstatus
+ ) ? (
);
}
-
-function resolveImportLocalityErrorMessage(
- key: string,
- payload: IR
-): LocalizedString {
- const baseParseResults = resolveBackendParsingMessage(key, payload);
-
- if (baseParseResults !== undefined) {
- return baseParseResults;
- } else if (key === 'guidHeaderNotProvided') {
- return localityText.guidHeaderNotProvided();
- } else if (key === 'noLocalityMatchingGuid') {
- return localityText.noLocalityMatchingGuid({
- guid: payload.guid as string,
- });
- } else if (key === 'multipleLocalitiesWithGuid') {
- return localityText.multipleLocalitiesWithGuid({
- guid: payload.guid as string,
- localityIds: (payload.localityIds as RA).join(', '),
- });
- } else {
- return commonText.colonLine({
- label: key,
- value:
- Object.keys(payload).length === 0 ? '' : `${JSON.stringify(payload)}`,
- });
- }
-}
diff --git a/specifyweb/frontend/js_src/lib/components/LocalityImport/index.tsx b/specifyweb/frontend/js_src/lib/components/LocalityImport/index.tsx
new file mode 100644
index 00000000000..e8442b05b30
--- /dev/null
+++ b/specifyweb/frontend/js_src/lib/components/LocalityImport/index.tsx
@@ -0,0 +1,166 @@
+import React from 'react';
+import type { LocalizedString } from 'typesafe-i18n';
+
+import { commonText } from '../../localization/common';
+import { headerText } from '../../localization/header';
+import { localityText } from '../../localization/locality';
+import { ajax } from '../../utils/ajax';
+import type { RA } from '../../utils/types';
+import { H2 } from '../Atoms';
+import { Button } from '../Atoms/Button';
+import { formatConjunction } from '../Atoms/Internationalization';
+import { LoadingContext } from '../Core/Contexts';
+import { CsvFilePicker } from '../Molecules/CsvFilePicker';
+import { Dialog } from '../Molecules/Dialog';
+import { LocalityImportStatus } from './Status';
+import type { LocalityImportHeader } from './types';
+import {
+ localityImportAcceptedHeaders,
+ localityImportRequiredHeaders,
+} from './utils';
+
+export function ImportLocalityDataSet(): JSX.Element {
+ const [headerErrors, setHeaderErrors] = React.useState({
+ missingRequiredHeaders: [] as RA,
+ unrecognizedHeaders: [] as RA,
+ });
+
+ const [headers, setHeaders] = React.useState<RA<string>>([]);
+ const [taskId, setTaskId] = React.useState<string | undefined>(undefined);
+ const [data, setData] = React.useState<RA<RA<string>>>([]);
+
+ const loading = React.useContext(LoadingContext);
+
+ function resetContext(): void {
+ setHeaderErrors({
+ missingRequiredHeaders: [] as RA,
+ unrecognizedHeaders: [] as RA,
+ });
+ setHeaders([]);
+ }
+
+ function handleImport(
+ columnHeaders: RA,
+ data: RA>
+ ): void {
+ loading(
+ ajax('/api/localityset/import/', {
+ method: 'POST',
+ headers: { Accept: 'application/json' },
+ body: {
+ columnHeaders,
+ data,
+ createRecordSet: true,
+ },
+ }).then(({ data }) => setTaskId(data))
+ );
+ }
+
+ return (
+ <>
+ {
+ const foundHeaderErrors = headers.reduce(
+ (accumulator, currentHeader) => {
+ const parsedHeader = currentHeader
+ .toLowerCase()
+ .trim() as LocalityImportHeader;
+ const isUnknown =
+ !localityImportAcceptedHeaders.has(parsedHeader);
+
+ return {
+ missingRequiredHeaders:
+ accumulator.missingRequiredHeaders.filter(
+ (header) => header !== parsedHeader
+ ),
+ unrecognizedHeaders: isUnknown
+ ? [...accumulator.unrecognizedHeaders, currentHeader]
+ : accumulator.unrecognizedHeaders,
+ };
+ },
+ {
+ missingRequiredHeaders: Array.from(localityImportRequiredHeaders),
+ unrecognizedHeaders: [] as RA,
+ }
+ );
+ setHeaderErrors(foundHeaderErrors);
+ setHeaders(headers);
+ setData(data);
+
+ if (
+ !Object.values(foundHeaderErrors).some(
+ (errors) => errors.length > 0
+ )
+ )
+ handleImport(headers, data);
+ }}
+ />
+ {Object.values(headerErrors).some((errors) => errors.length > 0) && (
+
+ {commonText.close()}
+ {headerErrors.missingRequiredHeaders.length === 0 && (
+ {
+ const storedHeaders = headers;
+ const storedData = data;
+ handleImport(storedHeaders, storedData);
+ resetContext();
+ }}
+ >
+ {commonText.import()}
+
+ )}
+ >
+ }
+ header={localityText.localityImportHeaderError()}
+ icon={
+ headerErrors.missingRequiredHeaders.length === 0
+ ? 'warning'
+ : 'error'
+ }
+ onClose={resetContext}
+ >
+ <>
+ {headerErrors.missingRequiredHeaders.length > 0 && (
+ <>
+ {localityText.localityImportMissingHeader()}
+
+ {formatConjunction(
+ headerErrors.missingRequiredHeaders as RA<LocalizedString>
+ )}
+
+ >
+ )}
+ {headerErrors.unrecognizedHeaders.length > 0 && (
+ <>
+ {localityText.localityImportUnrecognizedHeaders()}
+
+ {formatConjunction(
+ headerErrors.unrecognizedHeaders as RA<LocalizedString>
+ )}
+
+ >
+ )}
+ {localityText.localityImportedAcceptedHeaders()}
+
+ {formatConjunction(
+ Array.from(
+ localityImportAcceptedHeaders
+ ) as unknown as RA<LocalizedString>
+ )}
+
+ >
+
+ )}
+ {taskId === undefined ? undefined : (
+ setTaskId(undefined)}
+ />
+ )}
+ >
+ );
+}
diff --git a/specifyweb/frontend/js_src/lib/components/LocalityImport/types.ts b/specifyweb/frontend/js_src/lib/components/LocalityImport/types.ts
new file mode 100644
index 00000000000..8a925ff03da
--- /dev/null
+++ b/specifyweb/frontend/js_src/lib/components/LocalityImport/types.ts
@@ -0,0 +1,77 @@
+import type { State } from 'typesafe-reducer';
+
+import type { IR, RA } from '../../utils/types';
+import type { Tables } from '../DataModel/types';
+
+export type LocalityImportHeader = Exclude<
+ Lowercase<
+ | keyof Tables['GeoCoordDetail']['fields']
+ | keyof Tables['Locality']['fields']
+ >,
+ 'locality'
+>;
+
+export type LocalityImportParseError = {
+ readonly message: string;
+ readonly field: string;
+ readonly payload: IR<unknown>;
+ readonly rowNumber: number;
+};
+
+export type LocalityImportTaskStatus =
+ | 'ABORTED'
+ | 'FAILED'
+ | 'PARSING'
+ | 'PENDING'
+ | 'PROGRESS'
+ | 'SUCCEEDED';
+
+export type LocalityImportState =
+ | State<
+ 'ABORTED',
+ { readonly taskstatus: 'ABORTED'; readonly taskinfo: string }
+ >
+ | State<
+ 'FAILED',
+ {
+ readonly taskstatus: 'FAILED';
+ readonly taskinfo: {
+ readonly errors: RA<LocalityImportParseError>;
+ };
+ }
+ >
+ | State<
+ 'PARSING',
+ {
+ readonly taskstatus: 'PARSING';
+ readonly taskinfo: {
+ readonly current: number;
+ readonly total: number;
+ };
+ }
+ >
+ | State<
+ 'PENDING',
+ { readonly taskstatus: 'PENDING'; readonly taskinfo: 'None' }
+ >
+ | State<
+ 'PROGRESS',
+ {
+ readonly taskstatus: 'PROGRESS';
+ readonly taskinfo: {
+ readonly current: number;
+ readonly total: number;
+ };
+ }
+ >
+ | State<
+ 'SUCCEEDED',
+ {
+ readonly taskstatus: 'SUCCEEDED';
+ readonly taskinfo: {
+ readonly recordsetid: number;
+ readonly localities: RA<number>;
+ readonly geocoorddetails: RA<number>;
+ };
+ }
+ >;
diff --git a/specifyweb/frontend/js_src/lib/components/LocalityImport/utils.ts b/specifyweb/frontend/js_src/lib/components/LocalityImport/utils.ts
new file mode 100644
index 00000000000..f878bc5054d
--- /dev/null
+++ b/specifyweb/frontend/js_src/lib/components/LocalityImport/utils.ts
@@ -0,0 +1,63 @@
+import type { LocalizedString } from 'typesafe-i18n';
+
+import { commonText } from '../../localization/common';
+import { localityText } from '../../localization/locality';
+import type { IR, RA } from '../../utils/types';
+import { tables } from '../DataModel/tables';
+import type { Tables } from '../DataModel/types';
+import { resolveBackendParsingMessage } from '../WorkBench/resultsParser';
+import type { LocalityImportHeader, LocalityImportTaskStatus } from './types';
+
+export const localityImportAcceptedLocalityFields: RA<
+ Lowercase<keyof Tables['Locality']['fields']>
+> = ['guid', 'datum', 'latitude1', 'longitude1'];
+
+export const localityImportAcceptedHeaders = new Set([
+ ...localityImportAcceptedLocalityFields,
+ ...tables.GeoCoordDetail.literalFields
+ .map(({ name }) => name.toLowerCase())
+ .filter((header) => header !== 'locality'),
+]);
+
+export const localityImportRequiredHeaders = new Set([
+ 'guid',
+]);
+
+export const localityImportStatusLocalization: {
+ readonly [STATE in LocalityImportTaskStatus]: LocalizedString;
+} = {
+ PENDING: localityText.localityImportStarting(),
+ PARSING: localityText.localityImportParsing(),
+ PROGRESS: localityText.localityImportProgressing(),
+ FAILED: localityText.localityImportFailed(),
+ ABORTED: localityText.localityImportCancelled(),
+ SUCCEEDED: localityText.localityImportSucceeded(),
+};
+
+export function resolveImportLocalityErrorMessage(
+ key: string,
+ payload: IR<unknown>
+): LocalizedString {
+ const baseParseResults = resolveBackendParsingMessage(key, payload);
+
+ if (baseParseResults !== undefined) {
+ return baseParseResults;
+ } else if (key === 'guidHeaderNotProvided') {
+ return localityText.guidHeaderNotProvided();
+ } else if (key === 'noLocalityMatchingGuid') {
+ return localityText.noLocalityMatchingGuid({
+ guid: payload.guid as string,
+ });
+ } else if (key === 'multipleLocalitiesWithGuid') {
+ return localityText.multipleLocalitiesWithGuid({
+ guid: payload.guid as string,
+ localityIds: (payload.localityIds as RA<number>).join(', '),
+ });
+ } else {
+ return commonText.colonLine({
+ label: key,
+ value:
+ Object.keys(payload).length === 0 ? '' : `${JSON.stringify(payload)}`,
+ });
+ }
+}
diff --git a/specifyweb/frontend/js_src/lib/components/Router/Routes.tsx b/specifyweb/frontend/js_src/lib/components/Router/Routes.tsx
index 6e7ea11f56b..a6656eecea9 100644
--- a/specifyweb/frontend/js_src/lib/components/Router/Routes.tsx
+++ b/specifyweb/frontend/js_src/lib/components/Router/Routes.tsx
@@ -231,8 +231,8 @@ export const routes: RA = [
{
path: 'locality-dataset',
element: () =>
- import('../Header/ImportLocalitySet').then(
- ({ ImportLocalitySet }) => ImportLocalitySet
+ import('../LocalityImport').then(
+ ({ ImportLocalityDataSet: ImportLocalitySet }) => ImportLocalitySet
),
},
],
From 25b1dc596be71a53d9759442cd8f5f1a78058462 Mon Sep 17 00:00:00 2001
From: melton-jason
Date: Wed, 5 Jun 2024 12:28:14 -0500
Subject: [PATCH 43/71] Add notification renderers for locality import tool
---
.../lib/components/LocalityImport/Status.tsx | 137 +++++++++++-------
.../lib/components/LocalityImport/index.tsx | 4 +-
.../lib/components/LocalityImport/utils.ts | 25 ++--
.../js_src/lib/components/Merging/Status.tsx | 1 +
.../Notifications/NotificationRenderers.tsx | 73 ++++++++++
.../js_src/lib/localization/locality.ts | 15 +-
.../migrations/0006_localityimport.py | 2 +-
specifyweb/notifications/models.py | 2 +-
specifyweb/specify/import_locality.py | 11 +-
specifyweb/specify/urls.py | 4 +-
specifyweb/specify/views.py | 6 +-
11 files changed, 201 insertions(+), 79 deletions(-)
diff --git a/specifyweb/frontend/js_src/lib/components/LocalityImport/Status.tsx b/specifyweb/frontend/js_src/lib/components/LocalityImport/Status.tsx
index c83fabc30c0..548374ab966 100644
--- a/specifyweb/frontend/js_src/lib/components/LocalityImport/Status.tsx
+++ b/specifyweb/frontend/js_src/lib/components/LocalityImport/Status.tsx
@@ -1,6 +1,8 @@
import React from 'react';
import type { LocalizedString } from 'typesafe-i18n';
+import { useAsyncState } from '../../hooks/useAsyncState';
+import { useBooleanState } from '../../hooks/useBooleanState';
import { commonText } from '../../localization/common';
import { localityText } from '../../localization/locality';
import { mainText } from '../../localization/main';
@@ -9,6 +11,7 @@ import { queryText } from '../../localization/query';
import { schemaText } from '../../localization/schema';
import { ajax } from '../../utils/ajax';
import { ping } from '../../utils/ajax/ping';
+import { f } from '../../utils/functools';
import type { RA } from '../../utils/types';
import { localized } from '../../utils/types';
import { H2, Progress } from '../Atoms';
@@ -17,15 +20,15 @@ import { Label } from '../Atoms/Form';
import { Link } from '../Atoms/Link';
import { SECOND } from '../Atoms/timeUnits';
import { LoadingContext } from '../Core/Contexts';
-import type { SerializedResource } from '../DataModel/helperTypes';
import { fetchResource } from '../DataModel/resource';
import { tables } from '../DataModel/tables';
-import type { RecordSet } from '../DataModel/types';
import { softFail } from '../Errors/Crash';
+import { RecordSelectorFromIds } from '../FormSliders/RecordSelectorFromIds';
import { useTitle } from '../Molecules/AppTitle';
import { Dialog } from '../Molecules/Dialog';
import { TableIcon } from '../Molecules/TableIcon';
import { hasToolPermission } from '../Permissions/helpers';
+import { CreateRecordSet } from '../QueryBuilder/CreateRecordSet';
import { downloadDataSet } from '../WorkBench/helpers';
import { RemainingLoadingTime } from '../WorkBench/RemainingLoadingTime';
import { TableRecordCounts } from '../WorkBench/Results';
@@ -54,23 +57,14 @@ export function LocalityImportStatus({
taskinfo: 'None',
});
- const [recordSet, setRecordSet] = React.useState<
- SerializedResource | undefined
- >(undefined);
-
React.useEffect(() => {
let destructorCalled = false;
const fetchStatus = () =>
void ajax(`/api/localityset/status/${taskId}`, {
headers: { Accept: 'application/json' },
})
- .then(async ({ data }) => {
+ .then(({ data }) => {
setState(data);
- if (data.taskstatus === 'SUCCEEDED') {
- await fetchResource('RecordSet', data.taskinfo.recordsetid).then(
- setRecordSet
- );
- }
if (
!destructorCalled &&
(
@@ -87,6 +81,16 @@ export function LocalityImportStatus({
};
}, [taskId]);
+ const handleTaskCancel = React.useCallback(
+ () =>
+ loading(
+ ping(`/api/localityset/abort/${taskId}/`, {
+ method: 'POST',
+ }).catch(softFail)
+ ),
+ [taskId]
+ );
+
const loading = React.useContext(LoadingContext);
const title = localityImportStatusLocalization[state.taskstatus];
@@ -98,16 +102,15 @@ export function LocalityImportStatus({
) : state.taskstatus === 'SUCCEEDED' ? (
) : state.taskstatus === 'FAILED' ? (
@@ -118,15 +121,7 @@ export function LocalityImportStatus({
) : state.taskstatus === 'PENDING' ? (
- loading(
- ping(`/api/localityset/abort/${taskId}`, {
- method: 'POST',
- }).catch(softFail)
- )
- }
- >
+
{commonText.cancel()}
}
@@ -148,32 +143,23 @@ export function LocalityImportStatus({
function LocalityImportProgress({
header,
- taskId,
currentProgress,
total,
onClose: handleClose,
+ onTaskCancel: handleTaskCancel,
}: {
readonly header: LocalizedString;
- readonly taskId: string;
readonly currentProgress: number;
readonly total: number;
readonly onClose: () => void;
+ readonly onTaskCancel: () => void;
}): JSX.Element {
- const loading = React.useContext(LoadingContext);
const percentage = Math.round((currentProgress / total) * 100);
useTitle(localized(`${header} ${percentage}%`));
return (
- loading(
- ping(`/api/localityset/abort/${taskId}`, {
- method: 'POST',
- }).catch(softFail)
- )
- }
- >
+
{commonText.cancel()}
}
@@ -193,24 +179,34 @@ function LocalityImportProgress({
);
}
-function LocalityImportSuccess({
- header,
+export function LocalityImportSuccess({
localityIds,
geoCoordDetailIds,
- recordSet,
+ recordSetId,
onClose: handleClose,
}: {
- readonly header: LocalizedString;
readonly localityIds: RA;
readonly geoCoordDetailIds: RA;
- readonly recordSet: SerializedResource | undefined;
+ readonly recordSetId: number | undefined;
readonly onClose: () => void;
}): JSX.Element {
+ const [recordSet] = useAsyncState(
+ React.useCallback(
+ async () =>
+ recordSetId === undefined
+ ? undefined
+ : fetchResource('RecordSet', recordSetId, false),
+ [recordSetId]
+ ),
+ false
+ );
+
+ const [formsOpened, handleFormsOpened, handleFormsClosed] = useBooleanState();
+
return (
{commonText.close()}}
- dimensionsKey={statusDimensionKey}
- header={header}
+ header={localityImportStatusLocalization.SUCCEEDED}
modal={false}
onClose={handleClose}
>
@@ -230,21 +226,56 @@ function LocalityImportSuccess({
/>
- {queryText.viewRecords()}
{recordSet !== undefined && hasToolPermission('recordSets', 'read') && (
-
-
- {localized(recordSet.name)}
-
+ <>
+ {queryText.viewRecords()}
+
+
+ {localized(recordSet.name)}
+
+ >
+ )}
+ {recordSet === undefined && (
+
+
+ {queryText.browseInForms()}
+
+ {formsOpened && (
+
+ }
+ ids={localityIds}
+ isDependent={false}
+ isInRecordSet={false}
+ newResource={undefined}
+ table={tables.Locality}
+ title={localityText.localityImportResults()}
+ totalCount={localityIds.length}
+ onAdd={undefined}
+ onClone={undefined}
+ onClose={handleFormsClosed}
+ onDelete={undefined}
+ onSaved={f.void}
+ onSlide={undefined}
+ />
+ )}
+
)}
);
}
-function LocalityImportErrors({
+export function LocalityImportErrors({
errors,
onClose: handleClose,
}: {
@@ -288,7 +319,7 @@ function LocalityImportErrors({
>
}
dimensionsKey={statusDimensionKey}
- header={localityText.localityImportErrorDialogHeader()}
+ header={localityText.localityImportFailureResults()}
icon="error"
onClose={handleClose}
>
diff --git a/specifyweb/frontend/js_src/lib/components/LocalityImport/index.tsx b/specifyweb/frontend/js_src/lib/components/LocalityImport/index.tsx
index e8442b05b30..341c59dddb1 100644
--- a/specifyweb/frontend/js_src/lib/components/LocalityImport/index.tsx
+++ b/specifyweb/frontend/js_src/lib/components/LocalityImport/index.tsx
@@ -67,7 +67,7 @@ export function ImportLocalityDataSet(): JSX.Element {
.toLowerCase()
.trim() as LocalityImportHeader;
const isUnknown =
- !localityImportAcceptedHeaders.has(parsedHeader);
+ !localityImportAcceptedHeaders().has(parsedHeader);
return {
missingRequiredHeaders:
@@ -148,7 +148,7 @@ export function ImportLocalityDataSet(): JSX.Element {
{formatConjunction(
Array.from(
- localityImportAcceptedHeaders
+ localityImportAcceptedHeaders()
) as unknown as RA
)}
diff --git a/specifyweb/frontend/js_src/lib/components/LocalityImport/utils.ts b/specifyweb/frontend/js_src/lib/components/LocalityImport/utils.ts
index f878bc5054d..1ee90792154 100644
--- a/specifyweb/frontend/js_src/lib/components/LocalityImport/utils.ts
+++ b/specifyweb/frontend/js_src/lib/components/LocalityImport/utils.ts
@@ -2,7 +2,8 @@ import type { LocalizedString } from 'typesafe-i18n';
import { commonText } from '../../localization/common';
import { localityText } from '../../localization/locality';
-import type { IR, RA } from '../../utils/types';
+import { f } from '../../utils/functools';
+import type { IR, RA, RR } from '../../utils/types';
import { tables } from '../DataModel/tables';
import type { Tables } from '../DataModel/types';
import { resolveBackendParsingMessage } from '../WorkBench/resultsParser';
@@ -12,20 +13,24 @@ export const localityImportAcceptedLocalityFields: RA<
Lowercase
> = ['guid', 'datum', 'latitude1', 'longitude1'];
-export const localityImportAcceptedHeaders = new Set([
- ...localityImportAcceptedLocalityFields,
- ...tables.GeoCoordDetail.literalFields
- .map(({ name }) => name.toLowerCase())
- .filter((header) => header !== 'locality'),
-]);
+export const localityImportAcceptedHeaders = f.store(
+ () =>
+ new Set([
+ ...localityImportAcceptedLocalityFields,
+ ...tables.GeoCoordDetail.literalFields
+ .map(({ name }) => name.toLowerCase())
+ .filter((header) => header !== 'locality'),
+ ])
+);
export const localityImportRequiredHeaders = new Set([
'guid',
]);
-export const localityImportStatusLocalization: {
- readonly [STATE in LocalityImportTaskStatus]: LocalizedString;
-} = {
+export const localityImportStatusLocalization: RR<
+ LocalityImportTaskStatus,
+ LocalizedString
+> = {
PENDING: localityText.localityImportStarting(),
PARSING: localityText.localityImportParsing(),
PROGRESS: localityText.localityImportProgressing(),
diff --git a/specifyweb/frontend/js_src/lib/components/Merging/Status.tsx b/specifyweb/frontend/js_src/lib/components/Merging/Status.tsx
index 56982b84852..625dd371205 100644
--- a/specifyweb/frontend/js_src/lib/components/Merging/Status.tsx
+++ b/specifyweb/frontend/js_src/lib/components/Merging/Status.tsx
@@ -5,6 +5,7 @@ import { commonText } from '../../localization/common';
import { mergingText } from '../../localization/merging';
import { ajax } from '../../utils/ajax';
import { ping } from '../../utils/ajax/ping';
+import { RR } from '../../utils/types';
import { Progress } from '../Atoms';
import { Button } from '../Atoms/Button';
import { Label } from '../Atoms/Form';
diff --git a/specifyweb/frontend/js_src/lib/components/Notifications/NotificationRenderers.tsx b/specifyweb/frontend/js_src/lib/components/Notifications/NotificationRenderers.tsx
index 7fba0088032..d8fd01eb9e5 100644
--- a/specifyweb/frontend/js_src/lib/components/Notifications/NotificationRenderers.tsx
+++ b/specifyweb/frontend/js_src/lib/components/Notifications/NotificationRenderers.tsx
@@ -1,13 +1,20 @@
import React from 'react';
import type { LocalizedString } from 'typesafe-i18n';
+import { useBooleanState } from '../../hooks/useBooleanState';
+import { localityText } from '../../localization/locality';
import { mergingText } from '../../localization/merging';
import { notificationsText } from '../../localization/notifications';
import { StringToJsx } from '../../localization/utils';
import type { IR } from '../../utils/types';
+import { Button } from '../Atoms/Button';
import { Link } from '../Atoms/Link';
import { getTable } from '../DataModel/tables';
import { userInformation } from '../InitialContext/userInformation';
+import {
+ LocalityImportErrors,
+ LocalityImportSuccess,
+} from '../LocalityImport/Status';
import { mergingQueryParameter } from '../Merging/queryString';
import { FormattedResource } from '../Molecules/FormattedResource';
import { TableIcon } from '../Molecules/TableIcon';
@@ -202,6 +209,72 @@ export const notificationRenderers: IR<
)
);
},
+ 'localityimport-starting'(notification) {
+ return (
+ <>
+ {localityText.localityImportStarted()}
+
+ {localityText.taskId()}
+ {notification.payload.taskid}
+
+ >
+ );
+ },
+ 'localityimport-failed'(notification) {
+ const [isOpen, handleOpen, handleClose] = useBooleanState();
+ return (
+ <>
+ {localityText.localityImportFailed()}
+
+ {localityText.localityImportFailureResults()}
+
+ {isOpen && (
+
+ )}
+
+ {localityText.taskId()}
+ {notification.payload.taskid}
+
+ >
+ );
+ },
+ 'localityimport-aborted'(notification) {
+ return (
+ <>
+ {localityText.localityImportCancelled()}
+
+ {localityText.taskId()}
+ {notification.payload.taskid}
+
+ >
+ );
+ },
+ 'localityimport-succeeded'(notification) {
+ const [isOpen, handleOpen, handleClose] = useBooleanState();
+ return (
+ <>
+ {localityText.localityImportSucceeded()}
+
+ {localityText.localityImportResults()}
+
+ {isOpen && (
+
+ )}
+
+ {localityText.taskId()}
+ {notification.payload.taskid}
+
+ >
+ );
+ },
default(notification) {
console.error('Unknown notification type', { notification });
return {JSON.stringify(notification, null, 2)};
diff --git a/specifyweb/frontend/js_src/lib/localization/locality.ts b/specifyweb/frontend/js_src/lib/localization/locality.ts
index 3b76459c589..efc19d2b155 100644
--- a/specifyweb/frontend/js_src/lib/localization/locality.ts
+++ b/specifyweb/frontend/js_src/lib/localization/locality.ts
@@ -323,9 +323,6 @@ export const localityText = createDictionary({
localityImportSucceeded: {
'en-us': 'Locality Data Set Import Succeeded',
},
- localityImportErrorDialogHeader: {
- 'en-us': 'Error(s) Occured while Parsing Dataset',
- },
localityImportErrorFileName: {
comment:
'The file name which is used when Parse Errors are exported. The .csv file extension is appended to the end of this string',
@@ -348,4 +345,16 @@ export const localityText = createDictionary({
'en-us':
'The following number of {localityTabelLabel: string} records were updated and {geoCoordDetailTableLabel: string} records were created:',
},
+ localityImportStarted: {
+ 'en-us': 'The Locality Data Set Import process has started',
+ },
+ localityImportResults: {
+ 'en-us': 'Locality Data Set Upload Results',
+ },
+ localityImportFailureResults: {
+ 'en-us': 'Locality Data Set Failure Results',
+ },
+ taskId: {
+ 'en-us': 'Task ID',
+ },
} as const);
diff --git a/specifyweb/notifications/migrations/0006_localityimport.py b/specifyweb/notifications/migrations/0006_localityimport.py
index 08bc6f27577..1620b319a13 100644
--- a/specifyweb/notifications/migrations/0006_localityimport.py
+++ b/specifyweb/notifications/migrations/0006_localityimport.py
@@ -23,7 +23,7 @@ class Migration(migrations.Migration):
('status', models.CharField(max_length=256)),
('result', models.JSONField(null=True)),
('collection', models.ForeignKey(db_column='CollectionID', on_delete=django.db.models.deletion.CASCADE, to='specify.collection')),
- ('recordset', models.ForeignKey(blank=True, db_column='RecordSetID', null=True, on_delete=django.db.models.deletion.CASCADE, to='specify.recordset')),
+ ('recordset', models.ForeignKey(blank=True, db_column='RecordSetID', null=True, on_delete=django.db.models.deletion.SET_NULL, to='specify.recordset')),
('specifyuser', models.ForeignKey(db_column='SpecifyUserID', on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
('createdbyagent', models.ForeignKey(db_column='CreatedByAgentID', null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to='specify.agent')),
('modifiedbyagent', models.ForeignKey(db_column='ModifiedByAgentID', null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to='specify.agent')),
diff --git a/specifyweb/notifications/models.py b/specifyweb/notifications/models.py
index 8010d096aa4..69f55a97f2d 100644
--- a/specifyweb/notifications/models.py
+++ b/specifyweb/notifications/models.py
@@ -42,7 +42,7 @@ class LocalityImport(AsyncTask):
id = models.AutoField('localityimportid', primary_key=True, db_column='localityimportid')
result = models.JSONField(null=True)
recordset = models.ForeignKey(
- Recordset, null=True, blank=True, db_column="RecordSetID", on_delete=models.CASCADE)
+ Recordset, null=True, blank=True, db_column="RecordSetID", on_delete=models.SET_NULL)
class Meta:
db_table = 'localityimport'
\ No newline at end of file
diff --git a/specifyweb/specify/import_locality.py b/specifyweb/specify/import_locality.py
index 0daf2d91319..b2bad739e08 100644
--- a/specifyweb/specify/import_locality.py
+++ b/specifyweb/specify/import_locality.py
@@ -3,7 +3,7 @@
from typing import get_args as get_typing_args, Any, Dict, List, Tuple, Literal, Optional, NamedTuple, Union, Callable, TypedDict
from datetime import datetime
from django.db import transaction
-from celery.exceptions import Ignore, TaskRevokedError
+from celery.exceptions import Ignore
import specifyweb.specify.models as spmodels
@@ -57,7 +57,8 @@ def progress(state, current: int, total: int):
li = LocalityImport.objects.get(taskid=self.request.id)
if results['type'] == 'ParseError':
- self.update_state(LocalityImportStatus.FAILED, meta={"errors": results['errors']})
+ self.update_state(LocalityImportStatus.FAILED, meta={
+ "errors": results['errors']})
li.status = LocalityImportStatus.FAILED
li.result = json.dumps(results['errors'])
Message.objects.create(user=li.specifyuser, content=json.dumps({
@@ -83,7 +84,8 @@ def progress(state, current: int, total: int):
'type': 'localityimport-succeeded',
'taskid': li.taskid,
'recordsetid': recordset_id,
- 'localities': json.dumps(results['localities'])
+ 'localities': json.dumps(results['localities']),
+ 'geocoorddetails': json.dumps(results["geocoorddetails"])
}))
li.save()
@@ -217,7 +219,8 @@ class UploadParseError(TypedDict):
def upload_locality_set(collection, column_headers: List[str], data: List[List[str]], progress: Optional[Progress] = None) -> Union[UploadSuccess, UploadParseError]:
- to_upload, errors = parse_locality_set(collection, column_headers, data, progress)
+ to_upload, errors = parse_locality_set(
+ collection, column_headers, data, progress)
result = {
"type": None,
}
diff --git a/specifyweb/specify/urls.py b/specifyweb/specify/urls.py
index bd6285e1d87..e97f29e00c0 100644
--- a/specifyweb/specify/urls.py
+++ b/specifyweb/specify/urls.py
@@ -44,8 +44,8 @@
url(r'^localityset/', include([
url(r'^parse/$', views.parse_locality_set),
url(r'^import/$', views.upload_locality_set),
- url(r'^status/(?P[0-9a-fA-F-]+)$', views.localityimport_status),
- url(r'^abort/(?P[0-9a-fA-F-]+)$', views.abort_localityimport_task),
+ url(r'^status/(?P[0-9a-fA-F-]+)/$', views.localityimport_status),
+ url(r'^abort/(?P[0-9a-fA-F-]+)/$', views.abort_localityimport_task),
])),
# generates Sp6 master key
diff --git a/specifyweb/specify/views.py b/specifyweb/specify/views.py
index 51c7729223f..15c59e90131 100644
--- a/specifyweb/specify/views.py
+++ b/specifyweb/specify/views.py
@@ -968,7 +968,6 @@ def localityimport_status(request: http.HttpRequest, taskid: str):
resolved_state = LocalityImportStatus.ABORTED if result.state == CELERY_TASK_STATE.REVOKED else result.state
-
status = {
'taskstatus': resolved_state,
'taskinfo': result.info if isinstance(result.info, dict) else repr(result.info)
@@ -985,9 +984,10 @@ def localityimport_status(request: http.HttpRequest, taskid: str):
"localities": success_result["localities"],
"geocoorddetails": success_result["geocoorddetails"]
}
-
+
return http.JsonResponse(status, safe=False)
+
@openapi(schema={
'post': {
'responses': {
@@ -1042,7 +1042,7 @@ def abort_localityimport_task(request: http.HttpRequest, taskid: str):
Message.objects.create(user=request.specify_user, content=json.dumps({
'type': 'localityimport-aborted',
- 'task_id': taskid
+ 'taskid': taskid
}))
result["type"] = "ABORTED"
result["message"] = f'Task {locality_import.taskid} has been aborted.'
From b935df73e0fa7036ca53f3a7f656316232c9c5b3 Mon Sep 17 00:00:00 2001
From: melton-jason
Date: Wed, 5 Jun 2024 12:34:55 -0500
Subject: [PATCH 44/71] Use 1-index start for rowNumbers
---
specifyweb/specify/import_locality.py | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/specifyweb/specify/import_locality.py b/specifyweb/specify/import_locality.py
index b2bad739e08..ea25b1fe530 100644
--- a/specifyweb/specify/import_locality.py
+++ b/specifyweb/specify/import_locality.py
@@ -140,7 +140,7 @@ def parse_locality_set(collection, raw_headers: List[str], data: List[List[str]]
processed = 0
total = len(data)
- for row_mumber, row in enumerate(data):
+ for row_mumber, row in enumerate(data, start=1):
guid = row[guid_index]
locality_query = spmodels.Locality.objects.filter(guid=guid)
if len(locality_query) == 0:
From 94c2e0305463406183d871f83da1982245721e6e Mon Sep 17 00:00:00 2001
From: melton-jason
Date: Thu, 6 Jun 2024 11:17:25 -0500
Subject: [PATCH 45/71] Merge Parsing and Upload states
---
.../js_src/lib/components/LocalityImport/Status.tsx | 10 ++++------
.../js_src/lib/components/LocalityImport/types.ts | 11 -----------
specifyweb/specify/import_locality.py | 10 +++++-----
specifyweb/specify/views.py | 5 +----
4 files changed, 10 insertions(+), 26 deletions(-)
diff --git a/specifyweb/frontend/js_src/lib/components/LocalityImport/Status.tsx b/specifyweb/frontend/js_src/lib/components/LocalityImport/Status.tsx
index 548374ab966..b0bdc150965 100644
--- a/specifyweb/frontend/js_src/lib/components/LocalityImport/Status.tsx
+++ b/specifyweb/frontend/js_src/lib/components/LocalityImport/Status.tsx
@@ -67,9 +67,9 @@ export function LocalityImportStatus({
setState(data);
if (
!destructorCalled &&
- (
- ['PROGRESS', 'PARSING', 'PENDING'] as RA
- ).includes(data.taskstatus)
+ (['PROGRESS', 'PENDING'] as RA).includes(
+ data.taskstatus
+ )
)
globalThis.setTimeout(fetchStatus, SECOND);
})
@@ -96,9 +96,7 @@ export function LocalityImportStatus({
const title = localityImportStatusLocalization[state.taskstatus];
useTitle(title);
- return (['PARSING', 'PROGRESS'] as RA).includes(
- state.taskstatus
- ) ? (
+ return state.taskstatus === 'PROGRESS' ? (
- | State<
- 'PARSING',
- {
- readonly taskstatus: 'PARSING';
- readonly taskinfo: {
- readonly current: number;
- readonly total: number;
- };
- }
- >
| State<
'PENDING',
{ readonly taskstatus: 'PENDING'; readonly taskinfo: 'None' }
diff --git a/specifyweb/specify/import_locality.py b/specifyweb/specify/import_locality.py
index ea25b1fe530..cf148fde430 100644
--- a/specifyweb/specify/import_locality.py
+++ b/specifyweb/specify/import_locality.py
@@ -34,7 +34,6 @@
class LocalityImportStatus:
PENDING = 'PENDING'
- PARSING = 'PARSING'
PROGRESS = 'PROGRESS'
SUCCEEDED = 'SUCCEEDED'
ABORTED = 'ABORTED'
@@ -138,7 +137,7 @@ def parse_locality_set(collection, raw_headers: List[str], data: List[List[str]]
field)} for field in headers if field.lower() in updatable_geocoorddetail_fields]
processed = 0
- total = len(data)
+ total = len(data) * 2
for row_mumber, row in enumerate(data, start=1):
guid = row[guid_index]
@@ -182,7 +181,7 @@ def parse_locality_set(collection, raw_headers: List[str], data: List[List[str]]
if progress is not None:
processed += 1
- progress(LocalityImportStatus.PARSING, processed, total)
+ progress(LocalityImportStatus.PROGRESS, processed, total)
return to_upload, errors
@@ -234,8 +233,8 @@ def upload_locality_set(collection, column_headers: List[str], data: List[List[s
result["localities"] = []
result["geocoorddetails"] = []
- processed = 0
- total = len(to_upload)
+ processed = len(to_upload)
+ total = len(to_upload) * 2
with transaction.atomic():
for parse_success in to_upload:
@@ -262,6 +261,7 @@ def upload_locality_set(collection, column_headers: List[str], data: List[List[s
setattr(locality, field, value)
locality.save()
result["localities"].append(locality_id)
+
if progress is not None:
processed += 1
progress(LocalityImportStatus.PROGRESS, processed, total)
diff --git a/specifyweb/specify/views.py b/specifyweb/specify/views.py
index 15c59e90131..58ef0a30b81 100644
--- a/specifyweb/specify/views.py
+++ b/specifyweb/specify/views.py
@@ -24,9 +24,6 @@
from specifyweb.specify.import_locality import localityParseErrorMessages, parse_locality_set as _parse_locality_set, import_locality_task, LocalityImportStatus
from . import api, models as spmodels
from .specify_jar import specify_jar
-from celery.utils.log import get_task_logger # type: ignore
-logger = get_task_logger(__name__)
-
def login_maybe_required(view):
@wraps(view)
@@ -883,7 +880,7 @@ def upload_locality_set(request: http.HttpRequest):
"properties": {
"taskstatus": {
"type": "string",
- "enum": [LocalityImportStatus.PARSING, LocalityImportStatus.PROGRESS]
+ "enum": [LocalityImportStatus.PROGRESS]
},
"taskinfo": {
"type": "object",
From 1a75a2e4c19246264f14cc3fdc963354a9167f4d Mon Sep 17 00:00:00 2001
From: melton-jason
Date: Thu, 6 Jun 2024 11:17:58 -0500
Subject: [PATCH 46/71] Always parse the locality and geocoorddetail results as
objects
---
specifyweb/specify/views.py | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/specifyweb/specify/views.py b/specifyweb/specify/views.py
index 58ef0a30b81..b2ae28af1ad 100644
--- a/specifyweb/specify/views.py
+++ b/specifyweb/specify/views.py
@@ -978,8 +978,8 @@ def localityimport_status(request: http.HttpRequest, taskid: str):
success_result = json.loads(locality_import.result)
status["taskinfo"] = {
"recordsetid": success_result["recordsetid"],
- "localities": success_result["localities"],
- "geocoorddetails": success_result["geocoorddetails"]
+ "localities": json.loads(success_result["localities"]),
+ "geocoorddetails": json.loads(success_result["geocoorddetails"])
}
return http.JsonResponse(status, safe=False)
From fcaf8390c88b4d4471d4d554b8b89ac434d665da Mon Sep 17 00:00:00 2001
From: melton-jason
Date: Thu, 6 Jun 2024 11:22:37 -0500
Subject: [PATCH 47/71] Don't sync the Error dialog dimensions with progress
dialogs
---
.../frontend/js_src/lib/components/LocalityImport/Status.tsx | 1 -
1 file changed, 1 deletion(-)
diff --git a/specifyweb/frontend/js_src/lib/components/LocalityImport/Status.tsx b/specifyweb/frontend/js_src/lib/components/LocalityImport/Status.tsx
index b0bdc150965..ef18639db56 100644
--- a/specifyweb/frontend/js_src/lib/components/LocalityImport/Status.tsx
+++ b/specifyweb/frontend/js_src/lib/components/LocalityImport/Status.tsx
@@ -316,7 +316,6 @@ export function LocalityImportErrors({
>
}
- dimensionsKey={statusDimensionKey}
header={localityText.localityImportFailureResults()}
icon="error"
onClose={handleClose}
From a3ad3be31e6ea94dbd1d6eeee02d100e67c4641e Mon Sep 17 00:00:00 2001
From: melton-jason
Date: Thu, 6 Jun 2024 12:34:52 -0500
Subject: [PATCH 48/71] Improve appearance of the Parse Error dialog
---
.../lib/components/LocalityImport/Status.tsx | 17 ++++++++++++++---
.../js_src/lib/localization/locality.ts | 3 ---
2 files changed, 14 insertions(+), 6 deletions(-)
diff --git a/specifyweb/frontend/js_src/lib/components/LocalityImport/Status.tsx b/specifyweb/frontend/js_src/lib/components/LocalityImport/Status.tsx
index ef18639db56..60c8bc66e41 100644
--- a/specifyweb/frontend/js_src/lib/components/LocalityImport/Status.tsx
+++ b/specifyweb/frontend/js_src/lib/components/LocalityImport/Status.tsx
@@ -7,6 +7,7 @@ import { commonText } from '../../localization/common';
import { localityText } from '../../localization/locality';
import { mainText } from '../../localization/main';
import { notificationsText } from '../../localization/notifications';
+import { preferencesText } from '../../localization/preferences';
import { queryText } from '../../localization/query';
import { schemaText } from '../../localization/schema';
import { ajax } from '../../utils/ajax';
@@ -294,7 +295,7 @@ export function LocalityImportErrors({
})}.csv`;
const columns = [
- localityText.rowNumber(),
+ preferencesText.row(),
schemaText.field(),
mainText.errorMessage(),
];
@@ -320,10 +321,20 @@ export function LocalityImportErrors({
icon="error"
onClose={handleClose}
>
-
+
- | {localityText.rowNumber()} |
+ {preferencesText.row()} |
{schemaText.field()} |
{mainText.errorMessage()} |
diff --git a/specifyweb/frontend/js_src/lib/localization/locality.ts b/specifyweb/frontend/js_src/lib/localization/locality.ts
index efc19d2b155..c0b25d29867 100644
--- a/specifyweb/frontend/js_src/lib/localization/locality.ts
+++ b/specifyweb/frontend/js_src/lib/localization/locality.ts
@@ -338,9 +338,6 @@ export const localityText = createDictionary({
'en-us':
'More than one Locality found with guid: {guid:string}. Locality IDs: {localityIds: string}',
},
- rowNumber: {
- 'en-us': 'Row Number',
- },
localityUploadedDescription: {
'en-us':
'The following number of {localityTabelLabel: string} records were updated and {geoCoordDetailTableLabel: string} records were created:',
From 67dee64185bc01172ff28f5ace89b090cf346f61 Mon Sep 17 00:00:00 2001
From: melton-jason
Date: Thu, 6 Jun 2024 12:56:25 -0500
Subject: [PATCH 49/71] Resolve Typescript errors
---
.../frontend/js_src/lib/components/LocalityImport/Status.tsx | 2 ++
.../frontend/js_src/lib/components/LocalityImport/utils.ts | 1 -
.../lib/components/Notifications/NotificationRenderers.tsx | 2 +-
specifyweb/frontend/js_src/lib/localization/locality.ts | 3 ---
4 files changed, 3 insertions(+), 5 deletions(-)
diff --git a/specifyweb/frontend/js_src/lib/components/LocalityImport/Status.tsx b/specifyweb/frontend/js_src/lib/components/LocalityImport/Status.tsx
index 60c8bc66e41..f8d807b52f0 100644
--- a/specifyweb/frontend/js_src/lib/components/LocalityImport/Status.tsx
+++ b/specifyweb/frontend/js_src/lib/components/LocalityImport/Status.tsx
@@ -124,6 +124,7 @@ export function LocalityImportStatus({
{commonText.cancel()}
}
+ children={null}
dimensionsKey={statusDimensionKey}
header={title}
modal={false}
@@ -132,6 +133,7 @@ export function LocalityImportStatus({
) : (
{commonText.close()}}
+ children={null}
dimensionsKey={statusDimensionKey}
header={title}
modal={false}
diff --git a/specifyweb/frontend/js_src/lib/components/LocalityImport/utils.ts b/specifyweb/frontend/js_src/lib/components/LocalityImport/utils.ts
index 1ee90792154..49911d1781e 100644
--- a/specifyweb/frontend/js_src/lib/components/LocalityImport/utils.ts
+++ b/specifyweb/frontend/js_src/lib/components/LocalityImport/utils.ts
@@ -32,7 +32,6 @@ export const localityImportStatusLocalization: RR<
LocalizedString
> = {
PENDING: localityText.localityImportStarting(),
- PARSING: localityText.localityImportParsing(),
PROGRESS: localityText.localityImportProgressing(),
FAILED: localityText.localityImportFailed(),
ABORTED: localityText.localityImportCancelled(),
diff --git a/specifyweb/frontend/js_src/lib/components/Notifications/NotificationRenderers.tsx b/specifyweb/frontend/js_src/lib/components/Notifications/NotificationRenderers.tsx
index d8fd01eb9e5..551c8ea8955 100644
--- a/specifyweb/frontend/js_src/lib/components/Notifications/NotificationRenderers.tsx
+++ b/specifyweb/frontend/js_src/lib/components/Notifications/NotificationRenderers.tsx
@@ -264,7 +264,7 @@ export const notificationRenderers: IR<
)}
diff --git a/specifyweb/frontend/js_src/lib/localization/locality.ts b/specifyweb/frontend/js_src/lib/localization/locality.ts
index c0b25d29867..b868d0ca613 100644
--- a/specifyweb/frontend/js_src/lib/localization/locality.ts
+++ b/specifyweb/frontend/js_src/lib/localization/locality.ts
@@ -308,9 +308,6 @@ export const localityText = createDictionary({
localityImportStarting: {
'en-us': 'Starting Locality Data Set Import',
},
- localityImportParsing: {
- 'en-us': 'Parsing Locality Data Set',
- },
localityImportProgressing: {
'en-us': 'Importing Locality Data Set',
},
From 2c2be2308920fb45437d1bccbb8fbe0750551415 Mon Sep 17 00:00:00 2001
From: melton-jason
Date: Thu, 6 Jun 2024 14:14:16 -0500
Subject: [PATCH 50/71] Remove unused import
---
specifyweb/frontend/js_src/lib/components/Merging/Status.tsx | 1 -
specifyweb/specify/views.py | 1 +
2 files changed, 1 insertion(+), 1 deletion(-)
diff --git a/specifyweb/frontend/js_src/lib/components/Merging/Status.tsx b/specifyweb/frontend/js_src/lib/components/Merging/Status.tsx
index 625dd371205..56982b84852 100644
--- a/specifyweb/frontend/js_src/lib/components/Merging/Status.tsx
+++ b/specifyweb/frontend/js_src/lib/components/Merging/Status.tsx
@@ -5,7 +5,6 @@ import { commonText } from '../../localization/common';
import { mergingText } from '../../localization/merging';
import { ajax } from '../../utils/ajax';
import { ping } from '../../utils/ajax/ping';
-import { RR } from '../../utils/types';
import { Progress } from '../Atoms';
import { Button } from '../Atoms/Button';
import { Label } from '../Atoms/Form';
diff --git a/specifyweb/specify/views.py b/specifyweb/specify/views.py
index b2ae28af1ad..7bd4c4cf7bd 100644
--- a/specifyweb/specify/views.py
+++ b/specifyweb/specify/views.py
@@ -25,6 +25,7 @@
from . import api, models as spmodels
from .specify_jar import specify_jar
+
def login_maybe_required(view):
@wraps(view)
def wrapped(request, *args, **kwargs):
From 14a4987662b7d30e26f93c6ae7e5c512f9040210 Mon Sep 17 00:00:00 2001
From: melton-jason
Date: Thu, 6 Jun 2024 15:30:42 -0500
Subject: [PATCH 51/71] Don't show loading dialog while cancelling task
---
.../js_src/lib/components/LocalityImport/Status.tsx | 10 +++-------
1 file changed, 3 insertions(+), 7 deletions(-)
diff --git a/specifyweb/frontend/js_src/lib/components/LocalityImport/Status.tsx b/specifyweb/frontend/js_src/lib/components/LocalityImport/Status.tsx
index f8d807b52f0..fbd6af260bd 100644
--- a/specifyweb/frontend/js_src/lib/components/LocalityImport/Status.tsx
+++ b/specifyweb/frontend/js_src/lib/components/LocalityImport/Status.tsx
@@ -84,16 +84,12 @@ export function LocalityImportStatus({
const handleTaskCancel = React.useCallback(
() =>
- loading(
- ping(`/api/localityset/abort/${taskId}/`, {
- method: 'POST',
- }).catch(softFail)
- ),
+ void ping(`/api/localityset/abort/${taskId}/`, {
+ method: 'POST',
+ }).catch(softFail),
[taskId]
);
- const loading = React.useContext(LoadingContext);
-
const title = localityImportStatusLocalization[state.taskstatus];
useTitle(title);
From e97b52663bc6753181a686bb739d0b220b19cf00 Mon Sep 17 00:00:00 2001
From: melton-jason
Date: Fri, 7 Jun 2024 11:36:07 -0500
Subject: [PATCH 52/71] Add option to disable 'First Row is Header' for
CsvFilePreview
---
.../lib/components/LocalityImport/index.tsx | 1 +
.../components/Molecules/CsvFilePicker.tsx | 6 ++++--
.../lib/components/Molecules/FilePicker.tsx | 20 ++++++++++++-------
3 files changed, 18 insertions(+), 9 deletions(-)
diff --git a/specifyweb/frontend/js_src/lib/components/LocalityImport/index.tsx b/specifyweb/frontend/js_src/lib/components/LocalityImport/index.tsx
index 341c59dddb1..60149181f14 100644
--- a/specifyweb/frontend/js_src/lib/components/LocalityImport/index.tsx
+++ b/specifyweb/frontend/js_src/lib/components/LocalityImport/index.tsx
@@ -59,6 +59,7 @@ export function ImportLocalityDataSet(): JSX.Element {
return (
<>
{
const foundHeaderErrors = headers.reduce(
diff --git a/specifyweb/frontend/js_src/lib/components/Molecules/CsvFilePicker.tsx b/specifyweb/frontend/js_src/lib/components/Molecules/CsvFilePicker.tsx
index 183b1a8eaf5..7d574550fe2 100644
--- a/specifyweb/frontend/js_src/lib/components/Molecules/CsvFilePicker.tsx
+++ b/specifyweb/frontend/js_src/lib/components/Molecules/CsvFilePicker.tsx
@@ -22,9 +22,11 @@ import { FilePicker, Layout } from './FilePicker';
export function CsvFilePicker({
header,
+ firstRowAlwaysHeader = false,
onFileImport: handleFileImport,
}: {
readonly header: LocalizedString;
+ readonly firstRowAlwaysHeader?: boolean;
readonly onFileImport: (
headers: RA,
data: RA>
@@ -47,7 +49,7 @@ export function CsvFilePicker({
{typeof file === 'object' && (
{
loading(
parseCsv(file, encoding, getSetDelimiter).then((data) => {
@@ -70,7 +72,7 @@ export function CsvFilePreview({
onFileImport: handleFileImport,
}: {
readonly file: File;
- readonly getSetHasHeader: GetOrSet;
+ readonly getSetHasHeader?: GetOrSet;
readonly children?: JSX.Element | undefined;
readonly onFileImport: ({
data,
diff --git a/specifyweb/frontend/js_src/lib/components/Molecules/FilePicker.tsx b/specifyweb/frontend/js_src/lib/components/Molecules/FilePicker.tsx
index 2a652ae3c1c..f70445b563d 100644
--- a/specifyweb/frontend/js_src/lib/components/Molecules/FilePicker.tsx
+++ b/specifyweb/frontend/js_src/lib/components/Molecules/FilePicker.tsx
@@ -6,6 +6,7 @@ import { attachmentsText } from '../../localization/attachments';
import { commonText } from '../../localization/common';
import { wbText } from '../../localization/workbench';
import type { GetOrSet, RA } from '../../utils/types';
+import { SET } from '../../utils/utils';
import { H3 } from '../Atoms';
import { Button } from '../Atoms/Button';
import { className } from '../Atoms/className';
@@ -185,24 +186,29 @@ export const fileToText = async (
export function Layout({
preview,
- getSetHasHeader: [hasHeader = true, setHasHeader],
+ getSetHasHeader,
children,
onFileImport: handleFileImport,
}: {
readonly preview: LocalizedString | RA> | undefined;
- readonly getSetHasHeader: GetOrSet;
+ readonly getSetHasHeader?: GetOrSet;
readonly children?: JSX.Element | RA;
readonly onFileImport: (hasHeader: boolean) => void;
}): JSX.Element {
+ const [hasHeader = true] =
+ getSetHasHeader === undefined ? [undefined] : getSetHasHeader;
+
return (
<>
{children}
-
+ {getSetHasHeader !== undefined && (
+
+ )}
Date: Fri, 7 Jun 2024 11:36:40 -0500
Subject: [PATCH 53/71] Improve appearance of LocalityImport Error dialog
---
.../lib/components/LocalityImport/Status.tsx | 14 +++++++++++---
1 file changed, 11 insertions(+), 3 deletions(-)
diff --git a/specifyweb/frontend/js_src/lib/components/LocalityImport/Status.tsx b/specifyweb/frontend/js_src/lib/components/LocalityImport/Status.tsx
index fbd6af260bd..18a280035bc 100644
--- a/specifyweb/frontend/js_src/lib/components/LocalityImport/Status.tsx
+++ b/specifyweb/frontend/js_src/lib/components/LocalityImport/Status.tsx
@@ -25,6 +25,7 @@ import { fetchResource } from '../DataModel/resource';
import { tables } from '../DataModel/tables';
import { softFail } from '../Errors/Crash';
import { RecordSelectorFromIds } from '../FormSliders/RecordSelectorFromIds';
+import { mergeCellBackground } from '../Merging/Header';
import { useTitle } from '../Molecules/AppTitle';
import { Dialog } from '../Molecules/Dialog';
import { TableIcon } from '../Molecules/TableIcon';
@@ -317,6 +318,7 @@ export function LocalityImportErrors({
}
header={localityText.localityImportFailureResults()}
icon="error"
+ specialMode="noGradient"
onClose={handleClose}
>
- | {preferencesText.row()} |
- {schemaText.field()} |
- {mainText.errorMessage()} |
+
+ {preferencesText.row()}
+ |
+
+ {schemaText.field()}
+ |
+
+ {mainText.errorMessage()}
+ |
{errors.map(({ rowNumber, field, message, payload }, index) => (
From 7c4360e763057b83a4302faf95ec4f97ac1ca3c3 Mon Sep 17 00:00:00 2001
From: melton-jason
Date: Mon, 10 Jun 2024 07:34:41 -0500
Subject: [PATCH 54/71] Restore Parsing State
---
.../js_src/lib/components/LocalityImport/Status.tsx | 8 ++++----
.../js_src/lib/components/LocalityImport/types.ts | 11 +++++++++++
.../js_src/lib/components/LocalityImport/utils.ts | 1 +
.../frontend/js_src/lib/localization/locality.ts | 3 +++
specifyweb/specify/import_locality.py | 9 +++++----
specifyweb/specify/views.py | 4 ++--
6 files changed, 26 insertions(+), 10 deletions(-)
diff --git a/specifyweb/frontend/js_src/lib/components/LocalityImport/Status.tsx b/specifyweb/frontend/js_src/lib/components/LocalityImport/Status.tsx
index 18a280035bc..804b5ff0b68 100644
--- a/specifyweb/frontend/js_src/lib/components/LocalityImport/Status.tsx
+++ b/specifyweb/frontend/js_src/lib/components/LocalityImport/Status.tsx
@@ -69,9 +69,9 @@ export function LocalityImportStatus({
setState(data);
if (
!destructorCalled &&
- (['PROGRESS', 'PENDING'] as RA).includes(
- data.taskstatus
- )
+ (
+ ['PENDING', 'PARSING', 'PROGRESS'] as RA
+ ).includes(data.taskstatus)
)
globalThis.setTimeout(fetchStatus, SECOND);
})
@@ -94,7 +94,7 @@ export function LocalityImportStatus({
const title = localityImportStatusLocalization[state.taskstatus];
useTitle(title);
- return state.taskstatus === 'PROGRESS' ? (
+ return state.taskstatus === 'PARSING' || state.taskstatus === 'PROGRESS' ? (
+ | State<
+ 'PARSING',
+ {
+ readonly taskstatus: 'PARSING';
+ readonly taskinfo: {
+ readonly current: number;
+ readonly total: number;
+ };
+ }
+ >
| State<
'PENDING',
{ readonly taskstatus: 'PENDING'; readonly taskinfo: 'None' }
diff --git a/specifyweb/frontend/js_src/lib/components/LocalityImport/utils.ts b/specifyweb/frontend/js_src/lib/components/LocalityImport/utils.ts
index 49911d1781e..d2731ce9804 100644
--- a/specifyweb/frontend/js_src/lib/components/LocalityImport/utils.ts
+++ b/specifyweb/frontend/js_src/lib/components/LocalityImport/utils.ts
@@ -31,6 +31,7 @@ export const localityImportStatusLocalization: RR<
LocalityImportTaskStatus,
LocalizedString
> = {
+ PARSING: localityText.localityImportParsing(),
PENDING: localityText.localityImportStarting(),
PROGRESS: localityText.localityImportProgressing(),
FAILED: localityText.localityImportFailed(),
diff --git a/specifyweb/frontend/js_src/lib/localization/locality.ts b/specifyweb/frontend/js_src/lib/localization/locality.ts
index 3d79b81127f..78d4ddd4adf 100644
--- a/specifyweb/frontend/js_src/lib/localization/locality.ts
+++ b/specifyweb/frontend/js_src/lib/localization/locality.ts
@@ -308,6 +308,9 @@ export const localityText = createDictionary({
localityImportStarting: {
'en-us': 'Starting Locality Data Set Import',
},
+ localityImportParsing: {
+ 'en-us': 'Parsing Locality Data Set',
+ },
localityImportProgressing: {
'en-us': 'Importing Locality Data Set',
},
diff --git a/specifyweb/specify/import_locality.py b/specifyweb/specify/import_locality.py
index cf148fde430..4b1c8c167dc 100644
--- a/specifyweb/specify/import_locality.py
+++ b/specifyweb/specify/import_locality.py
@@ -34,6 +34,7 @@
class LocalityImportStatus:
PENDING = 'PENDING'
+ PARSING = 'PARSING'
PROGRESS = 'PROGRESS'
SUCCEEDED = 'SUCCEEDED'
ABORTED = 'ABORTED'
@@ -137,7 +138,7 @@ def parse_locality_set(collection, raw_headers: List[str], data: List[List[str]]
field)} for field in headers if field.lower() in updatable_geocoorddetail_fields]
processed = 0
- total = len(data) * 2
+ total = len(data)
for row_mumber, row in enumerate(data, start=1):
guid = row[guid_index]
@@ -181,7 +182,7 @@ def parse_locality_set(collection, raw_headers: List[str], data: List[List[str]]
if progress is not None:
processed += 1
- progress(LocalityImportStatus.PROGRESS, processed, total)
+ progress(LocalityImportStatus.PARSING, processed, total)
return to_upload, errors
@@ -233,8 +234,8 @@ def upload_locality_set(collection, column_headers: List[str], data: List[List[s
result["localities"] = []
result["geocoorddetails"] = []
- processed = len(to_upload)
- total = len(to_upload) * 2
+ processed = 0
+ total = len(to_upload)
with transaction.atomic():
for parse_success in to_upload:
diff --git a/specifyweb/specify/views.py b/specifyweb/specify/views.py
index 7bd4c4cf7bd..bba00c4319d 100644
--- a/specifyweb/specify/views.py
+++ b/specifyweb/specify/views.py
@@ -881,7 +881,7 @@ def upload_locality_set(request: http.HttpRequest):
"properties": {
"taskstatus": {
"type": "string",
- "enum": [LocalityImportStatus.PROGRESS]
+ "enum": [LocalityImportStatus.PROGRESS, LocalityImportStatus.PARSING]
},
"taskinfo": {
"type": "object",
@@ -1032,7 +1032,7 @@ def abort_localityimport_task(request: http.HttpRequest, taskid: str):
"message": None
}
- if task.state in [LocalityImportStatus.PENDING, LocalityImportStatus.PROGRESS]:
+ if task.state in [LocalityImportStatus.PENDING, LocalityImportStatus.PARSING, LocalityImportStatus.PROGRESS]:
app.control.revoke(locality_import.taskid, terminate=True)
locality_import.status = LocalityImportStatus.ABORTED
From 618402ef20eb52a0f504fee308d5b7aab37b2c36 Mon Sep 17 00:00:00 2001
From: melton-jason
Date: Wed, 12 Jun 2024 15:05:21 -0500
Subject: [PATCH 55/71] Add csv-parse to Jest transformIgnorePatterns
---
specifyweb/frontend/js_src/jest.config.ts | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/specifyweb/frontend/js_src/jest.config.ts b/specifyweb/frontend/js_src/jest.config.ts
index 4694c3ac4ac..d31f97e2fe4 100644
--- a/specifyweb/frontend/js_src/jest.config.ts
+++ b/specifyweb/frontend/js_src/jest.config.ts
@@ -224,7 +224,7 @@ const config: Config.InitialOptions = {
// See https://stackoverflow.com/questions/69075510/jest-tests-failing-on-d3-import
transformIgnorePatterns: [
- '/node_modules/(?!d3|d3-array|internmap|delaunator|robust-predicates|csv-stringify)',
+ '/node_modules/(?!d3|d3-array|internmap|delaunator|robust-predicates|csv-stringify|csv-parse)',
],
// An array of regexp pattern strings that are matched against all modules before the module loader will automatically return a mock for them
From 6d1ac8b48a67dd1f0503df8313da68f7e5de933b Mon Sep 17 00:00:00 2001
From: melton-jason
Date: Wed, 12 Jun 2024 15:29:06 -0500
Subject: [PATCH 56/71] Use WebPack v5 native Workers over loader
---
specifyweb/frontend/js_src/babel.config.cjs | 2 +-
.../js_src/lib/components/WbImport/helpers.ts | 3 +-
.../lib/components/WbImport/worker.d.ts | 13 -------
specifyweb/frontend/js_src/package-lock.json | 38 +++++++++++++++++++
specifyweb/frontend/js_src/package.json | 1 +
5 files changed, 41 insertions(+), 16 deletions(-)
delete mode 100644 specifyweb/frontend/js_src/lib/components/WbImport/worker.d.ts
diff --git a/specifyweb/frontend/js_src/babel.config.cjs b/specifyweb/frontend/js_src/babel.config.cjs
index 4157a6fd03b..2abfbb9c2db 100644
--- a/specifyweb/frontend/js_src/babel.config.cjs
+++ b/specifyweb/frontend/js_src/babel.config.cjs
@@ -19,7 +19,7 @@ module.exports = {
['@babel/preset-react'],
['@babel/preset-typescript'],
],
- plugins: ['@babel/plugin-transform-modules-commonjs'],
+ plugins: ['@babel/plugin-transform-modules-commonjs', "babel-plugin-transform-import-meta"],
},
},
};
diff --git a/specifyweb/frontend/js_src/lib/components/WbImport/helpers.ts b/specifyweb/frontend/js_src/lib/components/WbImport/helpers.ts
index 085412ad3dc..c5934ea96b7 100644
--- a/specifyweb/frontend/js_src/lib/components/WbImport/helpers.ts
+++ b/specifyweb/frontend/js_src/lib/components/WbImport/helpers.ts
@@ -1,6 +1,5 @@
import { parse } from 'csv-parse/browser/esm';
import type { LocalizedString } from 'typesafe-i18n';
-import ImportXLSWorker from 'worker-loader!./xls.worker';
import { wbText } from '../../localization/workbench';
import { ajax } from '../../utils/ajax';
@@ -127,7 +126,7 @@ export const parseXls = async (
limit?: number
): Promise>> =>
new Promise((resolve, reject) => {
- const worker = new ImportXLSWorker();
+ const worker = new Worker(new URL('xls.worker.ts', import.meta.url));
const dateFormat =
fullDateFormat() === databaseDateFormat ? undefined : fullDateFormat();
worker.postMessage({ file, previewSize: limit, dateFormat });
diff --git a/specifyweb/frontend/js_src/lib/components/WbImport/worker.d.ts b/specifyweb/frontend/js_src/lib/components/WbImport/worker.d.ts
deleted file mode 100644
index 82542e8e795..00000000000
--- a/specifyweb/frontend/js_src/lib/components/WbImport/worker.d.ts
+++ /dev/null
@@ -1,13 +0,0 @@
-/**
- * Dummy definition for wbimportxls.worker.ts
- *
- * @module
- */
-
-declare module 'worker-loader!*' {
- class WebpackWorker extends Worker {
- constructor();
- }
-
- export default WebpackWorker;
-}
diff --git a/specifyweb/frontend/js_src/package-lock.json b/specifyweb/frontend/js_src/package-lock.json
index f714e1145d5..b78bffa4a87 100644
--- a/specifyweb/frontend/js_src/package-lock.json
+++ b/specifyweb/frontend/js_src/package-lock.json
@@ -75,6 +75,7 @@
"@types/underscore": "^1.10.24",
"babel-jest": "^28.1.3",
"babel-loader": "^8.2.5",
+ "babel-plugin-transform-import-meta": "^2.2.1",
"chalk": "^4.1.2",
"commander": "^9.4.1",
"core-js": "^3.23.4",
@@ -5558,6 +5559,25 @@
"@babel/core": "^7.0.0-0"
}
},
+ "node_modules/babel-plugin-transform-import-meta": {
+ "version": "2.2.1",
+ "resolved": "https://registry.npmjs.org/babel-plugin-transform-import-meta/-/babel-plugin-transform-import-meta-2.2.1.tgz",
+ "integrity": "sha512-AxNh27Pcg8Kt112RGa3Vod2QS2YXKKJ6+nSvRtv7qQTJAdx0MZa4UHZ4lnxHUWA2MNbLuZQv5FVab4P1CoLOWw==",
+ "dev": true,
+ "dependencies": {
+ "@babel/template": "^7.4.4",
+ "tslib": "^2.4.0"
+ },
+ "peerDependencies": {
+ "@babel/core": "^7.10.0"
+ }
+ },
+ "node_modules/babel-plugin-transform-import-meta/node_modules/tslib": {
+ "version": "2.6.3",
+ "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.6.3.tgz",
+ "integrity": "sha512-xNvxJEOUiWPGhUuUdQgAJPKOOJfGnIyKySOc09XkKsgdUV/3E2zvwZYdejjmRgPCgcym1juLH3226yA7sEFJKQ==",
+ "dev": true
+ },
"node_modules/babel-preset-current-node-syntax": {
"version": "1.0.1",
"resolved": "https://registry.npmjs.org/babel-preset-current-node-syntax/-/babel-preset-current-node-syntax-1.0.1.tgz",
@@ -20806,6 +20826,24 @@
"@babel/helper-define-polyfill-provider": "^0.3.1"
}
},
+ "babel-plugin-transform-import-meta": {
+ "version": "2.2.1",
+ "resolved": "https://registry.npmjs.org/babel-plugin-transform-import-meta/-/babel-plugin-transform-import-meta-2.2.1.tgz",
+ "integrity": "sha512-AxNh27Pcg8Kt112RGa3Vod2QS2YXKKJ6+nSvRtv7qQTJAdx0MZa4UHZ4lnxHUWA2MNbLuZQv5FVab4P1CoLOWw==",
+ "dev": true,
+ "requires": {
+ "@babel/template": "^7.4.4",
+ "tslib": "^2.4.0"
+ },
+ "dependencies": {
+ "tslib": {
+ "version": "2.6.3",
+ "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.6.3.tgz",
+ "integrity": "sha512-xNvxJEOUiWPGhUuUdQgAJPKOOJfGnIyKySOc09XkKsgdUV/3E2zvwZYdejjmRgPCgcym1juLH3226yA7sEFJKQ==",
+ "dev": true
+ }
+ }
+ },
"babel-preset-current-node-syntax": {
"version": "1.0.1",
"resolved": "https://registry.npmjs.org/babel-preset-current-node-syntax/-/babel-preset-current-node-syntax-1.0.1.tgz",
diff --git a/specifyweb/frontend/js_src/package.json b/specifyweb/frontend/js_src/package.json
index 3a58b51556a..125bec2a2e6 100644
--- a/specifyweb/frontend/js_src/package.json
+++ b/specifyweb/frontend/js_src/package.json
@@ -95,6 +95,7 @@
"@types/underscore": "^1.10.24",
"babel-jest": "^28.1.3",
"babel-loader": "^8.2.5",
+ "babel-plugin-transform-import-meta": "^2.2.1",
"chalk": "^4.1.2",
"commander": "^9.4.1",
"core-js": "^3.23.4",
From 7682feccfa1fb48b7da5d5ef8f96c73d91b20f1b Mon Sep 17 00:00:00 2001
From: melton-jason
Date: Wed, 12 Jun 2024 15:38:31 -0500
Subject: [PATCH 57/71] Add ts-expect error and comment on import.meta.url
---
specifyweb/frontend/js_src/lib/components/WbImport/helpers.ts | 1 +
1 file changed, 1 insertion(+)
diff --git a/specifyweb/frontend/js_src/lib/components/WbImport/helpers.ts b/specifyweb/frontend/js_src/lib/components/WbImport/helpers.ts
index c5934ea96b7..8115ef6a070 100644
--- a/specifyweb/frontend/js_src/lib/components/WbImport/helpers.ts
+++ b/specifyweb/frontend/js_src/lib/components/WbImport/helpers.ts
@@ -126,6 +126,7 @@ export const parseXls = async (
limit?: number
): Promise>> =>
new Promise((resolve, reject) => {
+ // @ts-expect-error Specify is running with target 'esnext' with type 'module'. import.meta.url should be allowed
const worker = new Worker(new URL('xls.worker.ts', import.meta.url));
const dateFormat =
fullDateFormat() === databaseDateFormat ? undefined : fullDateFormat();
From cf65af8d48f4dffa89710a063431b5817af29e1d Mon Sep 17 00:00:00 2001
From: melton-jason
Date: Thu, 13 Jun 2024 01:56:09 -0500
Subject: [PATCH 58/71] backend: split row results and allow parsing with
worker
---
.../migrations/0006_localityimport.py | 3 +-
.../migrations/0007_auto_20240613_0348.py | 28 ++
specifyweb/notifications/models.py | 29 +-
specifyweb/specify/import_locality.py | 356 ++++++++++++++----
specifyweb/specify/record_merging.py | 2 -
specifyweb/specify/views.py | 313 +++++++++++----
6 files changed, 558 insertions(+), 173 deletions(-)
create mode 100644 specifyweb/notifications/migrations/0007_auto_20240613_0348.py
diff --git a/specifyweb/notifications/migrations/0006_localityimport.py b/specifyweb/notifications/migrations/0006_localityimport.py
index 1620b319a13..772ad0dfaf0 100644
--- a/specifyweb/notifications/migrations/0006_localityimport.py
+++ b/specifyweb/notifications/migrations/0006_localityimport.py
@@ -18,10 +18,9 @@ class Migration(migrations.Migration):
migrations.CreateModel(
name='LocalityImport',
fields=[
- ('id', models.AutoField(db_column='localityimportid', primary_key=True, serialize=False, verbose_name='localityimportid')),
+ ('id', models.AutoField(db_column='LocalityImportID', primary_key=True, serialize=False, verbose_name='localityimportid')),
('taskid', models.CharField(max_length=256)),
('status', models.CharField(max_length=256)),
- ('result', models.JSONField(null=True)),
('collection', models.ForeignKey(db_column='CollectionID', on_delete=django.db.models.deletion.CASCADE, to='specify.collection')),
('recordset', models.ForeignKey(blank=True, db_column='RecordSetID', null=True, on_delete=django.db.models.deletion.SET_NULL, to='specify.recordset')),
('specifyuser', models.ForeignKey(db_column='SpecifyUserID', on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
diff --git a/specifyweb/notifications/migrations/0007_auto_20240613_0348.py b/specifyweb/notifications/migrations/0007_auto_20240613_0348.py
new file mode 100644
index 00000000000..8fa17bf85a3
--- /dev/null
+++ b/specifyweb/notifications/migrations/0007_auto_20240613_0348.py
@@ -0,0 +1,28 @@
+# Generated by Django 3.2.15 on 2024-06-13 03:48
+
+from django.db import migrations, models
+import django.db.models.deletion
+
+
+class Migration(migrations.Migration):
+
+ dependencies = [
+ ('notifications', '0006_localityimport'),
+ ]
+
+ operations = [
+ migrations.CreateModel(
+ name='LocalityImportRowResult',
+ fields=[
+ ('id', models.AutoField(db_column='LocalityImportRowResultID',
+ primary_key=True, serialize=False, verbose_name='localityimportrowresultid')),
+ ('rownumber', models.IntegerField()),
+ ('result', models.JSONField()),
+ ('localityimport', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE,
+ related_name='results', to='notifications.localityimport')),
+ ],
+ options={
+ 'db_table': 'localityimportrowresult',
+ },
+ ),
+ ]
diff --git a/specifyweb/notifications/models.py b/specifyweb/notifications/models.py
index 69f55a97f2d..cb55e8a3a2c 100644
--- a/specifyweb/notifications/models.py
+++ b/specifyweb/notifications/models.py
@@ -1,6 +1,6 @@
from django.db import models
from django.utils import timezone
-from specifyweb.specify.models import Specifyuser, Collection, Agent, Recordset
+from specifyweb.specify.models import Specifyuser, Collection, Agent, Recordset, Locality
class Message(models.Model):
@@ -15,8 +15,10 @@ class AsyncTask(models.Model):
status = models.CharField(max_length=256)
timestampcreated = models.DateTimeField(default=timezone.now)
timestampmodified = models.DateTimeField(auto_now=True)
- specifyuser = models.ForeignKey(Specifyuser, db_column='SpecifyUserID', on_delete=models.CASCADE)
- collection = models.ForeignKey(Collection, db_column="CollectionID", on_delete=models.CASCADE)
+ specifyuser = models.ForeignKey(
+ Specifyuser, db_column='SpecifyUserID', on_delete=models.CASCADE)
+ collection = models.ForeignKey(
+ Collection, db_column="CollectionID", on_delete=models.CASCADE)
createdbyagent = models.ForeignKey(
Agent, null=True, db_column="CreatedByAgentID", on_delete=models.SET_NULL, related_name="+")
modifiedbyagent = models.ForeignKey(
@@ -38,11 +40,24 @@ class Meta:
db_table = 'spmerging'
# managed = False
+
class LocalityImport(AsyncTask):
- id = models.AutoField('localityimportid', primary_key=True, db_column='localityimportid')
- result = models.JSONField(null=True)
+ id = models.AutoField('localityimportid',
+ primary_key=True, db_column='LocalityImportID')
recordset = models.ForeignKey(
Recordset, null=True, blank=True, db_column="RecordSetID", on_delete=models.SET_NULL)
-
+
+ class Meta:
+ db_table = 'localityimport'
+
+
+class LocalityImportRowResult(models.Model):
+ id = models.AutoField('localityimportrowresultid',
+ primary_key=True, db_column='LocalityImportRowResultID')
+ rownumber = models.IntegerField()
+ result = models.JSONField()
+ localityimport = models.ForeignKey(
+ LocalityImport, on_delete=models.CASCADE, related_name="results")
+
class Meta:
- db_table = 'localityimport'
\ No newline at end of file
+ db_table = 'localityimportrowresult'
diff --git a/specifyweb/specify/import_locality.py b/specifyweb/specify/import_locality.py
index 4b1c8c167dc..b85f5505ac3 100644
--- a/specifyweb/specify/import_locality.py
+++ b/specifyweb/specify/import_locality.py
@@ -3,13 +3,14 @@
from typing import get_args as get_typing_args, Any, Dict, List, Tuple, Literal, Optional, NamedTuple, Union, Callable, TypedDict
from datetime import datetime
from django.db import transaction
+from django.core.serializers.json import DjangoJSONEncoder
from celery.exceptions import Ignore
import specifyweb.specify.models as spmodels
from specifyweb.celery_tasks import LogErrorsTask, app
from specifyweb.specify.datamodel import datamodel
-from specifyweb.notifications.models import LocalityImport, Message
+from specifyweb.notifications.models import LocalityImport, LocalityImportRowResult, Message
from specifyweb.specify.parse import ParseFailureKey, parse_field as _parse_field, ParseFailure as BaseParseFailure, ParseSucess as BaseParseSuccess
LocalityParseErrorMessageKey = Literal[
@@ -29,12 +30,67 @@
ImportModel = Literal['Locality', 'Geocoorddetail']
+localityimport_parse_success = {
+ "type": "array",
+ "items": {
+ "type": "object",
+ "properties": {
+ "locality": {
+ "type": "object"
+ },
+ "geocoorddetail": {
+ "type": [ "null", "string"],
+ },
+ "locality_id": {
+ "type": "number",
+ "minimum": 0
+ },
+ "row_number": {
+ "type": "number",
+ "minimum": 1
+ }
+ },
+ "required": ["locality", "geocoorddetail", "locality_id", "row_number"],
+ "additionalProperties": False
+ }
+}
+
+localityimport_parse_error = {
+ "type": "array",
+ "items": {
+ "type": "object",
+ "properties": {
+ "message": {
+ "description": "Keys for errors which occurred during parsing",
+ "type": "string",
+ "enum": localityParseErrorMessages
+ },
+ "field": {
+ "description": "The field name which had the parsing error",
+ "type": "string"
+ },
+ "payload": {
+ "description": "An object containing data relating to the error",
+ "type": "object",
+ "example": {'badType': 'Preson', 'validTypes': ['Organization', 'Person', 'Other', 'Group']}
+ },
+ "rowNumber": {
+ "type": "integer",
+ "minimum": 1
+ }
+ },
+ "required": ["message", "field", "payload", "rowNumber"],
+ "additionalProperties": False
+ }
+}
+
Progress = Callable[[str, int, int], None]
class LocalityImportStatus:
PENDING = 'PENDING'
PARSING = 'PARSING'
+ PARSED = 'PARSED'
PROGRESS = 'PROGRESS'
SUCCEEDED = 'SUCCEEDED'
ABORTED = 'ABORTED'
@@ -43,57 +99,105 @@ class LocalityImportStatus:
@app.task(base=LogErrorsTask, bind=True)
def import_locality_task(self, collection_id: int, column_headers: List[str], data: List[List[str]], create_recordset: bool) -> None:
-
def progress(state, current: int, total: int):
- if not self.request.called_directly:
- self.update_state(state=state, meta={
- 'current': current, 'total': total})
+ self.update_state(state=state, meta={
+ 'current': current, 'total': total})
+
collection = spmodels.Collection.objects.get(id=collection_id)
with transaction.atomic():
results = upload_locality_set(
collection, column_headers, data, progress)
- li = LocalityImport.objects.get(taskid=self.request.id)
+ li = resolve_localityimport_result(
+ self.request.id, results, collection, create_recordset)
if results['type'] == 'ParseError':
self.update_state(LocalityImportStatus.FAILED, meta={
- "errors": results['errors']})
- li.status = LocalityImportStatus.FAILED
- li.result = json.dumps(results['errors'])
+ "errors": [error.to_json() for error in results["errors"]]})
+
Message.objects.create(user=li.specifyuser, content=json.dumps({
'type': 'localityimport-failed',
'taskid': li.taskid,
- 'errors': json.dumps(results['errors'])
+ 'errors': json.dumps([error.to_json() for error in results["errors"]])
}))
elif results['type'] == 'Uploaded':
- li.recordset = create_localityimport_recordset(
- collection, li.specifyuser, results['localities']) if create_recordset else None
-
recordset_id = None if li.recordset is None else li.recordset.pk
-
+ localitites = []
+ geocoorddetails = []
+ for row in results["results"]:
+ localitites.append(row["locality"])
+ if row["geocoorddetail"]:
+ geocoorddetails.append(row["geocoorddetail"])
self.update_state(state=LocalityImportStatus.SUCCEEDED, meta={
- "recordsetid": recordset_id, "localities": results['localities'], "geocoorddetails": results['geocoorddetails']})
- li.result = json.dumps({
- 'recordsetid': recordset_id,
- 'localities': json.dumps(results['localities']),
- 'geocoorddetails': json.dumps(results['geocoorddetails'])
- })
- li.status = LocalityImportStatus.SUCCEEDED
+ "recordsetid": recordset_id, "localities": localitites, "geocoorddetails": geocoorddetails})
+
Message.objects.create(user=li.specifyuser, content=json.dumps({
'type': 'localityimport-succeeded',
'taskid': li.taskid,
'recordsetid': recordset_id,
- 'localities': json.dumps(results['localities']),
- 'geocoorddetails': json.dumps(results["geocoorddetails"])
+ "localities": localitites,
+ "geocoorddetails": geocoorddetails
+ }))
+
+ # prevent Celery from overriding the State of the Task
+ raise Ignore()
+
+
+@app.task(base=LogErrorsTask, bind=True)
+def parse_locality_task(self, collection_id: int, column_headers: List[str], data: List[List[str]]):
+ def progress(state, current: int, total: int):
+ self.update_state(state=state, meta={
+ 'current': current, 'total': total})
+
+ collection = spmodels.Collection.objects.get(id=collection_id)
+
+ with transaction.atomic():
+ to_upload, errors = parse_locality_set(
+ collection, column_headers, data, progress)
+
+ li = resolve_localityimport_result(
+ self.request.id, (to_upload, errors), collection)
+
+ if li.status == LocalityImportStatus.FAILED:
+ self.update_state(LocalityImportStatus.FAILED, meta={
+ "errors": errors})
+
+ Message.objects.create(user=li.specifyuser, content=json.dumps({
+ 'type': 'localityimport-failed',
+ 'taskid': li.taskid,
+ 'errors': json.dumps(errors)
}))
- li.save()
+ elif li.status == LocalityImportStatus.PARSED:
+ localitites = len(to_upload)
+ geocoorddetails = 0
+ for parsed in to_upload:
+ if parsed['geocoorddetail'] is not None:
+ geocoorddetails += 1
+
+ self.update_state(LocalityImportStatus.PARSED, meta={
+ "localitites": localitites,
+ "geocoorddetails": geocoorddetails
+ })
+ Message.objects.create(user=li.specifyuser, content=json.dumps({
+ 'type': 'localityimport-parse-succeeded',
+ 'taskid': li.taskid,
+ "localitites": localitites,
+ "geocoorddetails": geocoorddetails
+ }))
# prevent Celery from overriding the State of the Task
raise Ignore()
+class JSONParseError(TypedDict):
+ message: str
+ field: str
+ payload: Dict[str, Any]
+ rowNumber: int
+
+
class ParseError(NamedTuple):
message: Union[ParseFailureKey, LocalityParseErrorMessageKey]
field: Optional[str]
@@ -104,10 +208,17 @@ class ParseError(NamedTuple):
def from_parse_failure(cls, parse_failure: BaseParseFailure, field: str, row_number: int):
return cls(message=parse_failure.message, field=field, payload=parse_failure.payload, row_number=row_number)
- def to_json(self):
+ def to_json(self) -> JSONParseError:
return {"message": self.message, "field": self.field, "payload": self.payload, "rowNumber": self.row_number}
+class ParsedRow(TypedDict):
+ row_number: int
+ locality: Dict[str, Any]
+ geocoorddetail: Optional[Dict[str, Any]]
+ locality_id: int
+
+
class ParseSuccess(NamedTuple):
to_upload: Dict[str, Any]
model: ImportModel
@@ -119,9 +230,91 @@ def from_base_parse_success(cls, parse_success: BaseParseSuccess, model: ImportM
return cls(parse_success.to_upload, model, locality_id, row_number)
-def parse_locality_set(collection, raw_headers: List[str], data: List[List[str]], progress: Optional[Progress] = None) -> Tuple[List[ParseSuccess], List[ParseError]]:
+class UploadSuccessRow(TypedDict):
+ locality: int
+ geocoorddetail: Optional[int]
+
+
+class UploadSuccess(TypedDict):
+ type: Literal["Uploaded"]
+ results: List[UploadSuccessRow]
+
+
+class UploadParseError(TypedDict):
+ type: Literal["ParseError"]
+ errors: List[ParseError]
+
+
+@transaction.atomic
+def resolve_localityimport_result(taskid: str, results: Union[Tuple[List[ParsedRow], List[ParseError]], Union[UploadSuccess, UploadParseError]], collection, create_recordset: bool = False) -> LocalityImport:
+
+ li = LocalityImport.objects.get(taskid=taskid)
+
+ # the results come from parse_locality_set
+ if isinstance(results, tuple):
+ to_upload, errors = results
+ if len(errors) > 0:
+ status = LocalityImportStatus.FAILED
+ for error in errors:
+ result = error.to_json()
+ LocalityImportRowResult.objects.create(
+ localityimport=li,
+ rownumber=result["rowNumber"],
+ result=json.dumps(result, cls=DjangoJSONEncoder)
+ )
+ else:
+ status = LocalityImportStatus.PARSED
+ localities = len(to_upload)
+ geocoorddetails = 0
+ for parsed in to_upload:
+ if parsed['geocoorddetail'] is not None:
+ geocoorddetails += 1
+ LocalityImportRowResult.objects.create(
+ localityimport=li,
+ rownumber=-1,
+ result=json.dumps({
+ "localities": localities,
+ "geocoorddetails": geocoorddetails
+ })
+ )
+
+ # the results come from upload_locality_set
+ else:
+ if results['type'] == 'ParseError':
+ status = LocalityImportStatus.FAILED
+ for error in results['errors']:
+ result = error.to_json()
+ LocalityImportRowResult.objects.create(
+ localityimport=li,
+ rownumber=error.row_number,
+ result=json.dumps(result, cls=DjangoJSONEncoder)
+ )
+
+ elif results['type'] == 'Uploaded':
+ status = LocalityImportStatus.SUCCEEDED
+ localities = []
+ for index, row in enumerate(results['results']):
+ row_number = index + 1
+ localities.append(row['locality'])
+
+ LocalityImportRowResult.objects.create(
+ localityimport=li,
+ rownumber=row_number,
+ result=json.dumps(row, cls=DjangoJSONEncoder)
+ )
+
+ li.recordset = create_localityimport_recordset(
+ collection, li.specifyuser, localities) if create_recordset else None
+
+ li.status = status
+ li.save()
+
+ return li
+
+
+def parse_locality_set(collection, raw_headers: List[str], data: List[List[str]], progress: Optional[Progress] = None) -> Tuple[List[ParsedRow], List[ParseError]]:
errors: List[ParseError] = []
- to_upload: List[ParseSuccess] = []
+ to_upload: List[ParsedRow] = []
headers = [header.strip().lower() for header in raw_headers]
@@ -140,16 +333,16 @@ def parse_locality_set(collection, raw_headers: List[str], data: List[List[str]]
processed = 0
total = len(data)
- for row_mumber, row in enumerate(data, start=1):
+ for row_number, row in enumerate(data, start=1):
guid = row[guid_index]
locality_query = spmodels.Locality.objects.filter(guid=guid)
if len(locality_query) == 0:
errors.append(ParseError(message='noLocalityMatchingGuid', field='guid',
- payload={'guid': guid}, row_number=row_mumber))
+ payload={'guid': guid}, row_number=row_number))
if len(locality_query) > 1:
errors.append(ParseError(message='multipleLocalitiesWithGuid', field=None, payload={'guid': guid, 'localityIds': list(
- locality.id for locality in locality_query)}, row_number=row_mumber))
+ locality.id for locality in locality_query)}, row_number=row_number))
locality_values = [{'field': dict['field'], 'value': row[dict['index']].strip()}
for dict in updatable_locality_fields_index]
@@ -161,24 +354,16 @@ def parse_locality_set(collection, raw_headers: List[str], data: List[List[str]]
locality_query) != 1 else locality_query[0].id
parsed_locality_fields = [parse_field(
- collection, 'Locality', dict['field'], dict['value'], locality_id, row_mumber) for dict in locality_values if dict['value'].strip() != ""]
+ collection, 'Locality', dict['field'], dict['value'], locality_id, row_number) for dict in locality_values if dict['value'].strip() != ""]
parsed_geocoorddetail_fields = [parse_field(
- collection, 'Geocoorddetail', dict["field"], dict['value'], locality_id, row_mumber) for dict in geocoorddetail_values if dict['value'].strip() != ""]
-
- merged_locality_result, locality_errors = merge_parse_results(
- 'Locality', parsed_locality_fields, locality_id, row_mumber)
+ collection, 'Geocoorddetail', dict["field"], dict['value'], locality_id, row_number) for dict in geocoorddetail_values if dict['value'].strip() != ""]
- merged_geocoorddetail_result, geocoord_errors = merge_parse_results(
- 'Geocoorddetail', parsed_geocoorddetail_fields, locality_id, row_mumber)
+ parsed_row, parsed_errors = merge_parse_results(
+ [*parsed_locality_fields, *parsed_geocoorddetail_fields], locality_id, row_number)
- errors.extend([*locality_errors, *geocoord_errors])
-
- if merged_locality_result is not None:
- to_upload.append(merged_locality_result)
-
- if merged_geocoorddetail_result is not None:
- to_upload.append(merged_geocoorddetail_result)
+ errors.extend(parsed_errors)
+ to_upload.append(parsed_row)
if progress is not None:
processed += 1
@@ -196,78 +381,83 @@ def parse_field(collection, table_name: ImportModel, field_name: str, field_valu
return ParseSuccess.from_base_parse_success(parsed, table_name, locality_id, row_number)
-def merge_parse_results(table_name: ImportModel, results: List[Union[ParseSuccess, ParseError]], locality_id: int, row_number: int) -> Tuple[Optional[ParseSuccess], List[ParseError]]:
- to_upload = {}
+def merge_parse_results(results: List[Union[ParseSuccess, ParseError]], locality_id: int, row_number: int) -> Tuple[ParsedRow, List[ParseError]]:
+ to_upload: ParsedRow = {
+ "locality_id": locality_id,
+ "row_number": row_number,
+ "locality": {},
+ "geocoorddetail": {}
+ }
errors = []
for result in results:
if isinstance(result, ParseError):
errors.append(result)
else:
- to_upload.update(result.to_upload)
- return None if len(to_upload) == 0 else ParseSuccess(to_upload, table_name, locality_id, row_number), errors
-
-
-class UploadSuccess(TypedDict):
- type: Literal["Uploaded"]
- localities: List[int]
- geocoorddetails: List[int]
+ to_upload[result.model.lower()].update(result.to_upload)
+ if len(to_upload['geocoorddetail']) == 0:
+ to_upload['geocoorddetail'] = None
-class UploadParseError(TypedDict):
- type: Literal["ParseError"]
- errors: List[ParseError]
+ return to_upload, errors
def upload_locality_set(collection, column_headers: List[str], data: List[List[str]], progress: Optional[Progress] = None) -> Union[UploadSuccess, UploadParseError]:
to_upload, errors = parse_locality_set(
collection, column_headers, data, progress)
- result = {
- "type": None,
- }
if len(errors) > 0:
- result["type"] = "ParseError"
- result["errors"] = [error.to_json() for error in errors]
- return result
+ return {
+ "type": "ParseError",
+ "errors": errors
+ }
+
+ return upload_from_parsed(to_upload, progress)
- result["type"] = "Uploaded"
- result["localities"] = []
- result["geocoorddetails"] = []
+def upload_from_parsed(uploadables: List[ParsedRow], progress: Optional[Progress] = None) -> UploadSuccess:
processed = 0
- total = len(to_upload)
+ total = len(uploadables)
+
+ uploaded: List[UploadSuccessRow] = [
+ {"locality": None, "geocoorddetail": None} for _ in range(total)]
with transaction.atomic():
- for parse_success in to_upload:
- uploadable = parse_success.to_upload
- model_name = parse_success.model
- locality_id = parse_success.locality_id
+ for parsed_row in uploadables:
+ locality_id = parsed_row["locality_id"]
if locality_id is None:
raise ValueError(
- f"No matching Locality found on row {parse_success.row_number}")
+ f"No matching Locality found on row {parsed_row['row_number']}")
- model = getattr(spmodels, model_name)
locality = spmodels.Locality.objects.get(id=locality_id)
- if model_name == 'Geocoorddetail':
+ # Queryset.update() is not used here as it does not send pre/post save signals
+ for field, value in parsed_row['locality'].items():
+ setattr(locality, field, value)
+ locality.save()
+ try:
+ uploaded[parsed_row['row_number'] -
+ 1]["locality"] = locality_id
+ except:
+ raise KeyError(uploaded)
+
+ if parsed_row['geocoorddetail'] is not None:
locality.geocoorddetails.get_queryset().delete()
- geoCoordDetail = model.objects.create(**uploadable)
+ geoCoordDetail = spmodels.Geocoorddetail.objects.create(
+ **parsed_row['geocoorddetail'])
geoCoordDetail.locality = locality
geoCoordDetail.save()
- result["geocoorddetails"].append(geoCoordDetail.id)
- elif model_name == 'Locality':
- # Queryset.update() is not used here as it does not send pre/post save signals
- for field, value in uploadable.items():
- setattr(locality, field, value)
- locality.save()
- result["localities"].append(locality_id)
+ uploaded[parsed_row["row_number"] -
+ 1]["geocoorddetail"] = geoCoordDetail.pk
if progress is not None:
processed += 1
progress(LocalityImportStatus.PROGRESS, processed, total)
- return result
+ return {
+ "type": "Uploaded",
+ "results": uploaded
+ }
# Example: Wed Jun 07 2023
diff --git a/specifyweb/specify/record_merging.py b/specifyweb/specify/record_merging.py
index 362602cb2fc..11416880d15 100644
--- a/specifyweb/specify/record_merging.py
+++ b/specifyweb/specify/record_merging.py
@@ -4,7 +4,6 @@
import json
from itertools import groupby
-import re
from typing import Any, Callable, Dict, List, Optional
import traceback
@@ -12,7 +11,6 @@
from django.db import IntegrityError, transaction, models
from specifyweb.notifications.models import Message, Spmerging
from django.db.models import Q
-from django.db.models.deletion import ProtectedError
from specifyweb.businessrules.exceptions import BusinessRuleException
from specifyweb.celery_tasks import LogErrorsTask, app
diff --git a/specifyweb/specify/views.py b/specifyweb/specify/views.py
index bba00c4319d..5ab9cc0a663 100644
--- a/specifyweb/specify/views.py
+++ b/specifyweb/specify/views.py
@@ -5,7 +5,7 @@
import json
import mimetypes
from functools import wraps
-from typing import Union
+from typing import Union, List, Tuple, Dict, Any
from uuid import uuid4
from django import http
@@ -21,7 +21,7 @@
PermissionTargetAction, PermissionsException, check_permission_targets, table_permissions_checker
from specifyweb.celery_tasks import app, CELERY_TASK_STATE
from specifyweb.specify.record_merging import record_merge_fx, record_merge_task, resolve_record_merge_response
-from specifyweb.specify.import_locality import localityParseErrorMessages, parse_locality_set as _parse_locality_set, import_locality_task, LocalityImportStatus
+from specifyweb.specify.import_locality import localityimport_parse_success, localityimport_parse_error, parse_locality_set as _parse_locality_set, upload_locality_set as _upload_locality_set, create_localityimport_recordset, import_locality_task, parse_locality_task, LocalityImportStatus
from . import api, models as spmodels
from .specify_jar import specify_jar
@@ -746,34 +746,6 @@ def abort_merge_task(request, merge_id: int) -> http.HttpResponse:
return http.HttpResponse(f'Task {merge.taskid} is not running and cannot be aborted.')
-locality_set_parse_error_data = {
- "type": "array",
- "items": {
- "type": "object",
- "properties": {
- "message": {
- "description": "Keys for errors which occured during parsing",
- "type": "string",
- "enum": localityParseErrorMessages
- },
- "field": {
- "description": "The field name which had the parsing error",
- "type": "string"
- },
- "payload": {
- "description": "An object containing data relating to the error",
- "type": "object",
- "example": {'badType': 'Preson', 'validTypes': ['Organization', 'Person', 'Other', 'Group']}
- },
- "rowNumber": {
- "type": "integer",
- "minimum": 0
- }
- }
- }
-}
-
-
@openapi(schema={
'post': {
"requestBody": {
@@ -802,22 +774,87 @@ def abort_merge_task(request, merge_id: int) -> http.HttpResponse:
"type": "boolean",
"description": "When True, creates a recordset in the logged-in collection for the logged-in user with the matched/updated localities if the upload succeeds",
"default": True
+ },
+ "runInBackground": {
+ "type": "boolean",
+ "description": "Whether the task should be run in the background. Defaults to False",
+ "default": False
}
- }
+ },
+ "required": ["columnHeaders", "data"],
+ "additionalProperties": False
}
}
}
},
"responses": {
"200": {
- "description": "Returns a GUID (job ID)",
+ "description": "Task finished synchronously",
"content": {
- "text/plain": {
+ "application/json": {
+ "schema": {
+ "oneOf": [
+ {
+ "type": "object",
+ "properties": {
+ "type": {
+ "type": "string",
+ "enum": ["ParseError"]
+ },
+ "errors": localityimport_parse_error
+ },
+ "required": ["type", "errors"],
+ "additionalProperties": False
+ },
+ {
+ "type": "object",
+ "properties": {
+ "type": {
+ "type": "string",
+ "enum": ["Uploaded"]
+ },
+ "recordsetid": {
+ "type": ["string", "null"]
+ },
+ "localities": {
+ "type": "array",
+ "description": "An array of matched/updated Locality IDs",
+ "items": {
+ "type": "number"
+ }
+ },
+ "geocoorddetails": {
+ "type": "array",
+ "description": "An array of created GeoCoordDetail IDs",
+ "items": {
+ "type": "number"
+ }
+ }
+ },
+ "required": ["type", "recordsetid", "localities", "geocoorddetails"],
+ "additionalProperties": False
+ }
+ ]
+ }
+ }
+ }
+ },
+ "201": {
+ "content": {
+ "application/json": {
"schema": {
- "type": "string",
- "maxLength": 36,
- "example": "7d34dbb2-6e57-4c4b-9546-1fe7bec1acca",
+ "description": "Task started by the worker. Returns the newly created ID of the task",
+ "content": {
+ "text/plain": {
+ "schema": {
+ "type": "string",
+ "maxLength": 36,
+ "example": "7d34dbb2-6e57-4c4b-9546-1fe7bec1acca",
+ }
+ }
+ }
}
+
}
}
},
@@ -832,27 +869,56 @@ def upload_locality_set(request: http.HttpRequest):
column_headers = request_data["columnHeaders"]
data = request_data["data"]
create_recordset = request_data.get("createRecordSet", True)
+ run_in_background = request_data.get("runInBackground", False)
+
+ resolved_upload_function = start_locality_set_background if run_in_background else upload_locality_set_foreground
+
+ result = resolved_upload_function(request.specify_collection, request.specify_user,
+ request.specify_user_agent, column_headers, data, create_recordset)
+ return http.JsonResponse(result, status=201 if run_in_background else 200, safe=False)
+
+
+def start_locality_set_background(collection, specify_user, agent, column_headers: List[str], data: List[List[str]], create_recordset: bool = False, parse_only: bool = False) -> str:
task_id = str(uuid4())
- task = import_locality_task.apply_async(
- [request.specify_collection.id, column_headers, data, create_recordset], task_id=task_id)
+ args = [collection.id, column_headers, data]
+ if not parse_only:
+ args.append(create_recordset)
+ task_function = parse_locality_task.apply_async if parse_only else import_locality_task.apply_async
+
+ task = task_function(args, task_id=task_id)
LocalityImport.objects.create(
- result=None,
taskid=task.id,
status=LocalityImportStatus.PENDING,
- collection=request.specify_collection,
- specifyuser=request.specify_user,
- createdbyagent=request.specify_user_agent,
- modifiedbyagent=request.specify_user_agent,
+ collection=collection,
+ specifyuser=specify_user,
+ createdbyagent=agent,
+ modifiedbyagent=agent,
)
- Message.objects.create(user=request.specify_user, content=json.dumps({
+ Message.objects.create(user=specify_user, content=json.dumps({
'type': 'localityimport-starting',
'taskid': task.id
}))
- return http.JsonResponse(task.id, safe=False)
+ return task.id
+
+
+def upload_locality_set_foreground(collection, specify_user, agent, column_headers: List[str], data: List[List[str]], create_recordset: bool):
+ result = _upload_locality_set(collection, column_headers, data)
+
+ if result["type"] == 'ParseError':
+ return result
+
+ localities = [row["locality"] for row in result["results"]]
+
+ recordset = create_localityimport_recordset(
+ collection, specify_user, localities) if create_recordset else None
+
+ result["recordsetid"] = None if recordset is None else recordset.pk
+
+ return result
@openapi(schema={
@@ -861,7 +927,7 @@ def upload_locality_set(request: http.HttpRequest):
"200": {
"description": "Data fetched successfully",
"content": {
- "text/plain": {
+ "application/json": {
"schema": {
"oneOf": [
{
@@ -869,12 +935,14 @@ def upload_locality_set(request: http.HttpRequest):
"properties": {
"taskstatus": {
"type": "string",
- "enum": [LocalityImportStatus.PENDING, LocalityImportStatus.ABORTED]
+ "enum": [LocalityImportStatus.PENDING, LocalityImportStatus.ABORTED, LocalityImportStatus.PARSED]
},
"taskinfo": {
"type": "string",
},
- }
+ },
+ "required": ["taskstatus", "taskinfo"],
+ "additionalProperties": False
},
{
"type": "object",
@@ -896,7 +964,9 @@ def upload_locality_set(request: http.HttpRequest):
}
}
},
- }
+ },
+ "required": ["taskstatus", "taskinfo"],
+ "additionalProperties": False
},
{
"type": "object",
@@ -909,7 +979,7 @@ def upload_locality_set(request: http.HttpRequest):
"type": "object",
"properties": {
"recordsetid": {
- "type": "number"
+ "type": ["number", "null"]
},
"localities": {
"type": "array",
@@ -925,9 +995,13 @@ def upload_locality_set(request: http.HttpRequest):
"type": "number"
}
}
- }
+ },
+ "required": ["recordsetid", "localities", "geocoorddetails"],
+ "additionalProperties": False
}
- }
+ },
+ "required": ["taskstatus", "taskinfo"],
+ "additionalProperties": False
},
{
"type": "object",
@@ -939,10 +1013,12 @@ def upload_locality_set(request: http.HttpRequest):
"taskinfo": {
"type": "object",
"properties": {
- "errors": locality_set_parse_error_data
+ "errors": localityimport_parse_error
}
}
- }
+ },
+ "required": ["taskstatus", "taskinfo"],
+ "additionalProperties": False
}
]
}
@@ -950,7 +1026,13 @@ def upload_locality_set(request: http.HttpRequest):
}
},
'404': {
- 'description': 'The localityimport object with task id was not found',
+ "description": 'The localityimport object with task id was not found',
+ "content": {
+ "text/plain": {
+ "type": "string",
+ "example": "The localityimport with task id '7d34dbb2-6e57-4c4b-9546-1fe7bec1acca' was not found"
+ }
+ }
},
}
},
@@ -972,16 +1054,56 @@ def localityimport_status(request: http.HttpRequest, taskid: str):
}
if locality_import.status == LocalityImportStatus.FAILED:
+
status["taskstatus"] = LocalityImportStatus.FAILED
- status["taskinfo"] = {"errors": json.loads(locality_import.result)}
+
+ if isinstance(result.info, dict) and 'errors' in result.info.keys():
+ errors = result.info["errors"]
+ else:
+ results = locality_import.results.all()
+ errors = [json.loads(error.result) for error in results]
+
+ status["taskinfo"] = {"errors": errors}
+
+ elif locality_import.status == LocalityImportStatus.PARSED:
+ status["taskstatus"] = LocalityImportStatus.PARSED
+
+ if isinstance(result.info, dict) and resolved_state == LocalityImportStatus.PARSED:
+ result = {
+ "localities": result.info["localities"],
+ "geocoorddetails": result.info["geocoorddetails"]
+ }
+ else:
+ results = locality_import.results.get_queryset().get(rownumber=-1)
+ result = json.loads(results.result)
+
+ status["taskinfo"] = result
+
elif locality_import.status == LocalityImportStatus.SUCCEEDED:
status["taskstatus"] = LocalityImportStatus.SUCCEEDED
- success_result = json.loads(locality_import.result)
- status["taskinfo"] = {
- "recordsetid": success_result["recordsetid"],
- "localities": json.loads(success_result["localities"]),
- "geocoorddetails": json.loads(success_result["geocoorddetails"])
- }
+ recordset_id = locality_import.recordset.id if locality_import.recordset is not None else None
+ if isinstance(result.info, dict) and resolved_state == LocalityImportStatus.SUCCEEDED:
+ result = {
+ "recordsetid": recordset_id,
+ "localities": result.info["localities"],
+ "geocoorddetails": result.info["geocoorddetails"]
+ }
+ else:
+ results = locality_import.results.all()
+ localitites = []
+ geocoorddetails = []
+ for row in results:
+ parsed = json.loads(row.result)
+ localitites.append(parsed["locality"])
+ if parsed["geocoorddetail"] is not None:
+ geocoorddetails.append(parsed["geocoorddetail"])
+ result = {
+ "recordsetid": recordset_id,
+ "localities": localitites,
+ "geocoorddetails": geocoorddetails
+ }
+
+ status["taskinfo"] = result
return http.JsonResponse(status, safe=False)
@@ -1005,13 +1127,21 @@ def localityimport_status(request: http.HttpRequest, taskid: str):
'description': 'Response message about the status of the task'
},
},
+ "required": ["type", "message"],
+ "additionalProperties": False
},
},
},
},
- '404': {
- 'description': 'The localityimport with task id is not found',
- },
+ "404": {
+ "description": 'The localityimport object with task id was not found',
+ "content": {
+ "text/plain": {
+ "type": "string",
+ "example": "The localityimport with task id '7d34dbb2-6e57-4c4b-9546-1fe7bec1acca' was not found"
+ }
+ }
+ }
},
},
})
@@ -1075,23 +1205,41 @@ def abort_localityimport_task(request: http.HttpRequest, taskid: str):
"type": "string"
}
}
+ },
+ "runInBackground": {
+ "type": "boolean",
+ "description": "Whether the task should be ran in the background. Defaults to True",
+ "default": False
}
- }
+ },
+ "required": ["columnHeaders", "data"],
+ "additionalProperties": False
}
}
}
},
"responses": {
"200": {
- "description": "Locality Import Set parsed successfully",
+ "description": "Successful response returned by worker",
+ "content": {
+ "application/json": {
+ "schema": localityimport_parse_success
+ }
+ }
+ },
+ "201": {
"content": {
"application/json": {
"schema": {
- "type": "array",
- "description": "An array of matched Locality IDs",
- "items": {
- "type": "integer",
- "minimum": 0
+ "description": "Task started by the worker. Returns the newly created ID of the task",
+ "content": {
+ "text/plain": {
+ "schema": {
+ "type": "string",
+ "maxLength": 36,
+ "example": "7d34dbb2-6e57-4c4b-9546-1fe7bec1acca",
+ }
+ }
}
}
}
@@ -1101,7 +1249,7 @@ def abort_localityimport_task(request: http.HttpRequest, taskid: str):
"description": "Locality Import Set not parsed successfully",
"content": {
"application/json": {
- "schema": locality_set_parse_error_data
+ "schema": localityimport_parse_error
}
}
}
@@ -1116,14 +1264,21 @@ def parse_locality_set(request: http.HttpRequest):
request_data = json.loads(request.body)
column_headers = request_data["columnHeaders"]
data = request_data["data"]
+ run_in_background = request_data.get("runInBackground", False)
+ if not run_in_background:
+ status, result = parse_locality_set_foreground(
+ request.specify_collection, column_headers, data)
+ else:
+ status, result = 201, start_locality_set_background(
+ request.specify_collection, request.specify_user, request.specify_user_agent, column_headers, data, False, True)
+ return http.JsonResponse(result, status=status, safe=False)
+
+def parse_locality_set_foreground(collection, column_headers: List[str], data: List[List[str]]) -> Tuple[int, Dict[str, Any]]:
parsed, errors = _parse_locality_set(
- request.specify_collection, column_headers, data)
+ collection, column_headers, data)
if len(errors) > 0:
- result = [error.to_json() for error in errors]
- return http.JsonResponse(result, status=422, safe=False)
-
- result = [ps.locality_id for ps in parsed]
+ return 422, errors
- return http.JsonResponse(result, safe=False)
+ return 200, parsed
From e53af5475377f30921d7c79619b0ceff9bec748c Mon Sep 17 00:00:00 2001
From: melton-jason
Date: Thu, 13 Jun 2024 02:33:46 -0500
Subject: [PATCH 59/71] Show confirmation dialog with number of affected
records
---
.../lib/components/LocalityImport/Status.tsx | 62 ++++++++++++++++++-
.../lib/components/LocalityImport/index.tsx | 26 ++++++--
.../lib/components/LocalityImport/types.ts | 11 ++++
.../lib/components/LocalityImport/utils.ts | 4 +-
.../Notifications/NotificationRenderers.tsx | 11 ++++
.../components/QueryBuilder/Components.tsx | 3 +-
.../js_src/lib/localization/locality.ts | 11 +++-
specifyweb/specify/views.py | 26 +++++++-
8 files changed, 141 insertions(+), 13 deletions(-)
diff --git a/specifyweb/frontend/js_src/lib/components/LocalityImport/Status.tsx b/specifyweb/frontend/js_src/lib/components/LocalityImport/Status.tsx
index 804b5ff0b68..82f92dc2ea0 100644
--- a/specifyweb/frontend/js_src/lib/components/LocalityImport/Status.tsx
+++ b/specifyweb/frontend/js_src/lib/components/LocalityImport/Status.tsx
@@ -48,9 +48,11 @@ const statusDimensionKey = 'localityimport-status';
export function LocalityImportStatus({
taskId,
+ onImport: handleImport,
onClose: handleClose,
}: {
readonly taskId: string;
+ readonly onImport: () => void;
readonly onClose: () => void;
}): JSX.Element {
const [state, setState] = React.useState({
@@ -62,7 +64,7 @@ export function LocalityImportStatus({
React.useEffect(() => {
let destructorCalled = false;
const fetchStatus = () =>
- void ajax(`/api/localityset/status/${taskId}`, {
+ void ajax(`/api/localityset/status/${taskId}/`, {
headers: { Accept: 'application/json' },
})
.then(({ data }) => {
@@ -109,6 +111,13 @@ export function LocalityImportStatus({
recordSetId={state.taskinfo.recordsetid}
onClose={handleClose}
/>
+ ) : state.taskstatus === 'PARSED' ? (
+
) : state.taskstatus === 'FAILED' ? (
void;
}): JSX.Element {
const percentage = Math.round((currentProgress / total) * 100);
- useTitle(localized(`${header} ${percentage}%`));
+ useTitle(localized(`${percentage}% ${header}`));
return (
void;
+ readonly onClose: () => void;
+}): JSX.Element {
+ return (
+
+ {commonText.close()}
+ {
+ handleClose();
+ handleImport();
+ }}
+ >
+ {commonText.import()}
+
+ >
+ }
+ header={localityImportStatusLocalization.PARSED}
+ modal={false}
+ onClose={handleClose}
+ >
+
+
+ {localityText.localityImportEffectCounts({
+ localityTabelLabel: tables.Locality.label,
+ geoCoordDetailTableLabel: tables.GeoCoordDetail.label,
+ })}
+
+
+
+
+
+ );
+}
+
export function LocalityImportSuccess({
localityIds,
geoCoordDetailIds,
diff --git a/specifyweb/frontend/js_src/lib/components/LocalityImport/index.tsx b/specifyweb/frontend/js_src/lib/components/LocalityImport/index.tsx
index 60149181f14..8862d0c8c18 100644
--- a/specifyweb/frontend/js_src/lib/components/LocalityImport/index.tsx
+++ b/specifyweb/frontend/js_src/lib/components/LocalityImport/index.tsx
@@ -39,6 +39,24 @@ export function ImportLocalityDataSet(): JSX.Element {
setHeaders([]);
}
+ function handleParse(
+ columnHeaders: RA,
+ data: RA>
+ ): void {
+ loading(
+ ajax('/api/localityset/parse/', {
+ method: 'POST',
+ headers: { Accept: 'application/json' },
+ body: {
+ columnHeaders,
+ data,
+ createRecordSet: false,
+ runInBackground: true,
+ },
+ }).then(({ data }) => setTaskId(data))
+ );
+ }
+
function handleImport(
columnHeaders: RA,
data: RA>
@@ -51,6 +69,7 @@ export function ImportLocalityDataSet(): JSX.Element {
columnHeaders,
data,
createRecordSet: true,
+ runInBackground: true,
},
}).then(({ data }) => setTaskId(data))
);
@@ -94,7 +113,7 @@ export function ImportLocalityDataSet(): JSX.Element {
(errors) => errors.length > 0
)
)
- handleImport(headers, data);
+ handleParse(headers, data);
}}
/>
{Object.values(headerErrors).some((errors) => errors.length > 0) && (
@@ -105,9 +124,7 @@ export function ImportLocalityDataSet(): JSX.Element {
{headerErrors.missingRequiredHeaders.length === 0 && (
{
- const storedHeaders = headers;
- const storedData = data;
- handleImport(storedHeaders, storedData);
+ handleParse(headers, data);
resetContext();
}}
>
@@ -160,6 +177,7 @@ export function ImportLocalityDataSet(): JSX.Element {
setTaskId(undefined)}
+ onImport={(): void => handleImport(headers, data)}
/>
)}
>
diff --git a/specifyweb/frontend/js_src/lib/components/LocalityImport/types.ts b/specifyweb/frontend/js_src/lib/components/LocalityImport/types.ts
index 8a925ff03da..d9c5ce4d312 100644
--- a/specifyweb/frontend/js_src/lib/components/LocalityImport/types.ts
+++ b/specifyweb/frontend/js_src/lib/components/LocalityImport/types.ts
@@ -21,6 +21,7 @@ export type LocalityImportParseError = {
export type LocalityImportTaskStatus =
| 'ABORTED'
| 'FAILED'
+ | 'PARSED'
| 'PARSING'
| 'PENDING'
| 'PROGRESS'
@@ -40,6 +41,16 @@ export type LocalityImportState =
};
}
>
+ | State<
+ 'PARSED',
+ {
+ readonly taskstatus: 'PARSED';
+ readonly taskinfo: {
+ readonly localities: number;
+ readonly geocoorddetails: number;
+ };
+ }
+ >
| State<
'PARSING',
{
diff --git a/specifyweb/frontend/js_src/lib/components/LocalityImport/utils.ts b/specifyweb/frontend/js_src/lib/components/LocalityImport/utils.ts
index d2731ce9804..ea6be2554fa 100644
--- a/specifyweb/frontend/js_src/lib/components/LocalityImport/utils.ts
+++ b/specifyweb/frontend/js_src/lib/components/LocalityImport/utils.ts
@@ -31,6 +31,7 @@ export const localityImportStatusLocalization: RR<
LocalityImportTaskStatus,
LocalizedString
> = {
+ PARSED: localityText.localityImportParsed(),
PARSING: localityText.localityImportParsing(),
PENDING: localityText.localityImportStarting(),
PROGRESS: localityText.localityImportProgressing(),
@@ -61,8 +62,7 @@ export function resolveImportLocalityErrorMessage(
} else {
return commonText.colonLine({
label: key,
- value:
- Object.keys(payload).length === 0 ? '' : `${JSON.stringify(payload)}`,
+ value: Object.keys(payload).length === 0 ? '' : JSON.stringify(payload),
});
}
}
diff --git a/specifyweb/frontend/js_src/lib/components/Notifications/NotificationRenderers.tsx b/specifyweb/frontend/js_src/lib/components/Notifications/NotificationRenderers.tsx
index 551c8ea8955..bbde2408b89 100644
--- a/specifyweb/frontend/js_src/lib/components/Notifications/NotificationRenderers.tsx
+++ b/specifyweb/frontend/js_src/lib/components/Notifications/NotificationRenderers.tsx
@@ -252,6 +252,17 @@ export const notificationRenderers: IR<
>
);
},
+ 'localityimport-parse-succeeded'(notification) {
+ return (
+ <>
+ {localityText.localityImportParsed()}
+
+ {localityText.taskId()}
+ {notification.payload.taskid}
+
+ >
+ );
+ },
'localityimport-succeeded'(notification) {
const [isOpen, handleOpen, handleClose] = useBooleanState();
return (
diff --git a/specifyweb/frontend/js_src/lib/components/QueryBuilder/Components.tsx b/specifyweb/frontend/js_src/lib/components/QueryBuilder/Components.tsx
index 4ce729c0c89..293c84b9ff8 100644
--- a/specifyweb/frontend/js_src/lib/components/QueryBuilder/Components.tsx
+++ b/specifyweb/frontend/js_src/lib/components/QueryBuilder/Components.tsx
@@ -181,8 +181,7 @@ export function QueryButton({
}
/**
- * Create a Record Set from all query results.
- * See also `CreateRecordSetFromQuery`
+ * Create a Record Set from query results.
*/
export function MakeRecordSetButton({
baseTableName,
diff --git a/specifyweb/frontend/js_src/lib/localization/locality.ts b/specifyweb/frontend/js_src/lib/localization/locality.ts
index 5e3a631c52a..46965a73c42 100644
--- a/specifyweb/frontend/js_src/lib/localization/locality.ts
+++ b/specifyweb/frontend/js_src/lib/localization/locality.ts
@@ -314,11 +314,14 @@ export const localityText = createDictionary({
localityImportProgressing: {
'en-us': 'Importing Locality Data Set',
},
+ localityImportParsed: {
+ 'en-us': 'Locality Data Set Import Parsed',
+ },
localityImportFailed: {
- 'en-us': 'Import Locality Data Set Failed',
+ 'en-us': 'Locality Data Set Import Failed',
},
localityImportCancelled: {
- 'en-us': 'Import Locality Data Set Cancelled',
+ 'en-us': 'Locality Data Set Import Cancelled',
},
localityImportSucceeded: {
'en-us': 'Locality Data Set Import Succeeded',
@@ -338,6 +341,10 @@ export const localityText = createDictionary({
'en-us':
'More than one Locality found with guid: {guid:string}. Locality IDs: {localityIds: string}',
},
+ localityImportEffectCounts: {
+ 'en-us':
+ 'The following number of {localityTabelLabel: string} records will be affected by the import and {geoCoordDetailTableLabel: string} records will be created:',
+ },
localityUploadedDescription: {
'en-us':
'The following number of {localityTabelLabel: string} records were updated and {geoCoordDetailTableLabel: string} records were created:',
diff --git a/specifyweb/specify/views.py b/specifyweb/specify/views.py
index 5ab9cc0a663..edd1ab4fcd6 100644
--- a/specifyweb/specify/views.py
+++ b/specifyweb/specify/views.py
@@ -935,7 +935,7 @@ def upload_locality_set_foreground(collection, specify_user, agent, column_heade
"properties": {
"taskstatus": {
"type": "string",
- "enum": [LocalityImportStatus.PENDING, LocalityImportStatus.ABORTED, LocalityImportStatus.PARSED]
+ "enum": [LocalityImportStatus.PENDING, LocalityImportStatus.ABORTED]
},
"taskinfo": {
"type": "string",
@@ -968,6 +968,30 @@ def upload_locality_set_foreground(collection, specify_user, agent, column_heade
"required": ["taskstatus", "taskinfo"],
"additionalProperties": False
},
+ {
+ "type": "object",
+ "properties": {
+ "taskstatus": {
+ "type": "string",
+ "enum": [LocalityImportStatus.PARSED]
+ },
+ "taskinfo": {
+ "type": "object",
+ "properties": {
+ "localities": {
+ "type": "number",
+ "example": 312,
+ },
+ "geocoorddetails": {
+ "type": "number",
+ "example": 204,
+ }
+ }
+ },
+ },
+ "required": ["taskstatus", "taskinfo"],
+ "additionalProperties": False
+ },
{
"type": "object",
"properties": {
From 59bcd420aaf9807fd1556da666ffce5a534fb61f Mon Sep 17 00:00:00 2001
From: melton-jason
Date: Thu, 13 Jun 2024 09:30:04 -0500
Subject: [PATCH 60/71] Fix OpenAPI schema errors
---
specifyweb/context/openapi_schema.py | 1 +
specifyweb/specify/import_locality.py | 9 +++-
specifyweb/specify/views.py | 71 ++++++++++++++-------------
3 files changed, 47 insertions(+), 34 deletions(-)
diff --git a/specifyweb/context/openapi_schema.py b/specifyweb/context/openapi_schema.py
index d99532168a3..808c64f196b 100644
--- a/specifyweb/context/openapi_schema.py
+++ b/specifyweb/context/openapi_schema.py
@@ -414,6 +414,7 @@
"type": "string",
"enum": [
"array",
+ "null",
"boolean",
"integer",
"number",
diff --git a/specifyweb/specify/import_locality.py b/specifyweb/specify/import_locality.py
index b85f5505ac3..5af97beebda 100644
--- a/specifyweb/specify/import_locality.py
+++ b/specifyweb/specify/import_locality.py
@@ -39,7 +39,14 @@
"type": "object"
},
"geocoorddetail": {
- "type": [ "null", "string"],
+ "oneOf": [
+ {
+ "type": "object"
+ },
+ {
+ "type": "null"
+ }
+ ]
},
"locality_id": {
"type": "number",
diff --git a/specifyweb/specify/views.py b/specifyweb/specify/views.py
index edd1ab4fcd6..a2f780710bc 100644
--- a/specifyweb/specify/views.py
+++ b/specifyweb/specify/views.py
@@ -814,7 +814,14 @@ def abort_merge_task(request, merge_id: int) -> http.HttpResponse:
"enum": ["Uploaded"]
},
"recordsetid": {
- "type": ["string", "null"]
+ "oneOf": [
+ {
+ "type": "string"
+ },
+ {
+ "type": "null"
+ }
+ ]
},
"localities": {
"type": "array",
@@ -840,21 +847,14 @@ def abort_merge_task(request, merge_id: int) -> http.HttpResponse:
}
},
"201": {
+ "description": "Task started by the worker. Returns the newly created ID of the task",
"content": {
- "application/json": {
+ "text/plain": {
"schema": {
- "description": "Task started by the worker. Returns the newly created ID of the task",
- "content": {
- "text/plain": {
- "schema": {
- "type": "string",
- "maxLength": 36,
- "example": "7d34dbb2-6e57-4c4b-9546-1fe7bec1acca",
- }
- }
- }
+ "type": "string",
+ "maxLength": 36,
+ "example": "7d34dbb2-6e57-4c4b-9546-1fe7bec1acca",
}
-
}
}
},
@@ -910,7 +910,7 @@ def upload_locality_set_foreground(collection, specify_user, agent, column_heade
if result["type"] == 'ParseError':
return result
-
+
localities = [row["locality"] for row in result["results"]]
recordset = create_localityimport_recordset(
@@ -1003,7 +1003,14 @@ def upload_locality_set_foreground(collection, specify_user, agent, column_heade
"type": "object",
"properties": {
"recordsetid": {
- "type": ["number", "null"]
+ "oneOf": [
+ {
+ "type": "number"
+ },
+ {
+ "type": "null"
+ }
+ ]
},
"localities": {
"type": "array",
@@ -1049,15 +1056,17 @@ def upload_locality_set_foreground(collection, specify_user, agent, column_heade
}
}
},
- '404': {
+ "404": {
"description": 'The localityimport object with task id was not found',
"content": {
"text/plain": {
- "type": "string",
- "example": "The localityimport with task id '7d34dbb2-6e57-4c4b-9546-1fe7bec1acca' was not found"
+ "schema": {
+ "type": "string",
+ "example": "The localityimport with task id '7d34dbb2-6e57-4c4b-9546-1fe7bec1acca' was not found"
+ }
}
}
- },
+ }
}
},
})
@@ -1144,7 +1153,7 @@ def localityimport_status(request: http.HttpRequest, taskid: str):
'properties': {
'type': {
'type': 'string',
- 'enum': ["ABORTED", "NOT_RUNNING"]
+ "enum": ["ABORTED", "NOT_RUNNING"]
},
'message': {
'type': 'string',
@@ -1161,8 +1170,10 @@ def localityimport_status(request: http.HttpRequest, taskid: str):
"description": 'The localityimport object with task id was not found',
"content": {
"text/plain": {
- "type": "string",
- "example": "The localityimport with task id '7d34dbb2-6e57-4c4b-9546-1fe7bec1acca' was not found"
+ "schema": {
+ "type": "string",
+ "example": "The localityimport with task id '7d34dbb2-6e57-4c4b-9546-1fe7bec1acca' was not found"
+ }
}
}
}
@@ -1252,19 +1263,13 @@ def abort_localityimport_task(request: http.HttpRequest, taskid: str):
}
},
"201": {
+ "description": "Task started by the worker. Returns the newly created ID of the task",
"content": {
- "application/json": {
+ "text/plain": {
"schema": {
- "description": "Task started by the worker. Returns the newly created ID of the task",
- "content": {
- "text/plain": {
- "schema": {
- "type": "string",
- "maxLength": 36,
- "example": "7d34dbb2-6e57-4c4b-9546-1fe7bec1acca",
- }
- }
- }
+ "type": "string",
+ "maxLength": 36,
+ "example": "7d34dbb2-6e57-4c4b-9546-1fe7bec1acca",
}
}
}
From c411e0d23a4291cabb94be2d44b43d2f90ba155d Mon Sep 17 00:00:00 2001
From: melton-jason
Date: Thu, 13 Jun 2024 12:20:37 -0500
Subject: [PATCH 61/71] Only reset headerError context on import
---
.../frontend/js_src/lib/components/LocalityImport/index.tsx | 5 ++++-
1 file changed, 4 insertions(+), 1 deletion(-)
diff --git a/specifyweb/frontend/js_src/lib/components/LocalityImport/index.tsx b/specifyweb/frontend/js_src/lib/components/LocalityImport/index.tsx
index 8862d0c8c18..71bcc90d62e 100644
--- a/specifyweb/frontend/js_src/lib/components/LocalityImport/index.tsx
+++ b/specifyweb/frontend/js_src/lib/components/LocalityImport/index.tsx
@@ -125,7 +125,10 @@ export function ImportLocalityDataSet(): JSX.Element {
{
handleParse(headers, data);
- resetContext();
+ setHeaderErrors({
+ missingRequiredHeaders: [] as RA,
+ unrecognizedHeaders: [] as RA,
+ });
}}
>
{commonText.import()}
From 177ae7adb6adae096f2bedd50d069205295846cc Mon Sep 17 00:00:00 2001
From: melton-jason
Date: Thu, 13 Jun 2024 12:39:36 -0500
Subject: [PATCH 62/71] Update LocalityImport Notification renderers
---
.../Notifications/NotificationRenderers.tsx | 20 ++++++++++++++-----
specifyweb/specify/import_locality.py | 6 +++---
2 files changed, 18 insertions(+), 8 deletions(-)
diff --git a/specifyweb/frontend/js_src/lib/components/Notifications/NotificationRenderers.tsx b/specifyweb/frontend/js_src/lib/components/Notifications/NotificationRenderers.tsx
index bbde2408b89..af9fceb6ec5 100644
--- a/specifyweb/frontend/js_src/lib/components/Notifications/NotificationRenderers.tsx
+++ b/specifyweb/frontend/js_src/lib/components/Notifications/NotificationRenderers.tsx
@@ -6,7 +6,7 @@ import { localityText } from '../../localization/locality';
import { mergingText } from '../../localization/merging';
import { notificationsText } from '../../localization/notifications';
import { StringToJsx } from '../../localization/utils';
-import type { IR } from '../../utils/types';
+import type { IR, RA } from '../../utils/types';
import { Button } from '../Atoms/Button';
import { Link } from '../Atoms/Link';
import { getTable } from '../DataModel/tables';
@@ -15,6 +15,7 @@ import {
LocalityImportErrors,
LocalityImportSuccess,
} from '../LocalityImport/Status';
+import type { LocalityImportParseError } from '../LocalityImport/types';
import { mergingQueryParameter } from '../Merging/queryString';
import { FormattedResource } from '../Molecules/FormattedResource';
import { TableIcon } from '../Molecules/TableIcon';
@@ -230,7 +231,10 @@ export const notificationRenderers: IR<
{isOpen && (
+ }
onClose={handleClose}
/>
)}
@@ -273,9 +277,15 @@ export const notificationRenderers: IR<
{isOpen && (
+ }
+ localityIds={
+ notification.payload.localities as unknown as RA
+ }
+ recordSetId={
+ notification.payload.recordsetid as unknown as number | undefined
+ }
onClose={handleClose}
/>
)}
diff --git a/specifyweb/specify/import_locality.py b/specifyweb/specify/import_locality.py
index 5af97beebda..78c1e2b18bd 100644
--- a/specifyweb/specify/import_locality.py
+++ b/specifyweb/specify/import_locality.py
@@ -126,7 +126,7 @@ def progress(state, current: int, total: int):
Message.objects.create(user=li.specifyuser, content=json.dumps({
'type': 'localityimport-failed',
'taskid': li.taskid,
- 'errors': json.dumps([error.to_json() for error in results["errors"]])
+ 'errors': [error.to_json() for error in results["errors"]]
}))
elif results['type'] == 'Uploaded':
recordset_id = None if li.recordset is None else li.recordset.pk
@@ -168,12 +168,12 @@ def progress(state, current: int, total: int):
if li.status == LocalityImportStatus.FAILED:
self.update_state(LocalityImportStatus.FAILED, meta={
- "errors": errors})
+ "errors": [error.to_json() for error in errors]})
Message.objects.create(user=li.specifyuser, content=json.dumps({
'type': 'localityimport-failed',
'taskid': li.taskid,
- 'errors': json.dumps(errors)
+ 'errors': [error.to_json() for error in errors]
}))
elif li.status == LocalityImportStatus.PARSED:
From 4d6f5897453d116c6db66bb84559507b70af3b4f Mon Sep 17 00:00:00 2001
From: melton-jason
Date: Mon, 17 Jun 2024 07:24:24 -0500
Subject: [PATCH 63/71] Insert a LocalityImportRowResult for each Parsed Row
---
specifyweb/specify/import_locality.py | 17 +++++------------
1 file changed, 5 insertions(+), 12 deletions(-)
diff --git a/specifyweb/specify/import_locality.py b/specifyweb/specify/import_locality.py
index 78c1e2b18bd..e1c50871b31 100644
--- a/specifyweb/specify/import_locality.py
+++ b/specifyweb/specify/import_locality.py
@@ -271,19 +271,12 @@ def resolve_localityimport_result(taskid: str, results: Union[Tuple[List[ParsedR
)
else:
status = LocalityImportStatus.PARSED
- localities = len(to_upload)
- geocoorddetails = 0
for parsed in to_upload:
- if parsed['geocoorddetail'] is not None:
- geocoorddetails += 1
- LocalityImportRowResult.objects.create(
- localityimport=li,
- rownumber=-1,
- result=json.dumps({
- "localities": localities,
- "geocoorddetails": geocoorddetails
- })
- )
+ LocalityImportRowResult.objects.create(
+ localityimport=li,
+ rownumber=parsed["row_number"],
+ result=json.dumps(parsed, cls=DjangoJSONEncoder)
+ )
# the results come from upload_locality_set
else:
From e8a2cedd3f7037a9b50408562c896aae2527473e Mon Sep 17 00:00:00 2001
From: melton-jason
Date: Mon, 17 Jun 2024 13:09:37 -0500
Subject: [PATCH 64/71] Handle backend exceptions and refactor Parsed return
---
.../lib/components/LocalityImport/Status.tsx | 101 ++++++++++++++++--
.../lib/components/LocalityImport/types.ts | 17 ++-
.../lib/components/LocalityImport/utils.ts | 1 +
.../Notifications/NotificationRenderers.tsx | 31 +++++-
.../js_src/lib/localization/locality.ts | 6 ++
specifyweb/specify/import_locality.py | 49 ++++++---
specifyweb/specify/views.py | 85 +++++++++++----
7 files changed, 241 insertions(+), 49 deletions(-)
diff --git a/specifyweb/frontend/js_src/lib/components/LocalityImport/Status.tsx b/specifyweb/frontend/js_src/lib/components/LocalityImport/Status.tsx
index 82f92dc2ea0..7a50c5c0af6 100644
--- a/specifyweb/frontend/js_src/lib/components/LocalityImport/Status.tsx
+++ b/specifyweb/frontend/js_src/lib/components/LocalityImport/Status.tsx
@@ -24,10 +24,12 @@ import { LoadingContext } from '../Core/Contexts';
import { fetchResource } from '../DataModel/resource';
import { tables } from '../DataModel/tables';
import { softFail } from '../Errors/Crash';
+import { produceStackTrace } from '../Errors/stackTrace';
import { RecordSelectorFromIds } from '../FormSliders/RecordSelectorFromIds';
import { mergeCellBackground } from '../Merging/Header';
import { useTitle } from '../Molecules/AppTitle';
import { Dialog } from '../Molecules/Dialog';
+import { downloadFile } from '../Molecules/FilePicker';
import { TableIcon } from '../Molecules/TableIcon';
import { hasToolPermission } from '../Permissions/helpers';
import { CreateRecordSet } from '../QueryBuilder/CreateRecordSet';
@@ -113,16 +115,26 @@ export function LocalityImportStatus({
/>
) : state.taskstatus === 'PARSED' ? (
geocoorddetail !== null
+ ).length
+ }
+ localities={state.taskinfo.rows.length}
onClose={handleClose}
onImport={handleImport}
/>
- ) : state.taskstatus === 'FAILED' ? (
-
+ ) : state.taskstatus === 'FAILED' ? (
+
) : state.taskstatus === 'PENDING' ? (
- ) : (
+ ) : state.taskstatus === 'ABORTED' ? (
{commonText.close()}}
children={null}
@@ -145,6 +157,47 @@ export function LocalityImportStatus({
modal={false}
onClose={handleClose}
/>
+ ) : (
+
+
+ void downloadFile(
+ `Locality Data Set ${taskId} Report - ${new Date().toJSON()}.txt`,
+ produceStackTrace(state.taskinfo)
+ )
+ }
+ >
+ {commonText.downloadErrorMessage()}
+
+
+ {commonText.close()}
+ >
+ }
+ dimensionsKey={statusDimensionKey}
+ header={localityText.localityImportWentWrong()}
+ modal={false}
+ onClose={handleClose}
+ >
+
+ {state.taskstatus}
+ {typeof state.taskinfo === 'object' ? (
+
+ {Object.entries(state.taskinfo).map(([key, message], index) => (
+
+ {commonText.colonLine({
+ label: key,
+ value: message,
+ })}
+
+ ))}
+
+ ) : (
+ {state.taskinfo}
+ )}
+
+
);
}
@@ -331,7 +384,43 @@ export function LocalityImportSuccess({
);
}
-export function LocalityImportErrors({
+export function LocalityImportFailed({
+ taskId,
+ traceback,
+ onClose: handleClose,
+}: {
+ readonly taskId: string;
+ readonly traceback: string;
+ readonly onClose: () => void;
+}): JSX.Element {
+ return (
+
+
+ void downloadFile(
+ `Locality Data Set ${taskId} Crash Report - ${new Date().toJSON()}.txt`,
+ produceStackTrace(traceback)
+ )
+ }
+ >
+ {commonText.downloadErrorMessage()}
+
+
+ {commonText.close()}
+ >
+ }
+ header={localityText.localityImportFailed()}
+ icon="error"
+ onClose={handleClose}
+ >
+ {localityText.localityImportWentWrong()}
+
+ );
+}
+
+export function LocalityImportParseErrors({
errors,
onClose: handleClose,
}: {
diff --git a/specifyweb/frontend/js_src/lib/components/LocalityImport/types.ts b/specifyweb/frontend/js_src/lib/components/LocalityImport/types.ts
index d9c5ce4d312..bd993fa836c 100644
--- a/specifyweb/frontend/js_src/lib/components/LocalityImport/types.ts
+++ b/specifyweb/frontend/js_src/lib/components/LocalityImport/types.ts
@@ -21,6 +21,7 @@ export type LocalityImportParseError = {
export type LocalityImportTaskStatus =
| 'ABORTED'
| 'FAILED'
+ | 'PARSE_FAILED'
| 'PARSED'
| 'PARSING'
| 'PENDING'
@@ -36,6 +37,16 @@ export type LocalityImportState =
'FAILED',
{
readonly taskstatus: 'FAILED';
+ readonly taskinfo: {
+ readonly error: string;
+ readonly traceback: string;
+ };
+ }
+ >
+ | State<
+ 'PARSE_FAILED',
+ {
+ readonly taskstatus: 'PARSE_FAILED';
readonly taskinfo: {
readonly errors: RA;
};
@@ -46,8 +57,10 @@ export type LocalityImportState =
{
readonly taskstatus: 'PARSED';
readonly taskinfo: {
- readonly localities: number;
- readonly geocoorddetails: number;
+ readonly rows: RA<{
+ readonly locality: object;
+ readonly geocoorddetail: object | null;
+ }>;
};
}
>
diff --git a/specifyweb/frontend/js_src/lib/components/LocalityImport/utils.ts b/specifyweb/frontend/js_src/lib/components/LocalityImport/utils.ts
index ea6be2554fa..097593fce25 100644
--- a/specifyweb/frontend/js_src/lib/components/LocalityImport/utils.ts
+++ b/specifyweb/frontend/js_src/lib/components/LocalityImport/utils.ts
@@ -36,6 +36,7 @@ export const localityImportStatusLocalization: RR<
PENDING: localityText.localityImportStarting(),
PROGRESS: localityText.localityImportProgressing(),
FAILED: localityText.localityImportFailed(),
+ PARSE_FAILED: localityText.localityImportParseFailure(),
ABORTED: localityText.localityImportCancelled(),
SUCCEEDED: localityText.localityImportSucceeded(),
};
diff --git a/specifyweb/frontend/js_src/lib/components/Notifications/NotificationRenderers.tsx b/specifyweb/frontend/js_src/lib/components/Notifications/NotificationRenderers.tsx
index af9fceb6ec5..f296bae4c14 100644
--- a/specifyweb/frontend/js_src/lib/components/Notifications/NotificationRenderers.tsx
+++ b/specifyweb/frontend/js_src/lib/components/Notifications/NotificationRenderers.tsx
@@ -12,7 +12,8 @@ import { Link } from '../Atoms/Link';
import { getTable } from '../DataModel/tables';
import { userInformation } from '../InitialContext/userInformation';
import {
- LocalityImportErrors,
+ LocalityImportFailed,
+ LocalityImportParseErrors,
LocalityImportSuccess,
} from '../LocalityImport/Status';
import type { LocalityImportParseError } from '../LocalityImport/types';
@@ -221,16 +222,16 @@ export const notificationRenderers: IR<
>
);
},
- 'localityimport-failed'(notification) {
+ 'localityimport-parse-failed'(notification) {
const [isOpen, handleOpen, handleClose] = useBooleanState();
return (
<>
- {localityText.localityImportFailed()}
+ {localityText.localityImportParseFailure()}
{localityText.localityImportFailureResults()}
{isOpen && (
-
@@ -245,6 +246,28 @@ export const notificationRenderers: IR<
>
);
},
+ 'localityimport-failed'(notification) {
+ const [isOpen, handleOpen, handleClose] = useBooleanState();
+ return (
+ <>
+ {localityText.localityImportFailed()}
+
+ {localityText.localityImportFailureResults()}
+
+ {isOpen && (
+
+ )}
+
+ {localityText.taskId()}
+ {notification.payload.taskid}
+
+ >
+ );
+ },
'localityimport-aborted'(notification) {
return (
<>
diff --git a/specifyweb/frontend/js_src/lib/localization/locality.ts b/specifyweb/frontend/js_src/lib/localization/locality.ts
index 46965a73c42..7960b767986 100644
--- a/specifyweb/frontend/js_src/lib/localization/locality.ts
+++ b/specifyweb/frontend/js_src/lib/localization/locality.ts
@@ -320,12 +320,18 @@ export const localityText = createDictionary({
localityImportFailed: {
'en-us': 'Locality Data Set Import Failed',
},
+ localityImportParseFailure: {
+ 'en-us': 'Locality Data Set Parse Failed',
+ },
localityImportCancelled: {
'en-us': 'Locality Data Set Import Cancelled',
},
localityImportSucceeded: {
'en-us': 'Locality Data Set Import Succeeded',
},
+ localityImportWentWrong: {
+ 'en-us': 'Something went wrong during the Locality Import process',
+ },
localityImportErrorFileName: {
comment:
'The file name which is used when Parse Errors are exported. The .csv file extension is appended to the end of this string',
diff --git a/specifyweb/specify/import_locality.py b/specifyweb/specify/import_locality.py
index e1c50871b31..2765c0afdd3 100644
--- a/specifyweb/specify/import_locality.py
+++ b/specifyweb/specify/import_locality.py
@@ -101,10 +101,27 @@ class LocalityImportStatus:
PROGRESS = 'PROGRESS'
SUCCEEDED = 'SUCCEEDED'
ABORTED = 'ABORTED'
+ PARSE_FAILED = 'PARSE_FAILED'
FAILED = 'FAILED'
-@app.task(base=LogErrorsTask, bind=True)
+class LocalityImportTask(LogErrorsTask):
+ def on_failure(self, exc, task_id, args, kwargs, einfo):
+ with transaction.atomic():
+ locality_import = LocalityImport.objects.get(taskid=task_id)
+
+ Message.objects.create(user=locality_import.specifyuser, content=json.dumps({
+ 'type': 'localityimport-failed',
+ 'taskid': task_id,
+ 'traceback': str(einfo.traceback)
+ }))
+ locality_import.status = LocalityImportStatus.FAILED
+ locality_import.save()
+
+ return super().on_failure(exc, task_id, args, kwargs, einfo)
+
+
+@app.task(base=LocalityImportTask, bind=True)
def import_locality_task(self, collection_id: int, column_headers: List[str], data: List[List[str]], create_recordset: bool) -> None:
def progress(state, current: int, total: int):
self.update_state(state=state, meta={
@@ -120,11 +137,11 @@ def progress(state, current: int, total: int):
self.request.id, results, collection, create_recordset)
if results['type'] == 'ParseError':
- self.update_state(LocalityImportStatus.FAILED, meta={
+ self.update_state(LocalityImportStatus.PARSE_FAILED, meta={
"errors": [error.to_json() for error in results["errors"]]})
Message.objects.create(user=li.specifyuser, content=json.dumps({
- 'type': 'localityimport-failed',
+ 'type': 'localityimport-parse-failed',
'taskid': li.taskid,
'errors': [error.to_json() for error in results["errors"]]
}))
@@ -151,7 +168,7 @@ def progress(state, current: int, total: int):
raise Ignore()
-@app.task(base=LogErrorsTask, bind=True)
+@app.task(base=LocalityImportTask, bind=True)
def parse_locality_task(self, collection_id: int, column_headers: List[str], data: List[List[str]]):
def progress(state, current: int, total: int):
self.update_state(state=state, meta={
@@ -166,12 +183,12 @@ def progress(state, current: int, total: int):
li = resolve_localityimport_result(
self.request.id, (to_upload, errors), collection)
- if li.status == LocalityImportStatus.FAILED:
- self.update_state(LocalityImportStatus.FAILED, meta={
+ if li.status == LocalityImportStatus.PARSE_FAILED:
+ self.update_state(LocalityImportStatus.PARSE_FAILED, meta={
"errors": [error.to_json() for error in errors]})
Message.objects.create(user=li.specifyuser, content=json.dumps({
- 'type': 'localityimport-failed',
+ 'type': 'localityimport-parse-failed',
'taskid': li.taskid,
'errors': [error.to_json() for error in errors]
}))
@@ -257,11 +274,13 @@ def resolve_localityimport_result(taskid: str, results: Union[Tuple[List[ParsedR
li = LocalityImport.objects.get(taskid=taskid)
+ li.results.get_queryset().delete()
+
# the results come from parse_locality_set
if isinstance(results, tuple):
to_upload, errors = results
if len(errors) > 0:
- status = LocalityImportStatus.FAILED
+ li.status = LocalityImportStatus.PARSE_FAILED
for error in errors:
result = error.to_json()
LocalityImportRowResult.objects.create(
@@ -270,7 +289,7 @@ def resolve_localityimport_result(taskid: str, results: Union[Tuple[List[ParsedR
result=json.dumps(result, cls=DjangoJSONEncoder)
)
else:
- status = LocalityImportStatus.PARSED
+ li.status = LocalityImportStatus.PARSED
for parsed in to_upload:
LocalityImportRowResult.objects.create(
localityimport=li,
@@ -281,7 +300,7 @@ def resolve_localityimport_result(taskid: str, results: Union[Tuple[List[ParsedR
# the results come from upload_locality_set
else:
if results['type'] == 'ParseError':
- status = LocalityImportStatus.FAILED
+ li.status = LocalityImportStatus.PARSE_FAILED
for error in results['errors']:
result = error.to_json()
LocalityImportRowResult.objects.create(
@@ -291,7 +310,7 @@ def resolve_localityimport_result(taskid: str, results: Union[Tuple[List[ParsedR
)
elif results['type'] == 'Uploaded':
- status = LocalityImportStatus.SUCCEEDED
+ li.status = LocalityImportStatus.SUCCEEDED
localities = []
for index, row in enumerate(results['results']):
row_number = index + 1
@@ -306,7 +325,6 @@ def resolve_localityimport_result(taskid: str, results: Union[Tuple[List[ParsedR
li.recordset = create_localityimport_recordset(
collection, li.specifyuser, localities) if create_recordset else None
- li.status = status
li.save()
return li
@@ -435,11 +453,8 @@ def upload_from_parsed(uploadables: List[ParsedRow], progress: Optional[Progress
for field, value in parsed_row['locality'].items():
setattr(locality, field, value)
locality.save()
- try:
- uploaded[parsed_row['row_number'] -
- 1]["locality"] = locality_id
- except:
- raise KeyError(uploaded)
+ uploaded[parsed_row['row_number'] -
+ 1]["locality"] = locality_id
if parsed_row['geocoorddetail'] is not None:
locality.geocoorddetails.get_queryset().delete()
diff --git a/specifyweb/specify/views.py b/specifyweb/specify/views.py
index a2f780710bc..b3b9172536f 100644
--- a/specifyweb/specify/views.py
+++ b/specifyweb/specify/views.py
@@ -978,13 +978,34 @@ def upload_locality_set_foreground(collection, specify_user, agent, column_heade
"taskinfo": {
"type": "object",
"properties": {
- "localities": {
- "type": "number",
- "example": 312,
- },
- "geocoorddetails": {
- "type": "number",
- "example": 204,
+ "rows": {
+ "type": "array",
+ "items": {
+ "type": "object",
+ "properties": {
+ "locality": {
+ "type": "object"
+ },
+ "geocoorddetail": {
+ "oneOf": [
+ {
+ "type": "null"
+ },
+ {
+ "type": "object"
+ }
+ ]
+ },
+ "locality_id": {
+ "description": "The ID of the matched Locality",
+ "type": "number"
+ },
+ "row_number": {
+ "type" : "number"
+ }
+ },
+ "required": ["locality", "geocoorddetail"]
+ }
}
}
},
@@ -1039,7 +1060,7 @@ def upload_locality_set_foreground(collection, specify_user, agent, column_heade
"properties": {
"taskstatus": {
"type": "string",
- "enum": [LocalityImportStatus.FAILED]
+ "enum": [LocalityImportStatus.PARSE_FAILED]
},
"taskinfo": {
"type": "object",
@@ -1050,6 +1071,28 @@ def upload_locality_set_foreground(collection, specify_user, agent, column_heade
},
"required": ["taskstatus", "taskinfo"],
"additionalProperties": False
+ },
+ {
+ "type": "object",
+ "properties": {
+ "taskstatus": {
+ "type": "string",
+ "enum": [LocalityImportStatus.FAILED]
+ },
+ "taskinfo": {
+ "type": "object",
+ "properties": {
+ "error": {
+ "type": "string"
+ },
+ "traceback": {
+ "type": "string"
+ }
+ }
+ }
+ },
+ "required": ["taskstatus", "taskinfo"],
+ "additionalProperties": False
}
]
}
@@ -1079,16 +1122,22 @@ def localityimport_status(request: http.HttpRequest, taskid: str):
result = import_locality_task.AsyncResult(locality_import.taskid)
- resolved_state = LocalityImportStatus.ABORTED if result.state == CELERY_TASK_STATE.REVOKED else result.state
+ resolved_state = LocalityImportStatus.ABORTED if result.state == CELERY_TASK_STATE.REVOKED else LocalityImportStatus.FAILED if result.state == CELERY_TASK_STATE.FAILURE else result.state
status = {
'taskstatus': resolved_state,
'taskinfo': result.info if isinstance(result.info, dict) else repr(result.info)
}
- if locality_import.status == LocalityImportStatus.FAILED:
+ if resolved_state == LocalityImportStatus.FAILED:
+ status["taskinfo"] = {
+ 'error': str(result.result),
+ 'traceback': str(result.traceback)
+ }
+
+ elif locality_import.status == LocalityImportStatus.PARSE_FAILED:
- status["taskstatus"] = LocalityImportStatus.FAILED
+ status["taskstatus"] = LocalityImportStatus.PARSE_FAILED
if isinstance(result.info, dict) and 'errors' in result.info.keys():
errors = result.info["errors"]
@@ -1101,16 +1150,12 @@ def localityimport_status(request: http.HttpRequest, taskid: str):
elif locality_import.status == LocalityImportStatus.PARSED:
status["taskstatus"] = LocalityImportStatus.PARSED
- if isinstance(result.info, dict) and resolved_state == LocalityImportStatus.PARSED:
- result = {
- "localities": result.info["localities"],
- "geocoorddetails": result.info["geocoorddetails"]
- }
- else:
- results = locality_import.results.get_queryset().get(rownumber=-1)
- result = json.loads(results.result)
+ results = locality_import.results.all()
+ rows = [json.loads(row.result) for row in results]
- status["taskinfo"] = result
+ status["taskinfo"] = {
+ "rows": rows
+ }
elif locality_import.status == LocalityImportStatus.SUCCEEDED:
status["taskstatus"] = LocalityImportStatus.SUCCEEDED
From 13c5b59c2e821d8a1356bb3314c666f79e2b5167 Mon Sep 17 00:00:00 2001
From: melton-jason
Date: Mon, 17 Jun 2024 15:22:28 -0500
Subject: [PATCH 65/71] Extract Unknown State to own component
---
.../lib/components/LocalityImport/Status.tsx | 99 +++++++++++--------
1 file changed, 60 insertions(+), 39 deletions(-)
diff --git a/specifyweb/frontend/js_src/lib/components/LocalityImport/Status.tsx b/specifyweb/frontend/js_src/lib/components/LocalityImport/Status.tsx
index 7a50c5c0af6..698be3e9b8d 100644
--- a/specifyweb/frontend/js_src/lib/components/LocalityImport/Status.tsx
+++ b/specifyweb/frontend/js_src/lib/components/LocalityImport/Status.tsx
@@ -158,46 +158,11 @@ export function LocalityImportStatus({
onClose={handleClose}
/>
) : (
-
-
- void downloadFile(
- `Locality Data Set ${taskId} Report - ${new Date().toJSON()}.txt`,
- produceStackTrace(state.taskinfo)
- )
- }
- >
- {commonText.downloadErrorMessage()}
-
-
- {commonText.close()}
- >
- }
- dimensionsKey={statusDimensionKey}
- header={localityText.localityImportWentWrong()}
- modal={false}
+
-
- {state.taskstatus}
- {typeof state.taskinfo === 'object' ? (
-
- {Object.entries(state.taskinfo).map(([key, message], index) => (
-
- {commonText.colonLine({
- label: key,
- value: message,
- })}
-
- ))}
-
- ) : (
- {state.taskinfo}
- )}
-
-
+ />
);
}
@@ -503,3 +468,59 @@ export function LocalityImportParseErrors({
);
}
+
+function LocalityImportUnknownState({
+ taskId,
+ state: { taskstatus, taskinfo },
+ onClose: handleClose,
+}: {
+ readonly taskId: string;
+ readonly state: {
+ readonly taskstatus: string;
+ readonly taskinfo: object | string;
+ };
+ readonly onClose: () => void;
+}): JSX.Element {
+ return (
+
+
+ void downloadFile(
+ `Locality Data Set ${taskId} Report - ${new Date().toJSON()}.txt`,
+ produceStackTrace(taskinfo)
+ )
+ }
+ >
+ {commonText.downloadErrorMessage()}
+
+
+ {commonText.close()}
+ >
+ }
+ dimensionsKey={statusDimensionKey}
+ header={localityText.localityImportWentWrong()}
+ modal={false}
+ onClose={handleClose}
+ >
+
+ {taskstatus}
+ {typeof taskinfo === 'object' ? (
+
+ {Object.entries(taskinfo).map(([key, message], index) => (
+
+ {commonText.colonLine({
+ label: key,
+ value: message,
+ })}
+
+ ))}
+
+ ) : (
+ {taskinfo}
+ )}
+
+
+ );
+}
From 66fca8f5d8ac1010d391e1e2c842dda3e2338cdd Mon Sep 17 00:00:00 2001
From: melton-jason
Date: Tue, 18 Jun 2024 08:39:29 -0500
Subject: [PATCH 66/71] Rename tool to 'Locality Update Tool'
---
.../components/Header/userToolDefinitions.ts | 2 +-
.../Status.tsx | 158 +++++++++---------
.../index.tsx | 36 ++--
.../types.ts | 10 +-
.../utils.ts | 32 ++--
.../Notifications/NotificationRenderers.tsx | 48 +++---
.../js_src/lib/components/Router/Routes.tsx | 5 +-
.../js_src/lib/localization/header.ts | 4 +-
.../js_src/lib/localization/locality.ts | 67 ++++----
...localityupdate_localityupdaterowresult.py} | 30 +++-
.../migrations/0007_auto_20240613_0348.py | 28 ----
specifyweb/notifications/models.py | 22 +--
...{import_locality.py => update_locality.py} | 96 +++++------
specifyweb/specify/urls.py | 4 +-
specifyweb/specify/views.py | 84 +++++-----
15 files changed, 311 insertions(+), 315 deletions(-)
rename specifyweb/frontend/js_src/lib/components/{LocalityImport => LocalityUpdate}/Status.tsx (84%)
rename specifyweb/frontend/js_src/lib/components/{LocalityImport => LocalityUpdate}/index.tsx (83%)
rename specifyweb/frontend/js_src/lib/components/{LocalityImport => LocalityUpdate}/types.ts (90%)
rename specifyweb/frontend/js_src/lib/components/{LocalityImport => LocalityUpdate}/utils.ts (65%)
rename specifyweb/notifications/migrations/{0006_localityimport.py => 0006_localityupdate_localityupdaterowresult.py} (64%)
delete mode 100644 specifyweb/notifications/migrations/0007_auto_20240613_0348.py
rename specifyweb/specify/{import_locality.py => update_locality.py} (86%)
diff --git a/specifyweb/frontend/js_src/lib/components/Header/userToolDefinitions.ts b/specifyweb/frontend/js_src/lib/components/Header/userToolDefinitions.ts
index 24abb14f585..f27bd4d392b 100644
--- a/specifyweb/frontend/js_src/lib/components/Header/userToolDefinitions.ts
+++ b/specifyweb/frontend/js_src/lib/components/Header/userToolDefinitions.ts
@@ -101,7 +101,7 @@ const rawUserTools = ensure>>>()({
},
[commonText.import()]: {
coGeImport: {
- title: headerText.importLocalityDataset(),
+ title: headerText.localityUpdateTool(),
url: '/specify/import/locality-dataset/',
icon: icons.globe,
},
diff --git a/specifyweb/frontend/js_src/lib/components/LocalityImport/Status.tsx b/specifyweb/frontend/js_src/lib/components/LocalityUpdate/Status.tsx
similarity index 84%
rename from specifyweb/frontend/js_src/lib/components/LocalityImport/Status.tsx
rename to specifyweb/frontend/js_src/lib/components/LocalityUpdate/Status.tsx
index 698be3e9b8d..2c3d307e3da 100644
--- a/specifyweb/frontend/js_src/lib/components/LocalityImport/Status.tsx
+++ b/specifyweb/frontend/js_src/lib/components/LocalityUpdate/Status.tsx
@@ -37,18 +37,18 @@ import { downloadDataSet } from '../WorkBench/helpers';
import { RemainingLoadingTime } from '../WorkBench/RemainingLoadingTime';
import { TableRecordCounts } from '../WorkBench/Results';
import type {
- LocalityImportParseError,
- LocalityImportState,
- LocalityImportTaskStatus,
+ LocalityUpdateParseError,
+ LocalityUpdateState,
+ LocalityUpdateTaskStatus,
} from './types';
import {
- localityImportStatusLocalization,
- resolveImportLocalityErrorMessage,
+ localityUpdateStatusLocalization,
+ resolveLocalityUpdateErrorMessage,
} from './utils';
-const statusDimensionKey = 'localityimport-status';
+const statusDimensionKey = 'localityupdate-status';
-export function LocalityImportStatus({
+export function LocalityUpdateStatus({
taskId,
onImport: handleImport,
onClose: handleClose,
@@ -57,7 +57,7 @@ export function LocalityImportStatus({
readonly onImport: () => void;
readonly onClose: () => void;
}): JSX.Element {
- const [state, setState] = React.useState({
+ const [state, setState] = React.useState({
taskstatus: 'PENDING',
type: 'PENDING',
taskinfo: 'None',
@@ -66,7 +66,7 @@ export function LocalityImportStatus({
React.useEffect(() => {
let destructorCalled = false;
const fetchStatus = () =>
- void ajax(`/api/localityset/status/${taskId}/`, {
+ void ajax(`/api/localityset/status/${taskId}/`, {
headers: { Accept: 'application/json' },
})
.then(({ data }) => {
@@ -74,7 +74,7 @@ export function LocalityImportStatus({
if (
!destructorCalled &&
(
- ['PENDING', 'PARSING', 'PROGRESS'] as RA
+ ['PENDING', 'PARSING', 'PROGRESS'] as RA
).includes(data.taskstatus)
)
globalThis.setTimeout(fetchStatus, SECOND);
@@ -95,11 +95,11 @@ export function LocalityImportStatus({
[taskId]
);
- const title = localityImportStatusLocalization[state.taskstatus];
+ const title = localityUpdateStatusLocalization[state.taskstatus];
useTitle(title);
return state.taskstatus === 'PARSING' || state.taskstatus === 'PROGRESS' ? (
-
) : state.taskstatus === 'SUCCEEDED' ? (
-
) : state.taskstatus === 'PARSED' ? (
- geocoorddetail !== null
@@ -125,12 +125,12 @@ export function LocalityImportStatus({
onImport={handleImport}
/>
) : state.taskstatus === 'PARSE_FAILED' ? (
-
) : state.taskstatus === 'FAILED' ? (
-
) : (
-
>
}
- header={localityImportStatusLocalization.PARSED}
+ header={localityUpdateStatusLocalization.PARSED}
modal={false}
onClose={handleClose}
>
- {localityText.localityImportEffectCounts({
+ {localityText.localityUpdateEffectCounts({
localityTabelLabel: tables.Locality.label,
geoCoordDetailTableLabel: tables.GeoCoordDetail.label,
})}
@@ -253,7 +253,7 @@ export function LocalityImportParsed({
);
}
-export function LocalityImportSuccess({
+export function LocalityUpdateSuccess({
localityIds,
geoCoordDetailIds,
recordSetId,
@@ -280,7 +280,7 @@ export function LocalityImportSuccess({
return (
{commonText.close()}}
- header={localityImportStatusLocalization.SUCCEEDED}
+ header={localityUpdateStatusLocalization.SUCCEEDED}
modal={false}
onClose={handleClose}
>
@@ -333,7 +333,7 @@ export function LocalityImportSuccess({
isInRecordSet={false}
newResource={undefined}
table={tables.Locality}
- title={localityText.localityImportResults()}
+ title={localityText.localityUpdateResults()}
totalCount={localityIds.length}
onAdd={undefined}
onClone={undefined}
@@ -349,47 +349,11 @@ export function LocalityImportSuccess({
);
}
-export function LocalityImportFailed({
- taskId,
- traceback,
- onClose: handleClose,
-}: {
- readonly taskId: string;
- readonly traceback: string;
- readonly onClose: () => void;
-}): JSX.Element {
- return (
-
-
- void downloadFile(
- `Locality Data Set ${taskId} Crash Report - ${new Date().toJSON()}.txt`,
- produceStackTrace(traceback)
- )
- }
- >
- {commonText.downloadErrorMessage()}
-
-
- {commonText.close()}
- >
- }
- header={localityText.localityImportFailed()}
- icon="error"
- onClose={handleClose}
- >
- {localityText.localityImportWentWrong()}
-
- );
-}
-
-export function LocalityImportParseErrors({
+export function LocalityUpdateParseErrors({
errors,
onClose: handleClose,
}: {
- readonly errors: RA;
+ readonly errors: RA;
readonly onClose: () => void;
}): JSX.Element | null {
const loading = React.useContext(LoadingContext);
@@ -401,9 +365,9 @@ export function LocalityImportParseErrors({
{commonText.close()}
{
- const fileName = `${localityText.localityImportErrorFileName({
+ const fileName = localityText.localityUpdateParseErrorFileName({
date: new Date().toDateString(),
- })}.csv`;
+ });
const columns = [
preferencesText.row(),
@@ -415,12 +379,14 @@ export function LocalityImportParseErrors({
({ message, payload, field, rowNumber }) => [
rowNumber.toString(),
field,
- resolveImportLocalityErrorMessage(message, payload),
+ resolveLocalityUpdateErrorMessage(message, payload),
]
);
loading(
- downloadDataSet(fileName, data, columns, ',').catch(softFail)
+ downloadDataSet(`${fileName}.csv`, data, columns, ',').catch(
+ softFail
+ )
);
}}
>
@@ -428,7 +394,7 @@ export function LocalityImportParseErrors({
>
}
- header={localityText.localityImportFailureResults()}
+ header={localityText.localityUpdateFailureResults()}
icon="error"
specialMode="noGradient"
onClose={handleClose}
@@ -461,7 +427,7 @@ export function LocalityImportParseErrors({
| {rowNumber} |
{field} |
- {resolveImportLocalityErrorMessage(message, payload)} |
+ {resolveLocalityUpdateErrorMessage(message, payload)} |
))}
@@ -469,7 +435,45 @@ export function LocalityImportParseErrors({
);
}
-function LocalityImportUnknownState({
+export function LocalityUpdateFailed({
+ taskId,
+ traceback,
+ onClose: handleClose,
+}: {
+ readonly taskId: string;
+ readonly traceback: string;
+ readonly onClose: () => void;
+}): JSX.Element {
+ return (
+
+ {
+ const fileName = localityText.localityUpdateCrashFileName({
+ taskId,
+ date: new Date().toDateString(),
+ });
+
+ downloadFile(`${fileName}.txt`, produceStackTrace(traceback));
+ }}
+ >
+ {commonText.downloadErrorMessage()}
+
+
+ {commonText.close()}
+ >
+ }
+ header={localityText.localityUpdateFailed()}
+ icon="error"
+ onClose={handleClose}
+ >
+ {localityText.localityUpdateWentWrong()}
+
+ );
+}
+
+function LocalityUpdateUnknownState({
taskId,
state: { taskstatus, taskinfo },
onClose: handleClose,
@@ -486,12 +490,14 @@ function LocalityImportUnknownState({
buttons={
<>
- void downloadFile(
- `Locality Data Set ${taskId} Report - ${new Date().toJSON()}.txt`,
- produceStackTrace(taskinfo)
- )
- }
+ onClick={(): void => {
+ const fileName = localityText.localityUpdateCrashFileName({
+ taskId,
+ date: new Date().toDateString(),
+ });
+
+ downloadFile(`${fileName}.txt`, produceStackTrace(taskinfo));
+ }}
>
{commonText.downloadErrorMessage()}
@@ -500,7 +506,7 @@ function LocalityImportUnknownState({
>
}
dimensionsKey={statusDimensionKey}
- header={localityText.localityImportWentWrong()}
+ header={localityText.localityUpdateWentWrong()}
modal={false}
onClose={handleClose}
>
diff --git a/specifyweb/frontend/js_src/lib/components/LocalityImport/index.tsx b/specifyweb/frontend/js_src/lib/components/LocalityUpdate/index.tsx
similarity index 83%
rename from specifyweb/frontend/js_src/lib/components/LocalityImport/index.tsx
rename to specifyweb/frontend/js_src/lib/components/LocalityUpdate/index.tsx
index 71bcc90d62e..738524cd8f6 100644
--- a/specifyweb/frontend/js_src/lib/components/LocalityImport/index.tsx
+++ b/specifyweb/frontend/js_src/lib/components/LocalityUpdate/index.tsx
@@ -12,16 +12,16 @@ import { formatConjunction } from '../Atoms/Internationalization';
import { LoadingContext } from '../Core/Contexts';
import { CsvFilePicker } from '../Molecules/CsvFilePicker';
import { Dialog } from '../Molecules/Dialog';
-import { LocalityImportStatus } from './Status';
-import type { LocalityImportHeader } from './types';
+import { LocalityUpdateStatus } from './Status';
+import type { LocalityUpdateHeader } from './types';
import {
- localityImportAcceptedHeaders,
- localityImportRequiredHeaders,
+ localityUpdateAcceptedHeaders,
+ localityUpdateRequiredHeaders,
} from './utils';
-export function ImportLocalityDataSet(): JSX.Element {
+export function LocalityUpdateFromDataSet(): JSX.Element {
const [headerErrors, setHeaderErrors] = React.useState({
- missingRequiredHeaders: [] as RA,
+ missingRequiredHeaders: [] as RA,
unrecognizedHeaders: [] as RA,
});
@@ -33,7 +33,7 @@ export function ImportLocalityDataSet(): JSX.Element {
function resetContext(): void {
setHeaderErrors({
- missingRequiredHeaders: [] as RA,
+ missingRequiredHeaders: [] as RA,
unrecognizedHeaders: [] as RA,
});
setHeaders([]);
@@ -79,15 +79,15 @@ export function ImportLocalityDataSet(): JSX.Element {
<>
{
const foundHeaderErrors = headers.reduce(
(accumulator, currentHeader) => {
const parsedHeader = currentHeader
.toLowerCase()
- .trim() as LocalityImportHeader;
+ .trim() as LocalityUpdateHeader;
const isUnknown =
- !localityImportAcceptedHeaders().has(parsedHeader);
+ !localityUpdateAcceptedHeaders().has(parsedHeader);
return {
missingRequiredHeaders:
@@ -100,7 +100,7 @@ export function ImportLocalityDataSet(): JSX.Element {
};
},
{
- missingRequiredHeaders: Array.from(localityImportRequiredHeaders),
+ missingRequiredHeaders: Array.from(localityUpdateRequiredHeaders),
unrecognizedHeaders: [] as RA,
}
);
@@ -126,7 +126,7 @@ export function ImportLocalityDataSet(): JSX.Element {
onClick={(): void => {
handleParse(headers, data);
setHeaderErrors({
- missingRequiredHeaders: [] as RA,
+ missingRequiredHeaders: [] as RA,
unrecognizedHeaders: [] as RA,
});
}}
@@ -136,7 +136,7 @@ export function ImportLocalityDataSet(): JSX.Element {
)}
>
}
- header={localityText.localityImportHeaderError()}
+ header={localityText.localityUpdateHeaderError()}
icon={
headerErrors.missingRequiredHeaders.length === 0
? 'warning'
@@ -147,7 +147,7 @@ export function ImportLocalityDataSet(): JSX.Element {
<>
{headerErrors.missingRequiredHeaders.length > 0 && (
<>
- {localityText.localityImportMissingHeader()}
+ {localityText.localityUpdateMissingHeader()}
{formatConjunction(
headerErrors.missingRequiredHeaders as RA
@@ -157,7 +157,7 @@ export function ImportLocalityDataSet(): JSX.Element {
)}
{headerErrors.unrecognizedHeaders.length > 0 && (
<>
- {localityText.localityImportUnrecognizedHeaders()}
+ {localityText.localityUpdateUnrecognizedHeaders()}
{formatConjunction(
headerErrors.unrecognizedHeaders as RA
@@ -165,11 +165,11 @@ export function ImportLocalityDataSet(): JSX.Element {
>
)}
- {localityText.localityImportedAcceptedHeaders()}
+ {localityText.localityUpdateAcceptedHeaders()}
{formatConjunction(
Array.from(
- localityImportAcceptedHeaders()
+ localityUpdateAcceptedHeaders()
) as unknown as RA
)}
@@ -177,7 +177,7 @@ export function ImportLocalityDataSet(): JSX.Element {
)}
{taskId === undefined ? undefined : (
- setTaskId(undefined)}
onImport={(): void => handleImport(headers, data)}
diff --git a/specifyweb/frontend/js_src/lib/components/LocalityImport/types.ts b/specifyweb/frontend/js_src/lib/components/LocalityUpdate/types.ts
similarity index 90%
rename from specifyweb/frontend/js_src/lib/components/LocalityImport/types.ts
rename to specifyweb/frontend/js_src/lib/components/LocalityUpdate/types.ts
index bd993fa836c..e71a7137678 100644
--- a/specifyweb/frontend/js_src/lib/components/LocalityImport/types.ts
+++ b/specifyweb/frontend/js_src/lib/components/LocalityUpdate/types.ts
@@ -3,7 +3,7 @@ import type { State } from 'typesafe-reducer';
import type { IR, RA } from '../../utils/types';
import type { Tables } from '../DataModel/types';
-export type LocalityImportHeader = Exclude<
+export type LocalityUpdateHeader = Exclude<
Lowercase<
| keyof Tables['GeoCoordDetail']['fields']
| keyof Tables['Locality']['fields']
@@ -11,14 +11,14 @@ export type LocalityImportHeader = Exclude<
'locality'
>;
-export type LocalityImportParseError = {
+export type LocalityUpdateParseError = {
readonly message: string;
readonly field: string;
readonly payload: IR;
readonly rowNumber: number;
};
-export type LocalityImportTaskStatus =
+export type LocalityUpdateTaskStatus =
| 'ABORTED'
| 'FAILED'
| 'PARSE_FAILED'
@@ -28,7 +28,7 @@ export type LocalityImportTaskStatus =
| 'PROGRESS'
| 'SUCCEEDED';
-export type LocalityImportState =
+export type LocalityUpdateState =
| State<
'ABORTED',
{ readonly taskstatus: 'ABORTED'; readonly taskinfo: string }
@@ -48,7 +48,7 @@ export type LocalityImportState =
{
readonly taskstatus: 'PARSE_FAILED';
readonly taskinfo: {
- readonly errors: RA;
+ readonly errors: RA;
};
}
>
diff --git a/specifyweb/frontend/js_src/lib/components/LocalityImport/utils.ts b/specifyweb/frontend/js_src/lib/components/LocalityUpdate/utils.ts
similarity index 65%
rename from specifyweb/frontend/js_src/lib/components/LocalityImport/utils.ts
rename to specifyweb/frontend/js_src/lib/components/LocalityUpdate/utils.ts
index 097593fce25..a14c686ace2 100644
--- a/specifyweb/frontend/js_src/lib/components/LocalityImport/utils.ts
+++ b/specifyweb/frontend/js_src/lib/components/LocalityUpdate/utils.ts
@@ -7,41 +7,41 @@ import type { IR, RA, RR } from '../../utils/types';
import { tables } from '../DataModel/tables';
import type { Tables } from '../DataModel/types';
import { resolveBackendParsingMessage } from '../WorkBench/resultsParser';
-import type { LocalityImportHeader, LocalityImportTaskStatus } from './types';
+import type { LocalityUpdateHeader, LocalityUpdateTaskStatus } from './types';
-export const localityImportAcceptedLocalityFields: RA<
+const localityUpdateAcceptedLocalityFields: RA<
Lowercase
> = ['guid', 'datum', 'latitude1', 'longitude1'];
-export const localityImportAcceptedHeaders = f.store(
+export const localityUpdateAcceptedHeaders = f.store(
() =>
new Set([
- ...localityImportAcceptedLocalityFields,
+ ...localityUpdateAcceptedLocalityFields,
...tables.GeoCoordDetail.literalFields
.map(({ name }) => name.toLowerCase())
.filter((header) => header !== 'locality'),
])
);
-export const localityImportRequiredHeaders = new Set([
+export const localityUpdateRequiredHeaders = new Set([
'guid',
]);
-export const localityImportStatusLocalization: RR<
- LocalityImportTaskStatus,
+export const localityUpdateStatusLocalization: RR<
+ LocalityUpdateTaskStatus,
LocalizedString
> = {
- PARSED: localityText.localityImportParsed(),
- PARSING: localityText.localityImportParsing(),
- PENDING: localityText.localityImportStarting(),
- PROGRESS: localityText.localityImportProgressing(),
- FAILED: localityText.localityImportFailed(),
- PARSE_FAILED: localityText.localityImportParseFailure(),
- ABORTED: localityText.localityImportCancelled(),
- SUCCEEDED: localityText.localityImportSucceeded(),
+ PARSED: localityText.localityUpdateParsed(),
+ PARSING: localityText.localityUpdateParsing(),
+ PENDING: localityText.localityUpdateStarting(),
+ PROGRESS: localityText.localityUpdateProgressing(),
+ FAILED: localityText.localityUpdateFailed(),
+ PARSE_FAILED: localityText.localityUpdateParseFailure(),
+ ABORTED: localityText.localityUpdateCancelled(),
+ SUCCEEDED: localityText.localityUpdateSucceeded(),
};
-export function resolveImportLocalityErrorMessage(
+export function resolveLocalityUpdateErrorMessage(
key: string,
payload: IR
): LocalizedString {
diff --git a/specifyweb/frontend/js_src/lib/components/Notifications/NotificationRenderers.tsx b/specifyweb/frontend/js_src/lib/components/Notifications/NotificationRenderers.tsx
index f296bae4c14..78ca2adfb10 100644
--- a/specifyweb/frontend/js_src/lib/components/Notifications/NotificationRenderers.tsx
+++ b/specifyweb/frontend/js_src/lib/components/Notifications/NotificationRenderers.tsx
@@ -12,11 +12,11 @@ import { Link } from '../Atoms/Link';
import { getTable } from '../DataModel/tables';
import { userInformation } from '../InitialContext/userInformation';
import {
- LocalityImportFailed,
- LocalityImportParseErrors,
- LocalityImportSuccess,
-} from '../LocalityImport/Status';
-import type { LocalityImportParseError } from '../LocalityImport/types';
+ LocalityUpdateFailed,
+ LocalityUpdateParseErrors,
+ LocalityUpdateSuccess,
+} from '../LocalityUpdate/Status';
+import type { LocalityUpdateParseError } from '../LocalityUpdate/types';
import { mergingQueryParameter } from '../Merging/queryString';
import { FormattedResource } from '../Molecules/FormattedResource';
import { TableIcon } from '../Molecules/TableIcon';
@@ -211,10 +211,10 @@ export const notificationRenderers: IR<
)
);
},
- 'localityimport-starting'(notification) {
+ 'localityupdate-starting'(notification) {
return (
<>
- {localityText.localityImportStarted()}
+ {localityText.localityUpdateStarted()}
{localityText.taskId()}
{notification.payload.taskid}
@@ -222,19 +222,19 @@ export const notificationRenderers: IR<
>
);
},
- 'localityimport-parse-failed'(notification) {
+ 'localityupdate-parse-failed'(notification) {
const [isOpen, handleOpen, handleClose] = useBooleanState();
return (
<>
- {localityText.localityImportParseFailure()}
+ {localityText.localityUpdateParseFailure()}
- {localityText.localityImportFailureResults()}
+ {localityText.localityUpdateFailureResults()}
{isOpen && (
-
+ .errors as unknown as RA
}
onClose={handleClose}
/>
@@ -246,16 +246,16 @@ export const notificationRenderers: IR<
>
);
},
- 'localityimport-failed'(notification) {
+ 'localityupdate-failed'(notification) {
const [isOpen, handleOpen, handleClose] = useBooleanState();
return (
<>
- {localityText.localityImportFailed()}
+ {localityText.localityUpdateFailed()}
- {localityText.localityImportFailureResults()}
+ {localityText.localityUpdateFailureResults()}
{isOpen && (
-
);
},
- 'localityimport-aborted'(notification) {
+ 'localityupdate-aborted'(notification) {
return (
<>
- {localityText.localityImportCancelled()}
+ {localityText.localityUpdateCancelled()}
{localityText.taskId()}
{notification.payload.taskid}
@@ -279,10 +279,10 @@ export const notificationRenderers: IR<
>
);
},
- 'localityimport-parse-succeeded'(notification) {
+ 'localityupdate-parse-succeeded'(notification) {
return (
<>
- {localityText.localityImportParsed()}
+ {localityText.localityUpdateParsed()}
{localityText.taskId()}
{notification.payload.taskid}
@@ -290,16 +290,16 @@ export const notificationRenderers: IR<
>
);
},
- 'localityimport-succeeded'(notification) {
+ 'localityupdate-succeeded'(notification) {
const [isOpen, handleOpen, handleClose] = useBooleanState();
return (
<>
- {localityText.localityImportSucceeded()}
+ {localityText.localityUpdateSucceeded()}
- {localityText.localityImportResults()}
+ {localityText.localityUpdateResults()}
{isOpen && (
-
}
diff --git a/specifyweb/frontend/js_src/lib/components/Router/Routes.tsx b/specifyweb/frontend/js_src/lib/components/Router/Routes.tsx
index a6656eecea9..e7fef4d8f33 100644
--- a/specifyweb/frontend/js_src/lib/components/Router/Routes.tsx
+++ b/specifyweb/frontend/js_src/lib/components/Router/Routes.tsx
@@ -231,8 +231,9 @@ export const routes: RA = [
{
path: 'locality-dataset',
element: () =>
- import('../LocalityImport').then(
- ({ ImportLocalityDataSet: ImportLocalitySet }) => ImportLocalitySet
+ import('../LocalityUpdate').then(
+ ({ LocalityUpdateFromDataSet: ImportLocalitySet }) =>
+ ImportLocalitySet
),
},
],
diff --git a/specifyweb/frontend/js_src/lib/localization/header.ts b/specifyweb/frontend/js_src/lib/localization/header.ts
index 77d8e9e962a..1ef8fe59836 100644
--- a/specifyweb/frontend/js_src/lib/localization/header.ts
+++ b/specifyweb/frontend/js_src/lib/localization/header.ts
@@ -143,8 +143,8 @@ export const headerText = createDictionary({
abgeschlossen ist.
`,
},
- importLocalityDataset: {
- 'en-us': 'Import Locality Data Set',
+ localityUpdateTool: {
+ 'en-us': 'Locality Update Tool',
},
labelName: {
'en-us': 'Label Name',
diff --git a/specifyweb/frontend/js_src/lib/localization/locality.ts b/specifyweb/frontend/js_src/lib/localization/locality.ts
index 7960b767986..9313f4e9d6b 100644
--- a/specifyweb/frontend/js_src/lib/localization/locality.ts
+++ b/specifyweb/frontend/js_src/lib/localization/locality.ts
@@ -292,50 +292,55 @@ export const localityText = createDictionary({
'uk-ua': 'DD MM SS.SS N/S/E/W (32 45 42.84 N)',
'de-ch': 'DD MM SS.SS N/S/O/W (32 45 42.84 N)',
},
- localityImportHeaderError: {
+ localityUpdateHeaderError: {
'en-us': 'Errors Found in Column Headers',
},
- localityImportMissingHeader: {
- 'en-us': 'The following columns are required but missing in the dataset',
+ localityUpdateMissingHeader: {
+ 'en-us': 'The following columns are required but missing in the data set',
},
- localityImportUnrecognizedHeaders: {
+ localityUpdateUnrecognizedHeaders: {
'en-us':
'The following columns in the dataset are not recognized and will be ignored on import',
},
- localityImportedAcceptedHeaders: {
+ localityUpdateAcceptedHeaders: {
'en-us': 'Only the following headers are accepted',
},
- localityImportStarting: {
- 'en-us': 'Starting Locality Data Set Import',
+ localityUpdateStarting: {
+ 'en-us': 'Starting Locality Update',
},
- localityImportParsing: {
+ localityUpdateParsing: {
'en-us': 'Parsing Locality Data Set',
},
- localityImportProgressing: {
+ localityUpdateProgressing: {
'en-us': 'Importing Locality Data Set',
},
- localityImportParsed: {
- 'en-us': 'Locality Data Set Import Parsed',
+ localityUpdateParsed: {
+ 'en-us': 'Locality Update Data Set Parsed',
},
- localityImportFailed: {
- 'en-us': 'Locality Data Set Import Failed',
+ localityUpdateFailed: {
+ 'en-us': 'Locality Update Failed',
},
- localityImportParseFailure: {
- 'en-us': 'Locality Data Set Parse Failed',
+ localityUpdateParseFailure: {
+ 'en-us': 'Locality Update Parsing Failed',
},
- localityImportCancelled: {
- 'en-us': 'Locality Data Set Import Cancelled',
+ localityUpdateCancelled: {
+ 'en-us': 'Locality Update Cancelled',
},
- localityImportSucceeded: {
- 'en-us': 'Locality Data Set Import Succeeded',
+ localityUpdateSucceeded: {
+ 'en-us': 'Locality Update Succeeded',
},
- localityImportWentWrong: {
- 'en-us': 'Something went wrong during the Locality Import process',
+ localityUpdateWentWrong: {
+ 'en-us': 'Something went wrong during the Locality Update process',
},
- localityImportErrorFileName: {
+ localityUpdateParseErrorFileName: {
comment:
'The file name which is used when Parse Errors are exported. The .csv file extension is appended to the end of this string',
- 'en-us': 'Locality Import Errors - {date:string}',
+ 'en-us': 'Locality Update Errors - {date:string}',
+ },
+ localityUpdateCrashFileName: {
+ comment:
+ 'The file name which is used when any generic non-parsing errors are exported. The .txt file extension is appended to the end of this string',
+ 'en-us': 'Locality Update {taskId:string} Crash Report - {date:string}',
},
guidHeaderNotProvided: {
'en-us': "The Dataset must contain a 'guid' header",
@@ -347,22 +352,22 @@ export const localityText = createDictionary({
'en-us':
'More than one Locality found with guid: {guid:string}. Locality IDs: {localityIds: string}',
},
- localityImportEffectCounts: {
+ localityUpdateEffectCounts: {
'en-us':
- 'The following number of {localityTabelLabel: string} records will be affected by the import and {geoCoordDetailTableLabel: string} records will be created:',
+ 'The following number of {localityTabelLabel: string} records will be affected by the update and {geoCoordDetailTableLabel: string} records will be created:',
},
localityUploadedDescription: {
'en-us':
'The following number of {localityTabelLabel: string} records were updated and {geoCoordDetailTableLabel: string} records were created:',
},
- localityImportStarted: {
- 'en-us': 'The Locality Data Set Import process has started',
+ localityUpdateStarted: {
+ 'en-us': 'The Locality Update process has started',
},
- localityImportResults: {
- 'en-us': 'Locality Data Set Upload Results',
+ localityUpdateResults: {
+ 'en-us': 'Locality Update Results',
},
- localityImportFailureResults: {
- 'en-us': 'Locality Data Set Failure Results',
+ localityUpdateFailureResults: {
+ 'en-us': 'Locality Update Failure Results',
},
taskId: {
'en-us': 'Task ID',
diff --git a/specifyweb/notifications/migrations/0006_localityimport.py b/specifyweb/notifications/migrations/0006_localityupdate_localityupdaterowresult.py
similarity index 64%
rename from specifyweb/notifications/migrations/0006_localityimport.py
rename to specifyweb/notifications/migrations/0006_localityupdate_localityupdaterowresult.py
index 772ad0dfaf0..a8a8a3e4ddd 100644
--- a/specifyweb/notifications/migrations/0006_localityimport.py
+++ b/specifyweb/notifications/migrations/0006_localityupdate_localityupdaterowresult.py
@@ -1,4 +1,4 @@
-# Generated by Django 3.2.15 on 2024-05-30 15:26
+# Generated by Django 3.2.15 on 2024-06-18 13:34
from django.conf import settings
from django.db import migrations, models
@@ -9,28 +9,40 @@
class Migration(migrations.Migration):
dependencies = [
- migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('specify', '__first__'),
+ migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('notifications', '0005_auto_20240530_1512'),
]
operations = [
migrations.CreateModel(
- name='LocalityImport',
+ name='LocalityUpdate',
fields=[
- ('id', models.AutoField(db_column='LocalityImportID', primary_key=True, serialize=False, verbose_name='localityimportid')),
('taskid', models.CharField(max_length=256)),
('status', models.CharField(max_length=256)),
+ ('timestampcreated', models.DateTimeField(default=django.utils.timezone.now)),
+ ('timestampmodified', models.DateTimeField(auto_now=True)),
+ ('id', models.AutoField(db_column='LocalityUpdateID', primary_key=True, serialize=False, verbose_name='localityupdateid')),
('collection', models.ForeignKey(db_column='CollectionID', on_delete=django.db.models.deletion.CASCADE, to='specify.collection')),
- ('recordset', models.ForeignKey(blank=True, db_column='RecordSetID', null=True, on_delete=django.db.models.deletion.SET_NULL, to='specify.recordset')),
- ('specifyuser', models.ForeignKey(db_column='SpecifyUserID', on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
('createdbyagent', models.ForeignKey(db_column='CreatedByAgentID', null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to='specify.agent')),
('modifiedbyagent', models.ForeignKey(db_column='ModifiedByAgentID', null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to='specify.agent')),
- ('timestampcreated', models.DateTimeField(default=django.utils.timezone.now)),
- ('timestampmodified', models.DateTimeField(auto_now=True)),
+ ('recordset', models.ForeignKey(blank=True, db_column='RecordSetID', null=True, on_delete=django.db.models.deletion.SET_NULL, to='specify.recordset')),
+ ('specifyuser', models.ForeignKey(db_column='SpecifyUserID', on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
+ ],
+ options={
+ 'db_table': 'localityupdate',
+ },
+ ),
+ migrations.CreateModel(
+ name='LocalityUpdateRowResult',
+ fields=[
+ ('id', models.AutoField(db_column='LocalityUpdateRowResultID', primary_key=True, serialize=False, verbose_name='localityupdaterowresultid')),
+ ('rownumber', models.IntegerField()),
+ ('result', models.JSONField()),
+ ('localityupdate', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='results', to='notifications.localityupdate')),
],
options={
- 'db_table': 'localityimport',
+ 'db_table': 'localityupdaterowresult',
},
),
]
diff --git a/specifyweb/notifications/migrations/0007_auto_20240613_0348.py b/specifyweb/notifications/migrations/0007_auto_20240613_0348.py
deleted file mode 100644
index 8fa17bf85a3..00000000000
--- a/specifyweb/notifications/migrations/0007_auto_20240613_0348.py
+++ /dev/null
@@ -1,28 +0,0 @@
-# Generated by Django 3.2.15 on 2024-06-13 03:48
-
-from django.db import migrations, models
-import django.db.models.deletion
-
-
-class Migration(migrations.Migration):
-
- dependencies = [
- ('notifications', '0006_localityimport'),
- ]
-
- operations = [
- migrations.CreateModel(
- name='LocalityImportRowResult',
- fields=[
- ('id', models.AutoField(db_column='LocalityImportRowResultID',
- primary_key=True, serialize=False, verbose_name='localityimportrowresultid')),
- ('rownumber', models.IntegerField()),
- ('result', models.JSONField()),
- ('localityimport', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE,
- related_name='results', to='notifications.localityimport')),
- ],
- options={
- 'db_table': 'localityimportrowresult',
- },
- ),
- ]
diff --git a/specifyweb/notifications/models.py b/specifyweb/notifications/models.py
index cb55e8a3a2c..e07b9e42fd3 100644
--- a/specifyweb/notifications/models.py
+++ b/specifyweb/notifications/models.py
@@ -1,6 +1,6 @@
from django.db import models
from django.utils import timezone
-from specifyweb.specify.models import Specifyuser, Collection, Agent, Recordset, Locality
+from specifyweb.specify.models import Specifyuser, Collection, Agent, Recordset
class Message(models.Model):
@@ -41,23 +41,23 @@ class Meta:
# managed = False
-class LocalityImport(AsyncTask):
- id = models.AutoField('localityimportid',
- primary_key=True, db_column='LocalityImportID')
+class LocalityUpdate(AsyncTask):
+ id = models.AutoField('localityupdateid',
+ primary_key=True, db_column='LocalityUpdateID')
recordset = models.ForeignKey(
Recordset, null=True, blank=True, db_column="RecordSetID", on_delete=models.SET_NULL)
class Meta:
- db_table = 'localityimport'
+ db_table = 'localityupdate'
-class LocalityImportRowResult(models.Model):
- id = models.AutoField('localityimportrowresultid',
- primary_key=True, db_column='LocalityImportRowResultID')
+class LocalityUpdateRowResult(models.Model):
+ id = models.AutoField('localityupdaterowresultid',
+ primary_key=True, db_column='LocalityUpdateRowResultID')
rownumber = models.IntegerField()
result = models.JSONField()
- localityimport = models.ForeignKey(
- LocalityImport, on_delete=models.CASCADE, related_name="results")
+ localityupdate = models.ForeignKey(
+ LocalityUpdate, on_delete=models.CASCADE, related_name="results")
class Meta:
- db_table = 'localityimportrowresult'
+ db_table = 'localityupdaterowresult'
diff --git a/specifyweb/specify/import_locality.py b/specifyweb/specify/update_locality.py
similarity index 86%
rename from specifyweb/specify/import_locality.py
rename to specifyweb/specify/update_locality.py
index 2765c0afdd3..e6ba9177145 100644
--- a/specifyweb/specify/import_locality.py
+++ b/specifyweb/specify/update_locality.py
@@ -10,7 +10,7 @@
from specifyweb.celery_tasks import LogErrorsTask, app
from specifyweb.specify.datamodel import datamodel
-from specifyweb.notifications.models import LocalityImport, LocalityImportRowResult, Message
+from specifyweb.notifications.models import LocalityUpdate, LocalityUpdateRowResult, Message
from specifyweb.specify.parse import ParseFailureKey, parse_field as _parse_field, ParseFailure as BaseParseFailure, ParseSucess as BaseParseSuccess
LocalityParseErrorMessageKey = Literal[
@@ -28,9 +28,9 @@
updatable_geocoorddetail_fields = [
field.name.lower() for field in datamodel.get_table_strict('Geocoorddetail').fields]
-ImportModel = Literal['Locality', 'Geocoorddetail']
+UpdateModel = Literal['Locality', 'Geocoorddetail']
-localityimport_parse_success = {
+localityupdate_parse_success = {
"type": "array",
"items": {
"type": "object",
@@ -62,7 +62,7 @@
}
}
-localityimport_parse_error = {
+localityupdate_parse_error = {
"type": "array",
"items": {
"type": "object",
@@ -94,7 +94,7 @@
Progress = Callable[[str, int, int], None]
-class LocalityImportStatus:
+class LocalityUpdateStatus:
PENDING = 'PENDING'
PARSING = 'PARSING'
PARSED = 'PARSED'
@@ -105,24 +105,24 @@ class LocalityImportStatus:
FAILED = 'FAILED'
-class LocalityImportTask(LogErrorsTask):
+class LocalityUpdateTask(LogErrorsTask):
def on_failure(self, exc, task_id, args, kwargs, einfo):
with transaction.atomic():
- locality_import = LocalityImport.objects.get(taskid=task_id)
+ locality_update = LocalityUpdate.objects.get(taskid=task_id)
- Message.objects.create(user=locality_import.specifyuser, content=json.dumps({
- 'type': 'localityimport-failed',
+ Message.objects.create(user=locality_update.specifyuser, content=json.dumps({
+ 'type': 'localityupdate-failed',
'taskid': task_id,
'traceback': str(einfo.traceback)
}))
- locality_import.status = LocalityImportStatus.FAILED
- locality_import.save()
+ locality_update.status = LocalityUpdateStatus.FAILED
+ locality_update.save()
return super().on_failure(exc, task_id, args, kwargs, einfo)
-@app.task(base=LocalityImportTask, bind=True)
-def import_locality_task(self, collection_id: int, column_headers: List[str], data: List[List[str]], create_recordset: bool) -> None:
+@app.task(base=LocalityUpdateTask, bind=True)
+def update_locality_task(self, collection_id: int, column_headers: List[str], data: List[List[str]], create_recordset: bool) -> None:
def progress(state, current: int, total: int):
self.update_state(state=state, meta={
'current': current, 'total': total})
@@ -133,15 +133,15 @@ def progress(state, current: int, total: int):
results = upload_locality_set(
collection, column_headers, data, progress)
- li = resolve_localityimport_result(
+ li = resolve_localityupdate_result(
self.request.id, results, collection, create_recordset)
if results['type'] == 'ParseError':
- self.update_state(LocalityImportStatus.PARSE_FAILED, meta={
+ self.update_state(LocalityUpdateStatus.PARSE_FAILED, meta={
"errors": [error.to_json() for error in results["errors"]]})
Message.objects.create(user=li.specifyuser, content=json.dumps({
- 'type': 'localityimport-parse-failed',
+ 'type': 'localityupdate-parse-failed',
'taskid': li.taskid,
'errors': [error.to_json() for error in results["errors"]]
}))
@@ -153,11 +153,11 @@ def progress(state, current: int, total: int):
localitites.append(row["locality"])
if row["geocoorddetail"]:
geocoorddetails.append(row["geocoorddetail"])
- self.update_state(state=LocalityImportStatus.SUCCEEDED, meta={
+ self.update_state(state=LocalityUpdateStatus.SUCCEEDED, meta={
"recordsetid": recordset_id, "localities": localitites, "geocoorddetails": geocoorddetails})
Message.objects.create(user=li.specifyuser, content=json.dumps({
- 'type': 'localityimport-succeeded',
+ 'type': 'localityupdate-succeeded',
'taskid': li.taskid,
'recordsetid': recordset_id,
"localities": localitites,
@@ -168,7 +168,7 @@ def progress(state, current: int, total: int):
raise Ignore()
-@app.task(base=LocalityImportTask, bind=True)
+@app.task(base=LocalityUpdateTask, bind=True)
def parse_locality_task(self, collection_id: int, column_headers: List[str], data: List[List[str]]):
def progress(state, current: int, total: int):
self.update_state(state=state, meta={
@@ -180,32 +180,32 @@ def progress(state, current: int, total: int):
to_upload, errors = parse_locality_set(
collection, column_headers, data, progress)
- li = resolve_localityimport_result(
+ li = resolve_localityupdate_result(
self.request.id, (to_upload, errors), collection)
- if li.status == LocalityImportStatus.PARSE_FAILED:
- self.update_state(LocalityImportStatus.PARSE_FAILED, meta={
+ if li.status == LocalityUpdateStatus.PARSE_FAILED:
+ self.update_state(LocalityUpdateStatus.PARSE_FAILED, meta={
"errors": [error.to_json() for error in errors]})
Message.objects.create(user=li.specifyuser, content=json.dumps({
- 'type': 'localityimport-parse-failed',
+ 'type': 'localityupdate-parse-failed',
'taskid': li.taskid,
'errors': [error.to_json() for error in errors]
}))
- elif li.status == LocalityImportStatus.PARSED:
+ elif li.status == LocalityUpdateStatus.PARSED:
localitites = len(to_upload)
geocoorddetails = 0
for parsed in to_upload:
if parsed['geocoorddetail'] is not None:
geocoorddetails += 1
- self.update_state(LocalityImportStatus.PARSED, meta={
+ self.update_state(LocalityUpdateStatus.PARSED, meta={
"localitites": localitites,
"geocoorddetails": geocoorddetails
})
Message.objects.create(user=li.specifyuser, content=json.dumps({
- 'type': 'localityimport-parse-succeeded',
+ 'type': 'localityupdate-parse-succeeded',
'taskid': li.taskid,
"localitites": localitites,
"geocoorddetails": geocoorddetails
@@ -245,12 +245,12 @@ class ParsedRow(TypedDict):
class ParseSuccess(NamedTuple):
to_upload: Dict[str, Any]
- model: ImportModel
+ model: UpdateModel
locality_id: Optional[int]
row_number: Optional[str]
@classmethod
- def from_base_parse_success(cls, parse_success: BaseParseSuccess, model: ImportModel, locality_id: Optional[int], row_number: int):
+ def from_base_parse_success(cls, parse_success: BaseParseSuccess, model: UpdateModel, locality_id: Optional[int], row_number: int):
return cls(parse_success.to_upload, model, locality_id, row_number)
@@ -270,9 +270,9 @@ class UploadParseError(TypedDict):
@transaction.atomic
-def resolve_localityimport_result(taskid: str, results: Union[Tuple[List[ParsedRow], List[ParseError]], Union[UploadSuccess, UploadParseError]], collection, create_recordset: bool = False) -> LocalityImport:
+def resolve_localityupdate_result(taskid: str, results: Union[Tuple[List[ParsedRow], List[ParseError]], Union[UploadSuccess, UploadParseError]], collection, create_recordset: bool = False) -> LocalityUpdate:
- li = LocalityImport.objects.get(taskid=taskid)
+ li = LocalityUpdate.objects.get(taskid=taskid)
li.results.get_queryset().delete()
@@ -280,19 +280,19 @@ def resolve_localityimport_result(taskid: str, results: Union[Tuple[List[ParsedR
if isinstance(results, tuple):
to_upload, errors = results
if len(errors) > 0:
- li.status = LocalityImportStatus.PARSE_FAILED
+ li.status = LocalityUpdateStatus.PARSE_FAILED
for error in errors:
result = error.to_json()
- LocalityImportRowResult.objects.create(
- localityimport=li,
+ LocalityUpdateRowResult.objects.create(
+ localityupdate=li,
rownumber=result["rowNumber"],
result=json.dumps(result, cls=DjangoJSONEncoder)
)
else:
- li.status = LocalityImportStatus.PARSED
+ li.status = LocalityUpdateStatus.PARSED
for parsed in to_upload:
- LocalityImportRowResult.objects.create(
- localityimport=li,
+ LocalityUpdateRowResult.objects.create(
+ localityupdate=li,
rownumber=parsed["row_number"],
result=json.dumps(parsed, cls=DjangoJSONEncoder)
)
@@ -300,29 +300,29 @@ def resolve_localityimport_result(taskid: str, results: Union[Tuple[List[ParsedR
# the results come from upload_locality_set
else:
if results['type'] == 'ParseError':
- li.status = LocalityImportStatus.PARSE_FAILED
+ li.status = LocalityUpdateStatus.PARSE_FAILED
for error in results['errors']:
result = error.to_json()
- LocalityImportRowResult.objects.create(
- localityimport=li,
+ LocalityUpdateRowResult.objects.create(
+ localityupdate=li,
rownumber=error.row_number,
result=json.dumps(result, cls=DjangoJSONEncoder)
)
elif results['type'] == 'Uploaded':
- li.status = LocalityImportStatus.SUCCEEDED
+ li.status = LocalityUpdateStatus.SUCCEEDED
localities = []
for index, row in enumerate(results['results']):
row_number = index + 1
localities.append(row['locality'])
- LocalityImportRowResult.objects.create(
- localityimport=li,
+ LocalityUpdateRowResult.objects.create(
+ localityupdate=li,
rownumber=row_number,
result=json.dumps(row, cls=DjangoJSONEncoder)
)
- li.recordset = create_localityimport_recordset(
+ li.recordset = create_localityupdate_recordset(
collection, li.specifyuser, localities) if create_recordset else None
li.save()
@@ -385,12 +385,12 @@ def parse_locality_set(collection, raw_headers: List[str], data: List[List[str]]
if progress is not None:
processed += 1
- progress(LocalityImportStatus.PARSING, processed, total)
+ progress(LocalityUpdateStatus.PARSING, processed, total)
return to_upload, errors
-def parse_field(collection, table_name: ImportModel, field_name: str, field_value: str, locality_id: Optional[int], row_number: int):
+def parse_field(collection, table_name: UpdateModel, field_name: str, field_value: str, locality_id: Optional[int], row_number: int):
parsed = _parse_field(collection, table_name, field_name, field_value)
if isinstance(parsed, BaseParseFailure):
@@ -467,7 +467,7 @@ def upload_from_parsed(uploadables: List[ParsedRow], progress: Optional[Progress
if progress is not None:
processed += 1
- progress(LocalityImportStatus.PROGRESS, processed, total)
+ progress(LocalityUpdateStatus.PROGRESS, processed, total)
return {
"type": "Uploaded",
@@ -479,7 +479,7 @@ def upload_from_parsed(uploadables: List[ParsedRow], progress: Optional[Progress
DATE_FORMAT = r"%a %b %d %Y"
-def create_localityimport_recordset(collection, specifyuser, locality_ids: List[int]):
+def create_localityupdate_recordset(collection, specifyuser, locality_ids: List[int]):
locality_table_id = datamodel.get_table_strict('Locality').tableId
@@ -489,7 +489,7 @@ def create_localityimport_recordset(collection, specifyuser, locality_ids: List[
rs = spmodels.Recordset.objects.create(
collectionmemberid=collection.id,
dbtableid=locality_table_id,
- name=f"{date_as_string} Locality Import",
+ name=f"{date_as_string} Locality Update",
specifyuser=specifyuser,
type=0,
version=0
diff --git a/specifyweb/specify/urls.py b/specifyweb/specify/urls.py
index e97f29e00c0..1a8a6a47301 100644
--- a/specifyweb/specify/urls.py
+++ b/specifyweb/specify/urls.py
@@ -44,8 +44,8 @@
url(r'^localityset/', include([
url(r'^parse/$', views.parse_locality_set),
url(r'^import/$', views.upload_locality_set),
- url(r'^status/(?P[0-9a-fA-F-]+)/$', views.localityimport_status),
- url(r'^abort/(?P[0-9a-fA-F-]+)/$', views.abort_localityimport_task),
+ url(r'^status/(?P[0-9a-fA-F-]+)/$', views.localityupdate_status),
+ url(r'^abort/(?P[0-9a-fA-F-]+)/$', views.abort_localityupdate_task),
])),
# generates Sp6 master key
diff --git a/specifyweb/specify/views.py b/specifyweb/specify/views.py
index b3b9172536f..972b34bfb48 100644
--- a/specifyweb/specify/views.py
+++ b/specifyweb/specify/views.py
@@ -11,7 +11,7 @@
from django import http
from django.conf import settings
from django.db import router, transaction, connection
-from specifyweb.notifications.models import Message, Spmerging, LocalityImport
+from specifyweb.notifications.models import Message, Spmerging, LocalityUpdate
from django.db.models.deletion import Collector
from django.views.decorators.cache import cache_control
from django.views.decorators.http import require_POST
@@ -21,7 +21,7 @@
PermissionTargetAction, PermissionsException, check_permission_targets, table_permissions_checker
from specifyweb.celery_tasks import app, CELERY_TASK_STATE
from specifyweb.specify.record_merging import record_merge_fx, record_merge_task, resolve_record_merge_response
-from specifyweb.specify.import_locality import localityimport_parse_success, localityimport_parse_error, parse_locality_set as _parse_locality_set, upload_locality_set as _upload_locality_set, create_localityimport_recordset, import_locality_task, parse_locality_task, LocalityImportStatus
+from specifyweb.specify.update_locality import localityupdate_parse_success, localityupdate_parse_error, parse_locality_set as _parse_locality_set, upload_locality_set as _upload_locality_set, create_localityupdate_recordset, update_locality_task, parse_locality_task, LocalityUpdateStatus
from . import api, models as spmodels
from .specify_jar import specify_jar
@@ -801,7 +801,7 @@ def abort_merge_task(request, merge_id: int) -> http.HttpResponse:
"type": "string",
"enum": ["ParseError"]
},
- "errors": localityimport_parse_error
+ "errors": localityupdate_parse_error
},
"required": ["type", "errors"],
"additionalProperties": False
@@ -884,13 +884,13 @@ def start_locality_set_background(collection, specify_user, agent, column_header
args = [collection.id, column_headers, data]
if not parse_only:
args.append(create_recordset)
- task_function = parse_locality_task.apply_async if parse_only else import_locality_task.apply_async
+ task_function = parse_locality_task.apply_async if parse_only else update_locality_task.apply_async
task = task_function(args, task_id=task_id)
- LocalityImport.objects.create(
+ LocalityUpdate.objects.create(
taskid=task.id,
- status=LocalityImportStatus.PENDING,
+ status=LocalityUpdateStatus.PENDING,
collection=collection,
specifyuser=specify_user,
createdbyagent=agent,
@@ -898,7 +898,7 @@ def start_locality_set_background(collection, specify_user, agent, column_header
)
Message.objects.create(user=specify_user, content=json.dumps({
- 'type': 'localityimport-starting',
+ 'type': 'localityupdate-starting',
'taskid': task.id
}))
@@ -913,7 +913,7 @@ def upload_locality_set_foreground(collection, specify_user, agent, column_heade
localities = [row["locality"] for row in result["results"]]
- recordset = create_localityimport_recordset(
+ recordset = create_localityupdate_recordset(
collection, specify_user, localities) if create_recordset else None
result["recordsetid"] = None if recordset is None else recordset.pk
@@ -935,7 +935,7 @@ def upload_locality_set_foreground(collection, specify_user, agent, column_heade
"properties": {
"taskstatus": {
"type": "string",
- "enum": [LocalityImportStatus.PENDING, LocalityImportStatus.ABORTED]
+ "enum": [LocalityUpdateStatus.PENDING, LocalityUpdateStatus.ABORTED]
},
"taskinfo": {
"type": "string",
@@ -949,7 +949,7 @@ def upload_locality_set_foreground(collection, specify_user, agent, column_heade
"properties": {
"taskstatus": {
"type": "string",
- "enum": [LocalityImportStatus.PROGRESS, LocalityImportStatus.PARSING]
+ "enum": [LocalityUpdateStatus.PROGRESS, LocalityUpdateStatus.PARSING]
},
"taskinfo": {
"type": "object",
@@ -973,7 +973,7 @@ def upload_locality_set_foreground(collection, specify_user, agent, column_heade
"properties": {
"taskstatus": {
"type": "string",
- "enum": [LocalityImportStatus.PARSED]
+ "enum": [LocalityUpdateStatus.PARSED]
},
"taskinfo": {
"type": "object",
@@ -1018,7 +1018,7 @@ def upload_locality_set_foreground(collection, specify_user, agent, column_heade
"properties": {
"taskstatus": {
"type": "string",
- "enum": [LocalityImportStatus.SUCCEEDED]
+ "enum": [LocalityUpdateStatus.SUCCEEDED]
},
"taskinfo": {
"type": "object",
@@ -1060,12 +1060,12 @@ def upload_locality_set_foreground(collection, specify_user, agent, column_heade
"properties": {
"taskstatus": {
"type": "string",
- "enum": [LocalityImportStatus.PARSE_FAILED]
+ "enum": [LocalityUpdateStatus.PARSE_FAILED]
},
"taskinfo": {
"type": "object",
"properties": {
- "errors": localityimport_parse_error
+ "errors": localityupdate_parse_error
}
}
},
@@ -1077,7 +1077,7 @@ def upload_locality_set_foreground(collection, specify_user, agent, column_heade
"properties": {
"taskstatus": {
"type": "string",
- "enum": [LocalityImportStatus.FAILED]
+ "enum": [LocalityUpdateStatus.FAILED]
},
"taskinfo": {
"type": "object",
@@ -1100,12 +1100,12 @@ def upload_locality_set_foreground(collection, specify_user, agent, column_heade
}
},
"404": {
- "description": 'The localityimport object with task id was not found',
+ "description": 'The localityupdate object with task id was not found',
"content": {
"text/plain": {
"schema": {
"type": "string",
- "example": "The localityimport with task id '7d34dbb2-6e57-4c4b-9546-1fe7bec1acca' was not found"
+ "example": "The localityupdate with task id '7d34dbb2-6e57-4c4b-9546-1fe7bec1acca' was not found"
}
}
}
@@ -1114,30 +1114,30 @@ def upload_locality_set_foreground(collection, specify_user, agent, column_heade
},
})
@require_GET
-def localityimport_status(request: http.HttpRequest, taskid: str):
+def localityupdate_status(request: http.HttpRequest, taskid: str):
try:
- locality_import = LocalityImport.objects.get(taskid=taskid)
- except LocalityImport.DoesNotExist:
- return http.HttpResponseNotFound(f"The localityimport with task id '{taskid}' was not found")
+ locality_import = LocalityUpdate.objects.get(taskid=taskid)
+ except LocalityUpdate.DoesNotExist:
+ return http.HttpResponseNotFound(f"The localityupdate with task id '{taskid}' was not found")
- result = import_locality_task.AsyncResult(locality_import.taskid)
+ result = update_locality_task.AsyncResult(locality_import.taskid)
- resolved_state = LocalityImportStatus.ABORTED if result.state == CELERY_TASK_STATE.REVOKED else LocalityImportStatus.FAILED if result.state == CELERY_TASK_STATE.FAILURE else result.state
+ resolved_state = LocalityUpdateStatus.ABORTED if result.state == CELERY_TASK_STATE.REVOKED else LocalityUpdateStatus.FAILED if result.state == CELERY_TASK_STATE.FAILURE else result.state
status = {
'taskstatus': resolved_state,
'taskinfo': result.info if isinstance(result.info, dict) else repr(result.info)
}
- if resolved_state == LocalityImportStatus.FAILED:
+ if resolved_state == LocalityUpdateStatus.FAILED:
status["taskinfo"] = {
'error': str(result.result),
'traceback': str(result.traceback)
}
- elif locality_import.status == LocalityImportStatus.PARSE_FAILED:
+ elif locality_import.status == LocalityUpdateStatus.PARSE_FAILED:
- status["taskstatus"] = LocalityImportStatus.PARSE_FAILED
+ status["taskstatus"] = LocalityUpdateStatus.PARSE_FAILED
if isinstance(result.info, dict) and 'errors' in result.info.keys():
errors = result.info["errors"]
@@ -1147,8 +1147,8 @@ def localityimport_status(request: http.HttpRequest, taskid: str):
status["taskinfo"] = {"errors": errors}
- elif locality_import.status == LocalityImportStatus.PARSED:
- status["taskstatus"] = LocalityImportStatus.PARSED
+ elif locality_import.status == LocalityUpdateStatus.PARSED:
+ status["taskstatus"] = LocalityUpdateStatus.PARSED
results = locality_import.results.all()
rows = [json.loads(row.result) for row in results]
@@ -1157,10 +1157,10 @@ def localityimport_status(request: http.HttpRequest, taskid: str):
"rows": rows
}
- elif locality_import.status == LocalityImportStatus.SUCCEEDED:
- status["taskstatus"] = LocalityImportStatus.SUCCEEDED
+ elif locality_import.status == LocalityUpdateStatus.SUCCEEDED:
+ status["taskstatus"] = LocalityUpdateStatus.SUCCEEDED
recordset_id = locality_import.recordset.id if locality_import.recordset is not None else None
- if isinstance(result.info, dict) and resolved_state == LocalityImportStatus.SUCCEEDED:
+ if isinstance(result.info, dict) and resolved_state == LocalityUpdateStatus.SUCCEEDED:
result = {
"recordsetid": recordset_id,
"localities": result.info["localities"],
@@ -1212,12 +1212,12 @@ def localityimport_status(request: http.HttpRequest, taskid: str):
},
},
"404": {
- "description": 'The localityimport object with task id was not found',
+ "description": 'The localityupdate object with task id was not found',
"content": {
"text/plain": {
"schema": {
"type": "string",
- "example": "The localityimport with task id '7d34dbb2-6e57-4c4b-9546-1fe7bec1acca' was not found"
+ "example": "The localityupdate with task id '7d34dbb2-6e57-4c4b-9546-1fe7bec1acca' was not found"
}
}
}
@@ -1227,13 +1227,13 @@ def localityimport_status(request: http.HttpRequest, taskid: str):
})
@require_POST
@login_maybe_required
-def abort_localityimport_task(request: http.HttpRequest, taskid: str):
+def abort_localityupdate_task(request: http.HttpRequest, taskid: str):
"Aborts the merge task currently running and matching the given merge/task ID"
try:
- locality_import = LocalityImport.objects.get(taskid=taskid)
- except LocalityImport.DoesNotExist:
- return http.HttpResponseNotFound(f"The localityimport with taskid: {taskid} is not found")
+ locality_import = LocalityUpdate.objects.get(taskid=taskid)
+ except LocalityUpdate.DoesNotExist:
+ return http.HttpResponseNotFound(f"The localityupdate with taskid: {taskid} is not found")
task = record_merge_task.AsyncResult(locality_import.taskid)
@@ -1242,14 +1242,14 @@ def abort_localityimport_task(request: http.HttpRequest, taskid: str):
"message": None
}
- if task.state in [LocalityImportStatus.PENDING, LocalityImportStatus.PARSING, LocalityImportStatus.PROGRESS]:
+ if task.state in [LocalityUpdateStatus.PENDING, LocalityUpdateStatus.PARSING, LocalityUpdateStatus.PROGRESS]:
app.control.revoke(locality_import.taskid, terminate=True)
- locality_import.status = LocalityImportStatus.ABORTED
+ locality_import.status = LocalityUpdateStatus.ABORTED
locality_import.save()
Message.objects.create(user=request.specify_user, content=json.dumps({
- 'type': 'localityimport-aborted',
+ 'type': 'localityupdate-aborted',
'taskid': taskid
}))
result["type"] = "ABORTED"
@@ -1303,7 +1303,7 @@ def abort_localityimport_task(request: http.HttpRequest, taskid: str):
"description": "Successful response returned by worker",
"content": {
"application/json": {
- "schema": localityimport_parse_success
+ "schema": localityupdate_parse_success
}
}
},
@@ -1323,7 +1323,7 @@ def abort_localityimport_task(request: http.HttpRequest, taskid: str):
"description": "Locality Import Set not parsed successfully",
"content": {
"application/json": {
- "schema": localityimport_parse_error
+ "schema": localityupdate_parse_error
}
}
}
From cfd4ec5092bf439a7c2344eb25f6e313b55d113b Mon Sep 17 00:00:00 2001
From: melton-jason
Date: Tue, 18 Jun 2024 11:36:12 -0500
Subject: [PATCH 67/71] Allow hasPermission to check explicit admin status
---
.../frontend/js_src/lib/components/Permissions/helpers.ts | 5 ++++-
1 file changed, 4 insertions(+), 1 deletion(-)
diff --git a/specifyweb/frontend/js_src/lib/components/Permissions/helpers.ts b/specifyweb/frontend/js_src/lib/components/Permissions/helpers.ts
index f68a8a5cb39..849ad3923a0 100644
--- a/specifyweb/frontend/js_src/lib/components/Permissions/helpers.ts
+++ b/specifyweb/frontend/js_src/lib/components/Permissions/helpers.ts
@@ -5,6 +5,7 @@ import type { AnyTree } from '../DataModel/helperTypes';
import { schema } from '../DataModel/schema';
import type { LiteralField, Relationship } from '../DataModel/specifyField';
import type { Tables } from '../DataModel/types';
+import { userInformation } from '../InitialContext/userInformation';
import { toolDefinitions } from '../Security/registry';
import { tableNameToResourceName } from '../Security/utils';
import type { tableActions } from './definitions';
@@ -46,7 +47,9 @@ export const hasPermission = <
action: keyof ReturnType[number][RESOURCE],
collectionId = schema.domainLevelIds.collection
): boolean =>
- getOperationPermissions()[collectionId][resource][action]
+ resource === '%' && action === '%'
+ ? userInformation.isadmin
+ : getOperationPermissions()[collectionId][resource][action]
? true
: f.log(`No permission to ${action.toString()} ${resource}`) ?? false;
From deb10fa273a79159e060dc09cd32ef2d6568c6a6 Mon Sep 17 00:00:00 2001
From: melton-jason
Date: Tue, 18 Jun 2024 11:36:37 -0500
Subject: [PATCH 68/71] Only allow admins to use the Locality Update tool
---
.../lib/components/Header/userToolDefinitions.ts | 3 ++-
.../js_src/lib/components/LocalityUpdate/index.tsx | 5 +++--
specifyweb/specify/views.py | 10 +++++++++-
3 files changed, 14 insertions(+), 4 deletions(-)
diff --git a/specifyweb/frontend/js_src/lib/components/Header/userToolDefinitions.ts b/specifyweb/frontend/js_src/lib/components/Header/userToolDefinitions.ts
index f27bd4d392b..ab9e8ea752a 100644
--- a/specifyweb/frontend/js_src/lib/components/Header/userToolDefinitions.ts
+++ b/specifyweb/frontend/js_src/lib/components/Header/userToolDefinitions.ts
@@ -100,8 +100,9 @@ const rawUserTools = ensure>>>()({
},
},
[commonText.import()]: {
- coGeImport: {
+ localityUpdate: {
title: headerText.localityUpdateTool(),
+ enabled: () => userInformation.isadmin,
url: '/specify/import/locality-dataset/',
icon: icons.globe,
},
diff --git a/specifyweb/frontend/js_src/lib/components/LocalityUpdate/index.tsx b/specifyweb/frontend/js_src/lib/components/LocalityUpdate/index.tsx
index 738524cd8f6..22799ba7dd4 100644
--- a/specifyweb/frontend/js_src/lib/components/LocalityUpdate/index.tsx
+++ b/specifyweb/frontend/js_src/lib/components/LocalityUpdate/index.tsx
@@ -12,6 +12,7 @@ import { formatConjunction } from '../Atoms/Internationalization';
import { LoadingContext } from '../Core/Contexts';
import { CsvFilePicker } from '../Molecules/CsvFilePicker';
import { Dialog } from '../Molecules/Dialog';
+import { ProtectedAction } from '../Permissions/PermissionDenied';
import { LocalityUpdateStatus } from './Status';
import type { LocalityUpdateHeader } from './types';
import {
@@ -76,7 +77,7 @@ export function LocalityUpdateFromDataSet(): JSX.Element {
}
return (
- <>
+
handleImport(headers, data)}
/>
)}
- >
+
);
}
diff --git a/specifyweb/specify/views.py b/specifyweb/specify/views.py
index 395de8fa29a..424ae3b740f 100644
--- a/specifyweb/specify/views.py
+++ b/specifyweb/specify/views.py
@@ -858,12 +858,19 @@ def abort_merge_task(request, merge_id: int) -> http.HttpResponse:
}
}
},
+ "403": {
+ "description": "Insufficient rights to upload the Locality Data Set. Loggin in User must be an admin"
+ }
}
},
})
@login_maybe_required
@require_POST
def upload_locality_set(request: http.HttpRequest):
+
+ if not request.specify_user.is_admin():
+ return http.HttpResponseForbidden('Speciftuser must be an instituion admin')
+
request_data = json.loads(request.body)
column_headers = request_data["columnHeaders"]
@@ -1001,7 +1008,7 @@ def upload_locality_set_foreground(collection, specify_user, agent, column_heade
"type": "number"
},
"row_number": {
- "type" : "number"
+ "type": "number"
}
},
"required": ["locality", "geocoorddetail"]
@@ -1336,6 +1343,7 @@ def parse_locality_set(request: http.HttpRequest):
"""Parse a locality set without making any database changes and return the results
"""
request_data = json.loads(request.body)
+
column_headers = request_data["columnHeaders"]
data = request_data["data"]
run_in_background = request_data.get("runInBackground", False)
From 6f1731ba0b39c7d0bd19bf327fb09723cb154436 Mon Sep 17 00:00:00 2001
From: melton-jason
Date: Tue, 18 Jun 2024 13:19:22 -0500
Subject: [PATCH 69/71] Fix typo in LocalityUpdate recordset relationship
---
specifyweb/specify/datamodel.py | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/specifyweb/specify/datamodel.py b/specifyweb/specify/datamodel.py
index a795723df5f..5411088bcba 100644
--- a/specifyweb/specify/datamodel.py
+++ b/specifyweb/specify/datamodel.py
@@ -8005,7 +8005,7 @@
Field(name='timestampmodified', column='TimestampModified', indexed=False, unique=False, required=False, type='java.sql.Timestamp')
],
relationships = [
- Relationship(name='recordset', type='many-to-one', required=False, relatedModelName='Recordset', column='RecordSetID'),
+ Relationship(name='recordset', type='many-to-one', required=False, relatedModelName='RecordSet', column='RecordSetID'),
Relationship(name='collection', type='many-to-one', required=False, relatedModelName='Collection', column='CollectionID'),
Relationship(name='specifyuser', type='many-to-one', required=True, relatedModelName='SpecifyUser', column='SpecifyUserID'),
Relationship(name='createdbyagent', type='many-to-one', required=False, relatedModelName='Agent', column='CreatedByAgentID'),
From 5b3d04e01d8ccd6d4fea6c1baa85c9f2c73299bb Mon Sep 17 00:00:00 2001
From: melton-jason
Date: Tue, 18 Jun 2024 13:20:04 -0500
Subject: [PATCH 70/71] Provide additional data for SQLAlchemyModelTest
---
specifyweb/stored_queries/tests.py | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/specifyweb/stored_queries/tests.py b/specifyweb/stored_queries/tests.py
index aa15ddf70f2..f494b548652 100644
--- a/specifyweb/stored_queries/tests.py
+++ b/specifyweb/stored_queries/tests.py
@@ -803,7 +803,7 @@ class SQLAlchemyModelTest(TestCase):
def test_sqlalchemy_model_errors(self):
for table in spmodels.datamodel.tables:
table_errors = test_sqlalchemy_model(table)
- self.assertTrue(len(table_errors) == 0 or table.name in expected_errors)
+ self.assertTrue(len(table_errors) == 0 or table.name in expected_errors, f"Did not find {table.name}. Has errors: {table_errors}")
if 'not_found' in table_errors:
table_errors['not_found'] = sorted(table_errors['not_found'])
if table_errors:
From 8024c08db708a542ee6923e9bbde3c0e0644bfc2 Mon Sep 17 00:00:00 2001
From: melton-jason
Date: Tue, 18 Jun 2024 15:15:58 -0500
Subject: [PATCH 71/71] Misc bug/typo fixes
---
.../frontend/js_src/lib/components/WbImport/helpers.ts | 4 ----
.../frontend/js_src/lib/components/WbImport/index.tsx | 4 ++--
specifyweb/frontend/js_src/lib/localization/locality.ts | 2 +-
specifyweb/permissions/initialize.py | 2 +-
specifyweb/specify/calculated_fields.py | 4 +---
specifyweb/specify/management/commands/support_login.py | 2 +-
specifyweb/specify/model_extras.py | 8 +++++++-
specifyweb/specify/views.py | 2 +-
8 files changed, 14 insertions(+), 14 deletions(-)
diff --git a/specifyweb/frontend/js_src/lib/components/WbImport/helpers.ts b/specifyweb/frontend/js_src/lib/components/WbImport/helpers.ts
index 8115ef6a070..df6c0fc9559 100644
--- a/specifyweb/frontend/js_src/lib/components/WbImport/helpers.ts
+++ b/specifyweb/frontend/js_src/lib/components/WbImport/helpers.ts
@@ -20,10 +20,6 @@ import type { Dataset, DatasetBrief } from '../WbPlanView/Wrapped';
* and update the usages in code to fix that rule
*/
-/** Remove the extension from the file name */
-export const extractFileName = (fileName: string): string =>
- fileName.replace(/\.[^.]*$/u, '');
-
export const wbImportPreviewSize = 100;
const fileMimeMapper: IR<'csv' | 'xls'> = {
diff --git a/specifyweb/frontend/js_src/lib/components/WbImport/index.tsx b/specifyweb/frontend/js_src/lib/components/WbImport/index.tsx
index a56e54ce612..9cc71cf613b 100644
--- a/specifyweb/frontend/js_src/lib/components/WbImport/index.tsx
+++ b/specifyweb/frontend/js_src/lib/components/WbImport/index.tsx
@@ -14,6 +14,7 @@ import { useTriggerState } from '../../hooks/useTriggerState';
import { wbText } from '../../localization/workbench';
import type { GetOrSet, RA } from '../../utils/types';
import { localized } from '../../utils/types';
+import { stripFileExtension } from '../../utils/utils';
import { Container, H2 } from '../Atoms';
import { Input } from '../Atoms/Form';
import { LoadingContext } from '../Core/Contexts';
@@ -22,7 +23,6 @@ import { CsvFilePreview } from '../Molecules/CsvFilePicker';
import { FilePicker, Layout } from '../Molecules/FilePicker';
import {
createDataSet,
- extractFileName,
getMaxDataSetLength,
inferDataSetType,
parseCsv,
@@ -50,7 +50,7 @@ export function WbImportView(): JSX.Element {
function FilePicked({ file }: { readonly file: File }): JSX.Element {
const fileType = inferDataSetType(file);
- const getSetDataSetName = useTriggerState(extractFileName(file.name));
+ const getSetDataSetName = useTriggerState(stripFileExtension(file.name));
const [hasHeader = true, setHasHeader] = useCachedState(
'wbImport',
'hasHeader'
diff --git a/specifyweb/frontend/js_src/lib/localization/locality.ts b/specifyweb/frontend/js_src/lib/localization/locality.ts
index ce4248df5e4..c9dd28a187d 100644
--- a/specifyweb/frontend/js_src/lib/localization/locality.ts
+++ b/specifyweb/frontend/js_src/lib/localization/locality.ts
@@ -321,7 +321,7 @@ export const localityText = createDictionary({
'en-us': 'Locality Update Failed',
},
localityUpdateParseFailure: {
- 'en-us': 'Locality Upadte Parsing Failed',
+ 'en-us': 'Locality Update Parsing Failed',
},
localityUpdateCancelled: {
'en-us': 'Locality Update Cancelled',
diff --git a/specifyweb/permissions/initialize.py b/specifyweb/permissions/initialize.py
index ae04ab6d683..55e20da62c5 100644
--- a/specifyweb/permissions/initialize.py
+++ b/specifyweb/permissions/initialize.py
@@ -30,7 +30,7 @@ def create_admins() -> None:
users = Specifyuser.objects.all()
for user in users:
- if user.is_admin():
+ if user.is_legacy_admin():
UserPolicy.objects.get_or_create(
collection=None,
specifyuser_id=user.id,
diff --git a/specifyweb/specify/calculated_fields.py b/specifyweb/specify/calculated_fields.py
index 84cf0d24141..45ce1490716 100644
--- a/specifyweb/specify/calculated_fields.py
+++ b/specifyweb/specify/calculated_fields.py
@@ -57,9 +57,7 @@ def calculate_extra_fields(obj, data: Dict[str, Any]) -> Dict[str, Any]:
extra["isonloan"] = obj.isonloan()
elif isinstance(obj, Specifyuser):
- extra["isadmin"] = obj.userpolicy_set.filter(
- collection=None, resource="%", action="%"
- ).exists()
+ extra["isadmin"] = obj.is_admin()
elif isinstance(obj, Collectionobject):
preparations = obj.preparations.all()
diff --git a/specifyweb/specify/management/commands/support_login.py b/specifyweb/specify/management/commands/support_login.py
index 969e53770b1..359195a2f3f 100644
--- a/specifyweb/specify/management/commands/support_login.py
+++ b/specifyweb/specify/management/commands/support_login.py
@@ -27,7 +27,7 @@ def add_arguments(self, parser):
def handle(self, **options):
if options['list']:
- def admin(user): return 'admin' if user.is_admin() else ''
+ def admin(user): return 'admin' if user.is_legacy_admin() else ''
for user in Specifyuser.objects.all():
self.stdout.write('\t'.join((user.name, user.usertype, admin(user))))
diff --git a/specifyweb/specify/model_extras.py b/specifyweb/specify/model_extras.py
index aee4abcecdd..0f3bea3cf96 100644
--- a/specifyweb/specify/model_extras.py
+++ b/specifyweb/specify/model_extras.py
@@ -54,7 +54,13 @@ def check_password(self, password):
return False
return decrypted == password
- def is_admin(self):
+ def is_admin(self):
+ "Returns true if user is a Specify 7 admin."
+ return self.userpolicy_set.filter(
+ collection=None, resource="%", action="%"
+ ).exists()
+
+ def is_legacy_admin(self):
"Returns true if user is a Specify 6 admin."
from django.db import connection
cursor = connection.cursor()
diff --git a/specifyweb/specify/views.py b/specifyweb/specify/views.py
index 424ae3b740f..0abd0c45fc3 100644
--- a/specifyweb/specify/views.py
+++ b/specifyweb/specify/views.py
@@ -869,7 +869,7 @@ def abort_merge_task(request, merge_id: int) -> http.HttpResponse:
def upload_locality_set(request: http.HttpRequest):
if not request.specify_user.is_admin():
- return http.HttpResponseForbidden('Speciftuser must be an instituion admin')
+        return http.HttpResponseForbidden('Specifyuser must be an institution admin')
request_data = json.loads(request.body)