>;
+ readonly hasHeader: boolean;
+}): JSX.Element {
+ const { rows, header } = extractHeader(preview, hasHeader);
+
+ return (
+
+
{wbText.previewDataSet()}
+
+
+
+
+ {header.map((cell, index) => (
+ |
+ {cell}
+ |
+ ))}
+
+
+
+ {rows.map((row, index) => (
+
+ {row.map((cell, index) => (
+ |
+ {cell}
+ |
+ ))}
+
+ ))}
+
+
+
+
+ );
+}
+
+function BadImport({
+ error,
+}: {
+ readonly error: LocalizedString;
+}): JSX.Element {
+ return (
+
+ {wbText.errorImporting()}
+
+ {error}
+
+ );
+}
diff --git a/specifyweb/frontend/js_src/lib/components/Notifications/NotificationRenderers.tsx b/specifyweb/frontend/js_src/lib/components/Notifications/NotificationRenderers.tsx
index 7fba0088032..78ca2adfb10 100644
--- a/specifyweb/frontend/js_src/lib/components/Notifications/NotificationRenderers.tsx
+++ b/specifyweb/frontend/js_src/lib/components/Notifications/NotificationRenderers.tsx
@@ -1,13 +1,22 @@
import React from 'react';
import type { LocalizedString } from 'typesafe-i18n';
+import { useBooleanState } from '../../hooks/useBooleanState';
+import { localityText } from '../../localization/locality';
import { mergingText } from '../../localization/merging';
import { notificationsText } from '../../localization/notifications';
import { StringToJsx } from '../../localization/utils';
-import type { IR } from '../../utils/types';
+import type { IR, RA } from '../../utils/types';
+import { Button } from '../Atoms/Button';
import { Link } from '../Atoms/Link';
import { getTable } from '../DataModel/tables';
import { userInformation } from '../InitialContext/userInformation';
+import {
+ LocalityUpdateFailed,
+ LocalityUpdateParseErrors,
+ LocalityUpdateSuccess,
+} from '../LocalityUpdate/Status';
+import type { LocalityUpdateParseError } from '../LocalityUpdate/types';
import { mergingQueryParameter } from '../Merging/queryString';
import { FormattedResource } from '../Molecules/FormattedResource';
import { TableIcon } from '../Molecules/TableIcon';
@@ -202,6 +211,114 @@ export const notificationRenderers: IR<
)
);
},
+ 'localityupdate-starting'(notification) {
+ return (
+ <>
+ {localityText.localityUpdateStarted()}
+
+ {localityText.taskId()}
+ {notification.payload.taskid}
+
+ >
+ );
+ },
+ 'localityupdate-parse-failed'(notification) {
+ const [isOpen, handleOpen, handleClose] = useBooleanState();
+ return (
+ <>
+ {localityText.localityUpdateParseFailure()}
+
+ {localityText.localityUpdateFailureResults()}
+
+ {isOpen && (
+
+ }
+ onClose={handleClose}
+ />
+ )}
+
+ {localityText.taskId()}
+ {notification.payload.taskid}
+
+ >
+ );
+ },
+ 'localityupdate-failed'(notification) {
+ const [isOpen, handleOpen, handleClose] = useBooleanState();
+ return (
+ <>
+ {localityText.localityUpdateFailed()}
+
+ {localityText.localityUpdateFailureResults()}
+
+ {isOpen && (
+
+ )}
+
+ {localityText.taskId()}
+ {notification.payload.taskid}
+
+ >
+ );
+ },
+ 'localityupdate-aborted'(notification) {
+ return (
+ <>
+ {localityText.localityUpdateCancelled()}
+
+ {localityText.taskId()}
+ {notification.payload.taskid}
+
+ >
+ );
+ },
+ 'localityupdate-parse-succeeded'(notification) {
+ return (
+ <>
+ {localityText.localityUpdateParsed()}
+
+ {localityText.taskId()}
+ {notification.payload.taskid}
+
+ >
+ );
+ },
+ 'localityupdate-succeeded'(notification) {
+ const [isOpen, handleOpen, handleClose] = useBooleanState();
+ return (
+ <>
+ {localityText.localityUpdateSucceeded()}
+
+ {localityText.localityUpdateResults()}
+
+ {isOpen && (
+
+ }
+ localityIds={
+ notification.payload.localities as unknown as RA
+ }
+ recordSetId={
+ notification.payload.recordsetid as unknown as number | undefined
+ }
+ onClose={handleClose}
+ />
+ )}
+
+ {localityText.taskId()}
+ {notification.payload.taskid}
+
+ >
+ );
+ },
default(notification) {
console.error('Unknown notification type', { notification });
return {JSON.stringify(notification, null, 2)};
diff --git a/specifyweb/frontend/js_src/lib/components/Notifications/__tests__/useNotificationsFetch.test.ts b/specifyweb/frontend/js_src/lib/components/Notifications/__tests__/useNotificationsFetch.test.ts
index df9524741d2..cce7e123874 100644
--- a/specifyweb/frontend/js_src/lib/components/Notifications/__tests__/useNotificationsFetch.test.ts
+++ b/specifyweb/frontend/js_src/lib/components/Notifications/__tests__/useNotificationsFetch.test.ts
@@ -4,7 +4,6 @@ import type { LocalizedString } from 'typesafe-i18n';
import { overrideAjax } from '../../../tests/ajax';
import { mockTime } from '../../../tests/helpers';
-testTime;
import { testTime } from '../../../tests/testTime';
import { formatDateForBackEnd } from '../../../utils/parser/dateFormat';
import { formatUrl } from '../../Router/queryString';
diff --git a/specifyweb/frontend/js_src/lib/components/Permissions/helpers.ts b/specifyweb/frontend/js_src/lib/components/Permissions/helpers.ts
index f68a8a5cb39..849ad3923a0 100644
--- a/specifyweb/frontend/js_src/lib/components/Permissions/helpers.ts
+++ b/specifyweb/frontend/js_src/lib/components/Permissions/helpers.ts
@@ -5,6 +5,7 @@ import type { AnyTree } from '../DataModel/helperTypes';
import { schema } from '../DataModel/schema';
import type { LiteralField, Relationship } from '../DataModel/specifyField';
import type { Tables } from '../DataModel/types';
+import { userInformation } from '../InitialContext/userInformation';
import { toolDefinitions } from '../Security/registry';
import { tableNameToResourceName } from '../Security/utils';
import type { tableActions } from './definitions';
@@ -46,7 +47,9 @@ export const hasPermission = <
action: keyof ReturnType[number][RESOURCE],
collectionId = schema.domainLevelIds.collection
): boolean =>
- getOperationPermissions()[collectionId][resource][action]
+ resource === '%' && action === '%'
+ ? userInformation.isadmin
+ : getOperationPermissions()[collectionId][resource][action]
? true
: f.log(`No permission to ${action.toString()} ${resource}`) ?? false;
diff --git a/specifyweb/frontend/js_src/lib/components/PickLists/index.tsx b/specifyweb/frontend/js_src/lib/components/PickLists/index.tsx
index 6bdb593cbfb..d7d1e2d3152 100644
--- a/specifyweb/frontend/js_src/lib/components/PickLists/index.tsx
+++ b/specifyweb/frontend/js_src/lib/components/PickLists/index.tsx
@@ -85,6 +85,7 @@ export function PickListComboBox({
() => ({
value: defaultValue,
required: rawIsRequired,
+ type: 'text',
}),
[defaultValue, rawIsRequired]
)
diff --git a/specifyweb/frontend/js_src/lib/components/QueryBuilder/Components.tsx b/specifyweb/frontend/js_src/lib/components/QueryBuilder/Components.tsx
index a456b6cbce0..293c84b9ff8 100644
--- a/specifyweb/frontend/js_src/lib/components/QueryBuilder/Components.tsx
+++ b/specifyweb/frontend/js_src/lib/components/QueryBuilder/Components.tsx
@@ -181,8 +181,7 @@ export function QueryButton({
}
/**
- * Create a Record Set from all query results.
- * See also `CreateRecordSet`
+ * Create a Record Set from query results.
*/
export function MakeRecordSetButton({
baseTableName,
diff --git a/specifyweb/frontend/js_src/lib/components/QueryBuilder/CreateRecordSet.tsx b/specifyweb/frontend/js_src/lib/components/QueryBuilder/CreateRecordSet.tsx
index 6f50d705fe2..07bcaa8da99 100644
--- a/specifyweb/frontend/js_src/lib/components/QueryBuilder/CreateRecordSet.tsx
+++ b/specifyweb/frontend/js_src/lib/components/QueryBuilder/CreateRecordSet.tsx
@@ -12,24 +12,30 @@ import {
serializeResource,
} from '../DataModel/serializers';
import { strictGetTable, tables } from '../DataModel/tables';
-import type { RecordSet, SpQuery, Tables } from '../DataModel/types';
+import type { RecordSet, Tables } from '../DataModel/types';
import { raise } from '../Errors/Crash';
import { recordSetView } from '../FormParse/webOnlyViews';
import { ResourceView } from '../Forms/ResourceView';
-import { RecordSetCreated, recordSetFromQueryLoading } from './Components';
+import { loadingBar } from '../Molecules';
+import { Dialog } from '../Molecules/Dialog';
+import { RecordSetCreated } from './Components';
/**
- * Create a record set from selected records.
+ * Renders a button to create a record set from a group of records.
* See also `MakeRecordSetButton`
*/
export function CreateRecordSet({
- getIds,
+ recordIds,
baseTableName,
- queryResource,
+ defaultRecordSetName,
+ buttonType = 'Small',
+ saveComponent,
}: {
- readonly getIds: () => RA;
+ readonly recordIds: RA | (() => RA);
readonly baseTableName: keyof Tables;
- readonly queryResource: SpecifyResource | undefined;
+ readonly defaultRecordSetName?: string;
+ readonly buttonType?: Exclude;
+ readonly saveComponent?: () => JSX.Element;
}): JSX.Element {
const [state, setState] = React.useState<
| State<'Editing', { readonly recordSet: SpecifyResource }>
@@ -38,14 +44,21 @@ export function CreateRecordSet({
| State<'Saving'>
>({ type: 'Main' });
+ const resolvedRecordIds = React.useMemo(
+ () => (typeof recordIds === 'function' ? recordIds() : recordIds),
+ [recordIds]
+ );
+
+ const ResolvedButton = Button[buttonType];
+
return (
<>
- {
const recordSet = new tables.RecordSet.Resource();
- if (queryResource !== undefined && !queryResource.isNew())
- recordSet.set('name', queryResource.get('name'));
+ if (defaultRecordSetName !== undefined)
+ recordSet.set('name', defaultRecordSetName);
setState({
type: 'Editing',
recordSet,
@@ -55,7 +68,7 @@ export function CreateRecordSet({
{queryText.createRecordSet({
recordSetTable: tables.RecordSet.label,
})}
-
+
{state.type === 'Editing' && (
({
+ recordSetItems: f.unique(resolvedRecordIds).map((id) => ({
recordId: id,
})),
})
@@ -99,7 +112,11 @@ export function CreateRecordSet({
}}
/>
)}
- {state.type === 'Saving' && recordSetFromQueryLoading()}
+ {state.type === 'Saving'
+ ? typeof saveComponent === 'function'
+ ? saveComponent()
+ : LoadingDialog()
+ : null}
{state.type === 'Saved' && (
);
}
+
+function LoadingDialog(): JSX.Element {
+ return (
+
+ );
+}
diff --git a/specifyweb/frontend/js_src/lib/components/QueryBuilder/Results.tsx b/specifyweb/frontend/js_src/lib/components/QueryBuilder/Results.tsx
index 11d5e12d517..9a24f0b7508 100644
--- a/specifyweb/frontend/js_src/lib/components/QueryBuilder/Results.tsx
+++ b/specifyweb/frontend/js_src/lib/components/QueryBuilder/Results.tsx
@@ -26,6 +26,7 @@ import {
import { fetchPickList } from '../PickLists/fetch';
import { userPreferences } from '../Preferences/userPreferences';
import { generateMappingPathPreview } from '../WbPlanView/mappingPreview';
+import { recordSetFromQueryLoading } from './Components';
import { CreateRecordSet } from './CreateRecordSet';
import type { QueryFieldSpec } from './fieldSpec';
import type { QueryField } from './helpers';
@@ -234,14 +235,19 @@ export function QueryResults(props: QueryResultsProps): JSX.Element {
* if records were selected out of order)
*/
baseTableName={fieldSpecs[0].baseTable.name}
- getIds={(): RA =>
+ defaultRecordSetName={
+ queryResource?.isNew() ?? true
+ ? undefined
+ : queryResource?.get('name')
+ }
+ recordIds={(): RA =>
loadedResults
.filter((result) =>
selectedRows.has(result[queryIdField] as number)
)
.map((result) => result[queryIdField] as number)
}
- queryResource={queryResource}
+ saveComponent={recordSetFromQueryLoading}
/>
) : (
createRecordSet
diff --git a/specifyweb/frontend/js_src/lib/components/Router/Routes.tsx b/specifyweb/frontend/js_src/lib/components/Router/Routes.tsx
index b2ad3765208..e7fef4d8f33 100644
--- a/specifyweb/frontend/js_src/lib/components/Router/Routes.tsx
+++ b/specifyweb/frontend/js_src/lib/components/Router/Routes.tsx
@@ -225,6 +225,19 @@ export const routes: RA = [
path: 'workbench-import',
element: ,
},
+ {
+ path: 'import',
+ children: [
+ {
+ path: 'locality-dataset',
+ element: () =>
+ import('../LocalityUpdate').then(
+ ({ LocalityUpdateFromDataSet: ImportLocalitySet }) =>
+ ImportLocalitySet
+ ),
+ },
+ ],
+ },
{
path: 'resources',
title: resourcesText.appResources(),
diff --git a/specifyweb/frontend/js_src/lib/components/WbImport/helpers.ts b/specifyweb/frontend/js_src/lib/components/WbImport/helpers.ts
index 085412ad3dc..df6c0fc9559 100644
--- a/specifyweb/frontend/js_src/lib/components/WbImport/helpers.ts
+++ b/specifyweb/frontend/js_src/lib/components/WbImport/helpers.ts
@@ -1,6 +1,5 @@
import { parse } from 'csv-parse/browser/esm';
import type { LocalizedString } from 'typesafe-i18n';
-import ImportXLSWorker from 'worker-loader!./xls.worker';
import { wbText } from '../../localization/workbench';
import { ajax } from '../../utils/ajax';
@@ -21,10 +20,6 @@ import type { Dataset, DatasetBrief } from '../WbPlanView/Wrapped';
* and update the usages in code to fix that rule
*/
-/** Remove the extension from the file name */
-export const extractFileName = (fileName: string): string =>
- fileName.replace(/\.[^.]*$/u, '');
-
export const wbImportPreviewSize = 100;
const fileMimeMapper: IR<'csv' | 'xls'> = {
@@ -127,7 +122,8 @@ export const parseXls = async (
limit?: number
): Promise>> =>
new Promise((resolve, reject) => {
- const worker = new ImportXLSWorker();
+ // @ts-expect-error Specify is running with target 'esnext' with type 'module'. import.meta.url should be allowed
+ const worker = new Worker(new URL('xls.worker.ts', import.meta.url));
const dateFormat =
fullDateFormat() === databaseDateFormat ? undefined : fullDateFormat();
worker.postMessage({ file, previewSize: limit, dateFormat });
diff --git a/specifyweb/frontend/js_src/lib/components/WbImport/index.tsx b/specifyweb/frontend/js_src/lib/components/WbImport/index.tsx
index baef36b11c8..9cc71cf613b 100644
--- a/specifyweb/frontend/js_src/lib/components/WbImport/index.tsx
+++ b/specifyweb/frontend/js_src/lib/components/WbImport/index.tsx
@@ -10,25 +10,19 @@ import type { LocalizedString } from 'typesafe-i18n';
import { useAsyncState } from '../../hooks/useAsyncState';
import { useCachedState } from '../../hooks/useCachedState';
-import { useStateForContext } from '../../hooks/useStateForContext';
import { useTriggerState } from '../../hooks/useTriggerState';
import { wbText } from '../../localization/workbench';
-import type { GetSet, RA } from '../../utils/types';
+import type { GetOrSet, RA } from '../../utils/types';
import { localized } from '../../utils/types';
-import { Container, H2, H3 } from '../Atoms';
-import { Button } from '../Atoms/Button';
-import { Input, Select } from '../Atoms/Form';
+import { stripFileExtension } from '../../utils/utils';
+import { Container, H2 } from '../Atoms';
+import { Input } from '../Atoms/Form';
import { LoadingContext } from '../Core/Contexts';
import { useMenuItem } from '../Header/MenuContext';
-import { loadingGif } from '../Molecules';
-import type { AutoCompleteItem } from '../Molecules/AutoComplete';
-import { AutoComplete } from '../Molecules/AutoComplete';
-import { FilePicker } from '../Molecules/FilePicker';
-import { encodings } from '../WorkBench/encodings';
+import { CsvFilePreview } from '../Molecules/CsvFilePicker';
+import { FilePicker, Layout } from '../Molecules/FilePicker';
import {
createDataSet,
- extractFileName,
- extractHeader,
getMaxDataSetLength,
inferDataSetType,
parseCsv,
@@ -56,25 +50,43 @@ export function WbImportView(): JSX.Element {
function FilePicked({ file }: { readonly file: File }): JSX.Element {
const fileType = inferDataSetType(file);
+ const getSetDataSetName = useTriggerState(stripFileExtension(file.name));
+ const [hasHeader = true, setHasHeader] = useCachedState(
+ 'wbImport',
+ 'hasHeader'
+ );
return fileType === 'csv' ? (
-
+
) : (
-
+
);
}
-function CsvPicked({ file }: { readonly file: File }): JSX.Element {
- const [encoding, setEncoding] = React.useState('utf-8');
- const getSetDelimiter = useStateForContext(undefined);
- const preview = useCsvPreview(file, encoding, getSetDelimiter);
+function CsvPicked({
+ file,
+ getSetHasHeader: [hasHeader, setHasHeader],
+ getSetDataSetName: [dataSetName, setDataSetName],
+}: {
+ readonly file: File;
+ readonly getSetHasHeader: GetOrSet;
+ readonly getSetDataSetName: GetOrSet;
+}): JSX.Element {
const loading = React.useContext(LoadingContext);
const navigate = useNavigate();
return (
-
+ {
loading(
parseCsv(file, encoding, getSetDelimiter)
.then(async (data) =>
@@ -86,166 +98,11 @@ function CsvPicked({ file }: { readonly file: File }): JSX.Element {
})
)
.then(({ id }) => navigate(`/specify/workbench/${id}/`))
- )
- }
+ );
+ }}
>
-
-
-
- );
-}
-
-function useCsvPreview(
- file: File,
- encoding: string,
- getSetDelimiter: GetSet
-): LocalizedString | RA> | undefined {
- const [delimiter, setDelimiter] = getSetDelimiter;
- const [preview] = useAsyncState>>(
- React.useCallback(
- async () =>
- parseCsv(
- file,
- encoding,
- [delimiter, setDelimiter],
- wbImportPreviewSize
- ).catch((error) => localized(error.message)),
- [file, encoding, delimiter, setDelimiter]
- ),
- false
- );
- return preview;
-}
-
-function ChooseEncoding({
- encoding = '',
- isDisabled,
- onChange: handleChange,
-}: {
- readonly encoding: string;
- readonly isDisabled: boolean;
- readonly onChange: (encoding: string) => void;
-}): JSX.Element {
- return (
-
- );
-}
-
-const delimiters: RA> = [
- { label: wbText.comma(), searchValue: ',', data: ',' },
- { label: wbText.tab(), searchValue: '\t', data: '\t' },
- { label: wbText.semicolon(), searchValue: ';', data: ';' },
- { label: wbText.space(), searchValue: ' ', data: ' ' },
- { label: wbText.pipe(), searchValue: '|', data: '|' },
-];
-
-function ChooseDelimiter({
- isDisabled,
- getSetDelimiter: [delimiter, handleChange],
-}: {
- readonly isDisabled: boolean;
- readonly getSetDelimiter: GetSet;
-}): JSX.Element {
- const [state, setState] = useTriggerState(delimiter);
-
- /**
- * Don't disable the component if it is currently focused, as disabling it
- * would lead to focus loss, which is bad UX and an accessibility issue.
- */
- const inputRef = React.useRef(null);
- const isFocused = inputRef.current === document.activeElement;
- const disabled = isDisabled && !isFocused;
-
- return (
-
- );
-}
-
-function Layout({
- fileName,
- preview,
- children,
- onImport: handleImport,
-}: {
- readonly fileName: string;
- readonly preview: LocalizedString | RA> | undefined;
- readonly children?: JSX.Element | RA;
- readonly onImport: (dataSetName: string, hasHeader: boolean) => void;
-}): JSX.Element {
- const [dataSetName, setDataSetName] = useTriggerState(
- extractFileName(fileName)
- );
- const [hasHeader = true, setHasHeader] = useCachedState(
- 'wbImport',
- 'hasHeader'
- );
- return (
- <>
-
- {children}
-
-
- handleImport(dataSetName, hasHeader)}
- >
- {wbText.importFile()}
-
-
- {typeof preview === 'string' ? (
-
- ) : Array.isArray(preview) ? (
-
- ) : (
- loadingGif
- )}
- >
+
+
);
}
@@ -270,96 +127,23 @@ function ChooseName({
);
}
-function ToggleHeader({
- hasHeader,
- isDisabled,
- onChange: handleChange,
-}: {
- readonly hasHeader: boolean;
- readonly isDisabled: boolean;
- readonly onChange: (hasHeader: boolean) => void;
-}): JSX.Element {
- return (
-
- );
-}
-
-function BadImport({
- error,
+function XlsPicked({
+ file,
+ getSetHasHeader,
+ getSetDataSetName: [dataSetName, setDataSetName],
}: {
- readonly error: LocalizedString;
+ readonly file: File;
+ readonly getSetHasHeader: GetOrSet;
+ readonly getSetDataSetName: GetOrSet;
}): JSX.Element {
- return (
-
- {wbText.errorImporting()}
-
- {error}
-
- );
-}
-
-function Preview({
- preview,
- hasHeader,
-}: {
- readonly preview: RA>;
- readonly hasHeader: boolean;
-}): JSX.Element {
- const { rows, header } = extractHeader(preview, hasHeader);
-
- return (
-
-
{wbText.previewDataSet()}
-
-
-
-
- {header.map((cell, index) => (
- |
- {cell}
- |
- ))}
-
-
-
- {rows.map((row, index) => (
-
- {row.map((cell, index) => (
- |
- {cell}
- |
- ))}
-
- ))}
-
-
-
-
- );
-}
-
-function XlsPicked({ file }: { readonly file: File }): JSX.Element {
const preview = useXlsPreview(file);
const loading = React.useContext(LoadingContext);
const navigate = useNavigate();
return (
+ onFileImport={(hasHeader): void =>
loading(
parseXls(file)
.then(async (data) =>
@@ -373,7 +157,9 @@ function XlsPicked({ file }: { readonly file: File }): JSX.Element {
.then(({ id }) => navigate(`/specify/workbench/${id}/`))
)
}
- />
+ >
+
+
);
}
diff --git a/specifyweb/frontend/js_src/lib/components/WbImport/worker.d.ts b/specifyweb/frontend/js_src/lib/components/WbImport/worker.d.ts
deleted file mode 100644
index 82542e8e795..00000000000
--- a/specifyweb/frontend/js_src/lib/components/WbImport/worker.d.ts
+++ /dev/null
@@ -1,13 +0,0 @@
-/**
- * Dummy definition for wbimportxls.worker.ts
- *
- * @module
- */
-
-declare module 'worker-loader!*' {
- class WebpackWorker extends Worker {
- constructor();
- }
-
- export default WebpackWorker;
-}
diff --git a/specifyweb/frontend/js_src/lib/components/WorkBench/Results.tsx b/specifyweb/frontend/js_src/lib/components/WorkBench/Results.tsx
index ea86c70b285..1c493dcdd50 100644
--- a/specifyweb/frontend/js_src/lib/components/WorkBench/Results.tsx
+++ b/specifyweb/frontend/js_src/lib/components/WorkBench/Results.tsx
@@ -9,6 +9,7 @@ import React from 'react';
import { commonText } from '../../localization/common';
import { wbText } from '../../localization/workbench';
import { f } from '../../utils/functools';
+import type { RR, ValueOf } from '../../utils/types';
import { sortFunction } from '../../utils/utils';
import { H2, Ul } from '../Atoms';
import { Button } from '../Atoms/Button';
@@ -80,6 +81,38 @@ export function WbUploaded({
);
}
+export function TableRecordCounts({
+ recordCounts,
+ sortFunction: rawSortFunction,
+}: {
+ readonly recordCounts: Partial, number>>;
+ readonly sortFunction?: (
+ value: readonly [
+ Lowercase,
+ ValueOf, number>>>
+ ]
+ ) => ValueOf, number>>>;
+}): JSX.Element {
+ const resolvedRecords =
+ typeof rawSortFunction === 'function'
+ ? Object.entries(recordCounts).sort(sortFunction(rawSortFunction))
+ : Object.entries(recordCounts);
+
+ return (
+
+ {resolvedRecords.map(([tableName, recordCount], index) =>
+ typeof recordCount === 'number' ? (
+
+ ) : null
+ )}
+
+ );
+}
+
function TableResults({
tableName,
recordCount,
diff --git a/specifyweb/frontend/js_src/lib/components/WorkBench/Status.tsx b/specifyweb/frontend/js_src/lib/components/WorkBench/Status.tsx
index cc5091f30bf..1d29d167489 100644
--- a/specifyweb/frontend/js_src/lib/components/WorkBench/Status.tsx
+++ b/specifyweb/frontend/js_src/lib/components/WorkBench/Status.tsx
@@ -12,6 +12,7 @@ import { Http } from '../../utils/ajax/definitions';
import { Progress } from '../Atoms';
import { Button } from '../Atoms/Button';
import { Label } from '../Atoms/Form';
+import { SECOND } from '../Atoms/timeUnits';
import { error } from '../Errors/assert';
import { softFail } from '../Errors/Crash';
import { useTitle } from '../Molecules/AppTitle';
@@ -20,7 +21,7 @@ import type { Dataset, Status } from '../WbPlanView/Wrapped';
import { RemainingLoadingTime } from './RemainingLoadingTime';
// How often to query back-end
-const REFRESH_RATE = 2000;
+const REFRESH_RATE = 2 * SECOND;
export function WbStatus({
dataset,
diff --git a/specifyweb/frontend/js_src/lib/components/WorkBench/resultsParser.ts b/specifyweb/frontend/js_src/lib/components/WorkBench/resultsParser.ts
index e2897a50e02..71efea0b4a3 100644
--- a/specifyweb/frontend/js_src/lib/components/WorkBench/resultsParser.ts
+++ b/specifyweb/frontend/js_src/lib/components/WorkBench/resultsParser.ts
@@ -172,21 +172,16 @@ export type UploadResult = {
};
};
-/** Back-end sends a validation key. Front-end translates it */
-export function resolveValidationMessage(
+export function resolveBackendParsingMessage(
key: string,
payload: IR
-): LocalizedString {
+): LocalizedString | undefined {
if (key === 'failedParsingBoolean')
return backEndText.failedParsingBoolean({ value: payload.value as string });
else if (key === 'failedParsingDecimal')
return backEndText.failedParsingDecimal({ value: payload.value as string });
else if (key === 'failedParsingFloat')
return backEndText.failedParsingFloat({ value: payload.value as string });
- else if (key === 'failedParsingPickList')
- return backEndText.failedParsingPickList({
- value: `"${payload.value as string}"`,
- });
else if (key === 'failedParsingAgentType')
return backEndText.failedParsingAgentType({
agentTypeField: getField(tables.Agent, 'agentType').label,
@@ -195,12 +190,6 @@ export function resolveValidationMessage(
(payload.validTypes as RA) ?? []
),
});
- else if (key === 'pickListValueTooLong')
- return backEndText.pickListValueTooLong({
- pickListTable: tables.PickList.label,
- pickList: payload.pickList as string,
- maxLength: payload.maxLength as number,
- });
else if (key === 'valueTooLong')
return backEndText.valueTooLong({
maxLength: payload.maxLength as number,
@@ -226,6 +215,32 @@ export function resolveValidationMessage(
return backEndText.longitudeOutOfRange({
value: payload.value as string,
});
+ else if (key === 'formatMismatch')
+ return backEndText.formatMismatch({
+ value: payload.value as string,
+ formatter: payload.formatter as string,
+ });
+ else return undefined;
+}
+
+/** Back-end sends a validation key. Front-end translates it */
+export function resolveValidationMessage(
+ key: string,
+ payload: IR
+): LocalizedString {
+ const baseParsedMessage = resolveBackendParsingMessage(key, payload);
+ if (baseParsedMessage !== undefined) {
+ return baseParsedMessage;
+ } else if (key === 'failedParsingPickList')
+ return backEndText.failedParsingPickList({
+ value: `"${payload.value as string}"`,
+ });
+ else if (key === 'pickListValueTooLong')
+ return backEndText.pickListValueTooLong({
+ pickListTable: tables.PickList.label,
+ pickList: payload.pickList as string,
+ maxLength: payload.maxLength as number,
+ });
else if (key === 'invalidPartialRecord')
return backEndText.invalidPartialRecord({
column: payload.column as string,
diff --git a/specifyweb/frontend/js_src/lib/localization/backEnd.ts b/specifyweb/frontend/js_src/lib/localization/backEnd.ts
index 7a3a0de790f..a83eaeef29b 100644
--- a/specifyweb/frontend/js_src/lib/localization/backEnd.ts
+++ b/specifyweb/frontend/js_src/lib/localization/backEnd.ts
@@ -209,6 +209,9 @@ export const backEndText = createDictionary({
Längengrad muss zwischen -180 und 180 liegen. Tatsächlich: {value:string}
`,
},
+ formatMismatch: {
+ 'en-us': 'value {value:string} does not match formatter {formatter:string}',
+ },
invalidPartialRecord: {
'en-us': 'this field must be empty if {column:string} is empty',
'ru-ru': 'это поле должно быть пустым, если {column:string} пусто',
diff --git a/specifyweb/frontend/js_src/lib/localization/header.ts b/specifyweb/frontend/js_src/lib/localization/header.ts
index 41921b4cb74..cef1efd1717 100644
--- a/specifyweb/frontend/js_src/lib/localization/header.ts
+++ b/specifyweb/frontend/js_src/lib/localization/header.ts
@@ -143,6 +143,9 @@ export const headerText = createDictionary({
abgeschlossen ist.
`,
},
+ localityUpdateTool: {
+ 'en-us': 'Locality Update Tool',
+ },
labelName: {
'en-us': 'Label Name',
'ru-ru': 'Название ярлыка',
diff --git a/specifyweb/frontend/js_src/lib/localization/locality.ts b/specifyweb/frontend/js_src/lib/localization/locality.ts
index 84cf6abd79a..c9dd28a187d 100644
--- a/specifyweb/frontend/js_src/lib/localization/locality.ts
+++ b/specifyweb/frontend/js_src/lib/localization/locality.ts
@@ -292,6 +292,86 @@ export const localityText = createDictionary({
'uk-ua': 'DD MM SS.SS N/S/E/W (32 45 42.84 N)',
'de-ch': 'DD MM SS.SS N/S/O/W (32 45 42.84 N)',
},
+ localityUpdateHeaderError: {
+ 'en-us': 'Errors Found in Column Headers',
+ },
+ localityUpdateMissingHeader: {
+ 'en-us': 'The following columns are required but missing in the data set',
+ },
+ localityUpdateUnrecognizedHeaders: {
+ 'en-us':
+ 'The following columns in the data set are not recognized and will be ignored on import',
+ },
+ localityUpdateAcceptedHeaders: {
+ 'en-us': 'Only the following headers are accepted',
+ },
+ localityUpdateStarting: {
+ 'en-us': 'Starting Locality Update',
+ },
+ localityUpdateParsing: {
+ 'en-us': 'Parsing Locality Data Set',
+ },
+ localityUpdateProgressing: {
+ 'en-us': 'Importing Locality Data Set',
+ },
+ localityUpdateParsed: {
+ 'en-us': 'Locality Update Data Set Parsed',
+ },
+ localityUpdateFailed: {
+ 'en-us': 'Locality Update Failed',
+ },
+ localityUpdateParseFailure: {
+ 'en-us': 'Locality Update Parsing Failed',
+ },
+ localityUpdateCancelled: {
+ 'en-us': 'Locality Update Cancelled',
+ },
+ localityUpdateSucceeded: {
+ 'en-us': 'Locality Update Succeeded',
+ },
+ localityUpdateWentWrong: {
+ 'en-us': 'Something went wrong during the Locality Update process',
+ },
+ localityUpdateParseErrorFileName: {
+ comment:
+ 'The file name which is used when Parse Errors are exported. The .csv file extension is appended to the end of this string',
+ 'en-us': 'Locality Update Errors - {date:string}',
+ },
+ localityUpdateCrashFileName: {
+ comment:
+ 'The file name which is used when any Generic non-parsing errors are exported. The .txt file extension is appended to the end of this string',
+ 'en-us': 'Locality Update {taskId:string} Crash Report - {date:string}',
+ },
+ guidHeaderNotProvided: {
+ 'en-us': "The data set must contain a 'guid' header",
+ },
+ noLocalityMatchingGuid: {
+ 'en-us': "No Locality with guid: '{guid:string}'",
+ },
+ multipleLocalitiesWithGuid: {
+ 'en-us':
+ 'More than one Locality found with guid: {guid:string}. Locality IDs: {localityIds:string}',
+ },
+ localityUpdateEffectCounts: {
+ 'en-us':
+ 'The following number of {localityTabelLabel:string} records will be affected by the update and {geoCoordDetailTableLabel:string} records will be created:',
+ },
+ localityUploadedDescription: {
+ 'en-us':
+ 'The following number of {localityTabelLabel:string} records were updated and {geoCoordDetailTableLabel:string} records were created:',
+ },
+ localityUpdateStarted: {
+ 'en-us': 'The Locality Update process has started',
+ },
+ localityUpdateResults: {
+ 'en-us': 'Locality Update Results',
+ },
+ localityUpdateFailureResults: {
+ 'en-us': 'Locality Update Failure Results',
+ },
+ taskId: {
+ 'en-us': 'Task ID',
+ },
validLatitude: {
'en-us': 'Latitude needs to have a value between -90° and 90°',
'de-ch': 'Der Breitengrad muss einen Wert zwischen -90° und 90° haben',
diff --git a/specifyweb/frontend/js_src/lib/utils/ajax/definitions.ts b/specifyweb/frontend/js_src/lib/utils/ajax/definitions.ts
index 6fd51a277e2..a2a4024bfd4 100644
--- a/specifyweb/frontend/js_src/lib/utils/ajax/definitions.ts
+++ b/specifyweb/frontend/js_src/lib/utils/ajax/definitions.ts
@@ -13,6 +13,7 @@ export const Http = {
CONFLICT: 409,
TOO_LARGE: 413,
MISDIRECTED: 421,
+ UNPROCESSABLE: 422,
HUGE_HEADER: 431,
SERVER_ERROR: 500,
BAD_GATEWAY: 502,
@@ -47,12 +48,6 @@ export const httpCodeToErrorMessage: RR, string> = {
access to, or your session has expired. Please try logging in again, or
repeat the action as a user with more permissions
`,
- // This error code is used by the front-end when request was aborted
- [Http.MISDIRECTED]: `
- This error happened because Specify failed to send a request to the server.
- Please try again, and if the problem persists, contact your system
- administrator.
- `,
[Http.CONFLICT]: `
This error happened because the resource you tried to update has already
been modified by someone else. Please refresh the page and try again.
@@ -62,21 +57,29 @@ export const httpCodeToErrorMessage: RR, string> = {
the configured server limit. Either contact your system administrator about
increasing the limit, or try uploading a smaller file.
`,
+ // This error code is used by the front-end when request was aborted
+ [Http.MISDIRECTED]: `
+ This error happened because Specify failed to send a request to the server.
+ Please try again, and if the problem persists, contact your system
+ administrator.
+ `,
+ [Http.UNPROCESSABLE]:
+ 'This error is likely caused by a bug in Specify. Please report it.',
[Http.HUGE_HEADER]:
'Please try clearing your cookies or using a different browser.',
[Http.SERVER_ERROR]: `
This error may indicate a misconfiguration or a bug in Specify. Please
double check your configuration and report this issue.
`,
- [Http.UNAVAILABLE]: `
- This error happened because the server is overloaded or this resource is
- currently unavailable. Please try logging in again later.
- `,
[Http.BAD_GATEWAY]: `
This error likely happened because the server is down, is not yet started,
or in a process of being restarted. If this issue does not resolve after a
few minutes, contact your system administrator.
`,
+ [Http.UNAVAILABLE]: `
+ This error happened because the server is overloaded or this resource is
+ currently unavailable. Please try logging in again later.
+ `,
[Http.GATEWAY_TIMEOUT]: `
This error likely happened because the server is overloaded or you sent a
large request. Please try again later.
diff --git a/specifyweb/frontend/js_src/package-lock.json b/specifyweb/frontend/js_src/package-lock.json
index f714e1145d5..b78bffa4a87 100644
--- a/specifyweb/frontend/js_src/package-lock.json
+++ b/specifyweb/frontend/js_src/package-lock.json
@@ -75,6 +75,7 @@
"@types/underscore": "^1.10.24",
"babel-jest": "^28.1.3",
"babel-loader": "^8.2.5",
+ "babel-plugin-transform-import-meta": "^2.2.1",
"chalk": "^4.1.2",
"commander": "^9.4.1",
"core-js": "^3.23.4",
@@ -5558,6 +5559,25 @@
"@babel/core": "^7.0.0-0"
}
},
+ "node_modules/babel-plugin-transform-import-meta": {
+ "version": "2.2.1",
+ "resolved": "https://registry.npmjs.org/babel-plugin-transform-import-meta/-/babel-plugin-transform-import-meta-2.2.1.tgz",
+ "integrity": "sha512-AxNh27Pcg8Kt112RGa3Vod2QS2YXKKJ6+nSvRtv7qQTJAdx0MZa4UHZ4lnxHUWA2MNbLuZQv5FVab4P1CoLOWw==",
+ "dev": true,
+ "dependencies": {
+ "@babel/template": "^7.4.4",
+ "tslib": "^2.4.0"
+ },
+ "peerDependencies": {
+ "@babel/core": "^7.10.0"
+ }
+ },
+ "node_modules/babel-plugin-transform-import-meta/node_modules/tslib": {
+ "version": "2.6.3",
+ "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.6.3.tgz",
+ "integrity": "sha512-xNvxJEOUiWPGhUuUdQgAJPKOOJfGnIyKySOc09XkKsgdUV/3E2zvwZYdejjmRgPCgcym1juLH3226yA7sEFJKQ==",
+ "dev": true
+ },
"node_modules/babel-preset-current-node-syntax": {
"version": "1.0.1",
"resolved": "https://registry.npmjs.org/babel-preset-current-node-syntax/-/babel-preset-current-node-syntax-1.0.1.tgz",
@@ -20806,6 +20826,24 @@
"@babel/helper-define-polyfill-provider": "^0.3.1"
}
},
+ "babel-plugin-transform-import-meta": {
+ "version": "2.2.1",
+ "resolved": "https://registry.npmjs.org/babel-plugin-transform-import-meta/-/babel-plugin-transform-import-meta-2.2.1.tgz",
+ "integrity": "sha512-AxNh27Pcg8Kt112RGa3Vod2QS2YXKKJ6+nSvRtv7qQTJAdx0MZa4UHZ4lnxHUWA2MNbLuZQv5FVab4P1CoLOWw==",
+ "dev": true,
+ "requires": {
+ "@babel/template": "^7.4.4",
+ "tslib": "^2.4.0"
+ },
+ "dependencies": {
+ "tslib": {
+ "version": "2.6.3",
+ "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.6.3.tgz",
+ "integrity": "sha512-xNvxJEOUiWPGhUuUdQgAJPKOOJfGnIyKySOc09XkKsgdUV/3E2zvwZYdejjmRgPCgcym1juLH3226yA7sEFJKQ==",
+ "dev": true
+ }
+ }
+ },
"babel-preset-current-node-syntax": {
"version": "1.0.1",
"resolved": "https://registry.npmjs.org/babel-preset-current-node-syntax/-/babel-preset-current-node-syntax-1.0.1.tgz",
diff --git a/specifyweb/frontend/js_src/package.json b/specifyweb/frontend/js_src/package.json
index 3a58b51556a..125bec2a2e6 100644
--- a/specifyweb/frontend/js_src/package.json
+++ b/specifyweb/frontend/js_src/package.json
@@ -95,6 +95,7 @@
"@types/underscore": "^1.10.24",
"babel-jest": "^28.1.3",
"babel-loader": "^8.2.5",
+ "babel-plugin-transform-import-meta": "^2.2.1",
"chalk": "^4.1.2",
"commander": "^9.4.1",
"core-js": "^3.23.4",
diff --git a/specifyweb/notifications/migrations/0005_auto_20240530_1512.py b/specifyweb/notifications/migrations/0005_auto_20240530_1512.py
new file mode 100644
index 00000000000..d9d2b980418
--- /dev/null
+++ b/specifyweb/notifications/migrations/0005_auto_20240530_1512.py
@@ -0,0 +1,40 @@
+# Generated by Django 3.2.15 on 2024-05-29 17:06
+
+from django.conf import settings
+from django.db import migrations, models
+import django.db.models.deletion
+
+
+class Migration(migrations.Migration):
+
+ dependencies = [
+ ('notifications', '0004_rename_merge_policy'),
+ ]
+
+ operations = [
+ migrations.RenameField(
+ model_name='spmerging',
+ old_name='mergingstatus',
+ new_name='status',
+ ),
+ migrations.AlterField(
+ model_name='spmerging',
+ name='collection',
+ field=models.ForeignKey(db_column='CollectionID', on_delete=django.db.models.deletion.CASCADE, to='specify.collection'),
+ ),
+ migrations.AlterField(
+ model_name='spmerging',
+ name='specifyuser',
+ field=models.ForeignKey(db_column='SpecifyUserID', on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL),
+ ),
+ migrations.AlterField(
+ model_name='spmerging',
+ name="createdbyagent",
+ field=models.ForeignKey(null=True, db_column="CreatedByAgentID", on_delete=models.SET_NULL, to='specify.agent', related_name="+")
+ ),
+ migrations.AlterField(
+ model_name='spmerging',
+ name="modifiedbyagent",
+ field=models.ForeignKey(null=True, db_column="ModifiedByAgentID", on_delete=models.SET_NULL, to='specify.agent', related_name="+")
+ )
+ ]
diff --git a/specifyweb/notifications/migrations/0006_localityupdate_localityupdaterowresult.py b/specifyweb/notifications/migrations/0006_localityupdate_localityupdaterowresult.py
new file mode 100644
index 00000000000..6c727caae6b
--- /dev/null
+++ b/specifyweb/notifications/migrations/0006_localityupdate_localityupdaterowresult.py
@@ -0,0 +1,48 @@
+# Generated by Django 3.2.15 on 2024-06-18 13:34
+
+from django.conf import settings
+from django.db import migrations, models
+import django.db.models.deletion
+import django.utils.timezone
+
+
+class Migration(migrations.Migration):
+
+ dependencies = [
+ ('specify', '__first__'),
+ migrations.swappable_dependency(settings.AUTH_USER_MODEL),
+ ('notifications', '0005_auto_20240530_1512'),
+ ]
+
+ operations = [
+ migrations.CreateModel(
+ name='LocalityUpdate',
+ fields=[
+ ('taskid', models.CharField(max_length=256)),
+ ('status', models.CharField(max_length=256)),
+ ('timestampcreated', models.DateTimeField(default=django.utils.timezone.now)),
+ ('timestampmodified', models.DateTimeField(auto_now=True)),
+ ('id', models.AutoField(db_column='LocalityUpdateID', primary_key=True, serialize=False, verbose_name='localityupdateid')),
+ ('collection', models.ForeignKey(db_column='CollectionID', on_delete=django.db.models.deletion.CASCADE, to='specify.collection')),
+ ('createdbyagent', models.ForeignKey(db_column='CreatedByAgentID', null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to='specify.agent')),
+ ('modifiedbyagent', models.ForeignKey(db_column='ModifiedByAgentID', null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to='specify.agent')),
+ ('recordset', models.ForeignKey(blank=True, db_column='RecordSetID', null=True, on_delete=django.db.models.deletion.SET_NULL, to='specify.recordset')),
+ ('specifyuser', models.ForeignKey(db_column='SpecifyUserID', on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
+ ],
+ options={
+ 'db_table': 'localityupdate',
+ },
+ ),
+ migrations.CreateModel(
+ name='LocalityUpdateRowResult',
+ fields=[
+ ('id', models.AutoField(db_column='LocalityUpdateRowResultID', primary_key=True, serialize=False, verbose_name='localityupdaterowresultid')),
+ ('rownumber', models.IntegerField()),
+ ('result', models.JSONField()),
+ ('localityupdate', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='results', db_column='LocalityUpdateID', to='notifications.localityupdate')),
+ ],
+ options={
+ 'db_table': 'localityupdaterowresult',
+ },
+ ),
+ ]
diff --git a/specifyweb/notifications/models.py b/specifyweb/notifications/models.py
index 32ac6cf334a..dcddc65e184 100644
--- a/specifyweb/notifications/models.py
+++ b/specifyweb/notifications/models.py
@@ -1,8 +1,7 @@
from django.db import models
from django.utils import timezone
from functools import partialmethod
-from specifyweb.specify.models import Collection, Specifyuser, Agent
-from ..specify.models import Specifyuser, datamodel, custom_save
+from specifyweb.specify.models import Collection, Specifyuser, Agent, Recordset, datamodel, custom_save
class Message(models.Model):
specify_model = datamodel.get_table('message')
@@ -14,26 +13,64 @@ class Message(models.Model):
save = partialmethod(custom_save)
-class Spmerging(models.Model):
+
+class AsyncTask(models.Model):
+ taskid = models.CharField(max_length=256)
+ status = models.CharField(max_length=256)
+ timestampcreated = models.DateTimeField(default=timezone.now)
+ timestampmodified = models.DateTimeField(auto_now=True)
+ specifyuser = models.ForeignKey(
+ Specifyuser, db_column='SpecifyUserID', on_delete=models.CASCADE)
+ collection = models.ForeignKey(
+ Collection, db_column="CollectionID", on_delete=models.CASCADE)
+ createdbyagent = models.ForeignKey(
+ Agent, null=True, db_column="CreatedByAgentID", on_delete=models.SET_NULL, related_name="+")
+ modifiedbyagent = models.ForeignKey(
+ Agent, null=True, db_column="ModifiedByAgentID", on_delete=models.SET_NULL, related_name="+")
+
+ class Meta:
+ abstract = True
+
+
+class Spmerging(AsyncTask):
specify_model = datamodel.get_table('spmerging')
- name = models.CharField(max_length=256)
- taskid = models.CharField(max_length=256)
- mergingstatus = models.CharField(max_length=256)
+ name = models.CharField(max_length=256)
response = models.TextField()
table = models.CharField(max_length=256)
newrecordid = models.IntegerField(null=True)
newrecordata = models.JSONField(null=True)
oldrecordids = models.JSONField(null=True)
- collection = models.ForeignKey(Collection, on_delete=models.CASCADE)
- specifyuser = models.ForeignKey(Specifyuser, on_delete=models.CASCADE)
- timestampcreated = models.DateTimeField(default=timezone.now)
- timestampmodified = models.DateTimeField(auto_now=True)
- createdbyagent = models.ForeignKey(Agent, null=True, on_delete=models.SET_NULL, related_name="+")
- modifiedbyagent = models.ForeignKey(Agent, null=True, on_delete=models.SET_NULL, related_name="+")
+
+ save = partialmethod(custom_save)
class Meta:
db_table = 'spmerging'
# managed = False
- save = partialmethod(custom_save)
+
+class LocalityUpdate(AsyncTask):
+ specify_model = datamodel.get_table('localityupdate')
+
+ id = models.AutoField('localityupdateid',
+ primary_key=True, db_column='LocalityUpdateID')
+ recordset = models.ForeignKey(
+ Recordset, null=True, blank=True, db_column="RecordSetID", on_delete=models.SET_NULL)
+
+ class Meta:
+ db_table = 'localityupdate'
+
+
+class LocalityUpdateRowResult(models.Model):
+ specify_model = datamodel.get_table('localityupdaterowresult')
+
+
+ id = models.AutoField('localityupdaterowresultid',
+ primary_key=True, db_column='LocalityUpdateRowResultID')
+ rownumber = models.IntegerField()
+ result = models.JSONField()
+ localityupdate = models.ForeignKey(
+ LocalityUpdate, on_delete=models.CASCADE, related_name="results", db_column='LocalityUpdateID')
+
+ class Meta:
+ db_table = 'localityupdaterowresult'
diff --git a/specifyweb/permissions/initialize.py b/specifyweb/permissions/initialize.py
index ae04ab6d683..55e20da62c5 100644
--- a/specifyweb/permissions/initialize.py
+++ b/specifyweb/permissions/initialize.py
@@ -30,7 +30,7 @@ def create_admins() -> None:
users = Specifyuser.objects.all()
for user in users:
- if user.is_admin():
+ if user.is_legacy_admin():
UserPolicy.objects.get_or_create(
collection=None,
specifyuser_id=user.id,
diff --git a/specifyweb/specify/calculated_fields.py b/specifyweb/specify/calculated_fields.py
index 84cf0d24141..45ce1490716 100644
--- a/specifyweb/specify/calculated_fields.py
+++ b/specifyweb/specify/calculated_fields.py
@@ -57,9 +57,7 @@ def calculate_extra_fields(obj, data: Dict[str, Any]) -> Dict[str, Any]:
extra["isonloan"] = obj.isonloan()
elif isinstance(obj, Specifyuser):
- extra["isadmin"] = obj.userpolicy_set.filter(
- collection=None, resource="%", action="%"
- ).exists()
+ extra["isadmin"] = obj.is_admin()
elif isinstance(obj, Collectionobject):
preparations = obj.preparations.all()
diff --git a/specifyweb/specify/datamodel.py b/specifyweb/specify/datamodel.py
index f10ac2145d6..5411088bcba 100644
--- a/specifyweb/specify/datamodel.py
+++ b/specifyweb/specify/datamodel.py
@@ -7967,7 +7967,7 @@
fields=[
Field(name='name', column='Name', indexed=False, unique=False, required=True, type='java.lang.String', length=255),
Field(name='taskid', column='TaskID', indexed=False, unique=False, required=True, type='java.lang.String', length=255),
- Field(name='mergingstatus', column='MergingStatus', indexed=False, unique=False, required=True, type='java.lang.String', length=255),
+ Field(name='status', column='MergingStatus', indexed=False, unique=False, required=True, type='java.lang.String', length=255),
Field(name='response', column='Response', indexed=False, unique=False, required=False, type='text'),
Field(name='table', column='Table', indexed=False, unique=False, required=True, type='java.lang.String', length=255),
Field(name='newrecordid', column='NewRecordID', indexed=False, unique=False, required=True, type='java.lang.Integer'),
@@ -7980,15 +7980,55 @@
],
relationships=[
- Relationship(name='collection', type='many-to-one', required=False, relatedModelName='Collection', column='Collection_ID'),
- Relationship(name='specifyuser', type='many-to-one', required=True, relatedModelName='SpecifyUser', column='SpecifyUser_ID'),
- Relationship(name='createdbyagent', type='many-to-one', required=False, relatedModelName='Agent', column='CreatedByAgent_ID'),
- Relationship(name='modifiedbyagent', type='many-to-one', required=False, relatedModelName='Agent', column='ModifiedByAgent_ID')
+ Relationship(name='collection', type='many-to-one', required=False, relatedModelName='Collection', column='CollectionID'),
+ Relationship(name='specifyuser', type='many-to-one', required=True, relatedModelName='SpecifyUser', column='SpecifyUserID'),
+ Relationship(name='createdbyagent', type='many-to-one', required=False, relatedModelName='Agent', column='CreatedByAgentID'),
+ Relationship(name='modifiedbyagent', type='many-to-one', required=False, relatedModelName='Agent', column='ModifiedByAgentID')
],
fieldAliases=[
]
),
+ Table( # LocalityUpdate
+ sp7_only = True,
+ django_app='notifications',
+ classname='edu.ku.brc.specify.datamodel.LocalityUpdate',
+ table='localityupdate',
+ tableId=1013,
+ idColumn='LocalityUpdateID',
+ idFieldName='localityUpdateId',
+ idField=IdField(name='localityUpdateId', column='LocalityUpdateID', type='java.lang.Integer'),
+ fields = [
+ Field(name='taskid', column='TaskID', indexed=False, unique=False, required=True, type='java.lang.String', length=255),
+ Field(name='status', column='Status', indexed=False, unique=False, required=True, type='java.lang.String', length=255),
+ Field(name='timestampcreated', column='TimestampCreated', indexed=False, unique=False, required=True, type='java.sql.Timestamp'),
+ Field(name='timestampmodified', column='TimestampModified', indexed=False, unique=False, required=False, type='java.sql.Timestamp')
+ ],
+ relationships = [
+ Relationship(name='recordset', type='many-to-one', required=False, relatedModelName='RecordSet', column='RecordSetID'),
+ Relationship(name='collection', type='many-to-one', required=False, relatedModelName='Collection', column='CollectionID'),
+ Relationship(name='specifyuser', type='many-to-one', required=True, relatedModelName='SpecifyUser', column='SpecifyUserID'),
+ Relationship(name='createdbyagent', type='many-to-one', required=False, relatedModelName='Agent', column='CreatedByAgentID'),
+ Relationship(name='modifiedbyagent', type='many-to-one', required=False, relatedModelName='Agent', column='ModifiedByAgentID')
+ ]
+ ),
+ Table( #LocalityUpdateRowResult
+ sp7_only = True,
+ django_app='notifications',
+ classname='edu.ku.brc.specify.datamodel.LocalityUpdateRowResult',
+ table='localityupdaterowresult',
+ tableId=1014,
+ idColumn='LocalityUpdateRowResultID',
+ idFieldName='localityUpdateRowResultId',
+ idField=IdField(name='localityUpdateRowResultId', column='LocalityUpdateRowResultID', type='java.lang.Integer'),
+ fields = [
+ Field(name='rownumber', column='rownumber', indexed=False, unique=False, required=True, type='java.lang.Integer'),
+ Field(name='result', column='result', indexed=False, unique=False, required=True, type='json')
+ ],
+ relationships = [
+ Relationship(name='localityupdate', type='many-to-one', required=False, dependent=True, relatedModelName='LocalityUpdate', column='LocalityUpdateID', otherSideName='results')
+ ]
+ ),
Table( # UserPolicy
sp7_only=True,
django_app='permissions',
diff --git a/specifyweb/specify/management/commands/support_login.py b/specifyweb/specify/management/commands/support_login.py
index 969e53770b1..359195a2f3f 100644
--- a/specifyweb/specify/management/commands/support_login.py
+++ b/specifyweb/specify/management/commands/support_login.py
@@ -27,7 +27,7 @@ def add_arguments(self, parser):
def handle(self, **options):
if options['list']:
- def admin(user): return 'admin' if user.is_admin() else ''
+ def admin(user): return 'admin' if user.is_legacy_admin() else ''
for user in Specifyuser.objects.all():
self.stdout.write('\t'.join((user.name, user.usertype, admin(user))))
diff --git a/specifyweb/specify/model_extras.py b/specifyweb/specify/model_extras.py
index aee4abcecdd..0f3bea3cf96 100644
--- a/specifyweb/specify/model_extras.py
+++ b/specifyweb/specify/model_extras.py
@@ -54,7 +54,13 @@ def check_password(self, password):
return False
return decrypted == password
- def is_admin(self):
+ def is_admin(self):
+ "Returns true if user is a Specify 7 admin."
+ return self.userpolicy_set.filter(
+ collection=None, resource="%", action="%"
+ ).exists()
+
+ def is_legacy_admin(self):
"Returns true if user is a Specify 6 admin."
from django.db import connection
cursor = connection.cursor()
diff --git a/specifyweb/specify/models_by_table_id.py b/specifyweb/specify/models_by_table_id.py
index 673340c656a..1083b89b00c 100644
--- a/specifyweb/specify/models_by_table_id.py
+++ b/specifyweb/specify/models_by_table_id.py
@@ -200,7 +200,9 @@
1009:'UserRole',
1010:'RolePolicy',
1011:'LibraryRolePolicy',
- 1012:'Spdataset'
+ 1012:'Spdataset',
+ 1013: 'LocalityUpdate',
+ 1014: 'LocalityUpdateRowResult'
}
model_names_by_app = {
@@ -216,7 +218,9 @@
},
'notifications': {
'Message',
- 'Spmerging'
+ 'Spmerging',
+ 'LocalityUpdate',
+ 'LocalityUpdateRowResult'
},
'permissions': {
'UserPolicy',
diff --git a/specifyweb/specify/parse.py b/specifyweb/specify/parse.py
new file mode 100644
index 00000000000..342239d4d74
--- /dev/null
+++ b/specifyweb/specify/parse.py
@@ -0,0 +1,276 @@
+import re
+import math
+
+from typing import Dict, List, Tuple, Any, NamedTuple, Union, Optional, Literal
+from datetime import datetime
+from decimal import Decimal
+
+from specifyweb.specify import models
+from specifyweb.specify.agent_types import agent_types
+from specifyweb.stored_queries.format import get_date_format, MYSQL_TO_YEAR, MYSQL_TO_MONTH
+from specifyweb.specify.datamodel import datamodel, Table, Field, Relationship
+from specifyweb.specify.uiformatters import get_uiformatter, FormatMismatch
+
+ParseFailureKey = Literal[
+'valueTooLong',
+'formatMismatch',
+
+'failedParsingDecimal',
+'failedParsingFloat',
+'failedParsingBoolean',
+'failedParsingAgentType',
+
+'invalidYear',
+'badDateFormat',
+
+'coordinateBadFormat',
+'latitudeOutOfRange',
+'longitudeOutOfRange'
+]
+
+class ParseFailure(NamedTuple):
+ message: ParseFailureKey
+ payload: Dict[str, Any]
+
+ def to_json(self) -> List:
+ return list(self)
+
+
+class ParseSucess(NamedTuple):
+ to_upload: Dict[str, Any]
+
+
+ParseResult = Union[ParseSucess, ParseFailure]
+
+
+def parse_field(collection, table_name: str, field_name: str, raw_value: str) -> ParseResult:
+ table = datamodel.get_table_strict(table_name)
+ field = table.get_field_strict(field_name)
+
+ formatter = get_uiformatter(collection, table_name, field_name)
+
+ if field.is_relationship:
+ return parse_integer(field.name, raw_value)
+
+ if formatter is not None:
+ return parse_formatted(collection, formatter, table, field, raw_value)
+
+ if is_latlong(table, field):
+ return parse_latlong(field, raw_value)
+
+ if is_agenttype(table, field):
+ return parse_agenttype(raw_value)
+
+ if field.is_temporal():
+ date_format = get_date_format() or "%Y-%m-%d"
+ return parse_date(table, field_name, date_format, raw_value)
+
+ if field.type == "java.lang.Boolean":
+ return parse_boolean(field_name, raw_value)
+
+ if field.type == 'java.math.BigDecimal':
+ return parse_decimal(field_name, raw_value)
+
+ if field.type in ('java.lang.Float', 'java.lang.Double'):
+ return parse_float(field_name, raw_value)
+
+ if field.type in ('java.lang.Integer', 'java.lang.Long', 'java.lang.Byte', 'java.lang.Short'):
+ return parse_integer(field_name, raw_value)
+
+ if hasattr(field, 'length') and field.length is not None and len(raw_value) > field.length:
+ return ParseFailure('valueTooLong', {'field': field_name, 'maxLength': field.length})
+
+ return ParseSucess({field_name.lower(): raw_value})
+
+
+def parse_string(value: str) -> Optional[str]:
+ result = value.strip()
+ if result == "":
+ return None
+ return result
+
+
+def parse_integer(field_name: str, value: str) -> ParseResult:
+ try:
+ result = int(value)
+ except ValueError as e:
+ return ParseFailure('failedParsingDecimal', {'value': value, 'field': field_name})
+
+ return ParseSucess({field_name.lower(): result})
+
+
+def parse_float(field_name: str, value: str) -> ParseResult:
+ try:
+ result = float(value)
+ except ValueError as e:
+ return ParseFailure('failedParsingFloat', {'value': value, 'field': field_name})
+
+ return ParseSucess({field_name.lower(): result})
+
+
+def parse_decimal(field_name: str, value: str) -> ParseResult:
+ try:
+ result = Decimal(value)
+ except Exception as e:
+ return ParseFailure(
+ 'failedParsingDecimal',
+ {'value': value, 'field': field_name}
+ )
+
+ return ParseSucess({field_name.lower(): result})
+
+
+def parse_boolean(field_name: str, value: str) -> ParseResult:
+ if value.lower() in ["yes", "true"]:
+ result = True
+ elif value.lower() in ["no", "false"]:
+ result = False
+ else:
+ return ParseFailure(
+ 'failedParsingBoolean',
+ {'value': value, 'field': field_name}
+ )
+
+ return ParseSucess({field_name.lower(): result})
+
+
+def parse_date(table: Table, field_name: str, dateformat: str, value: str) -> ParseResult:
+ if re.search('[0-9]{4}', value) is None:
+ return ParseFailure('invalidYear', {'value': value})
+
+ dateformat = dateformat.replace('%y', '%Y')
+ precision_field = table.get_field(field_name + 'precision')
+ if precision_field is None:
+ try:
+ date = datetime.strptime(value, dateformat).date()
+ except ValueError:
+ return ParseFailure('badDateFormat', {'value': value, 'format': dateformat})
+ return ParseSucess({field_name.lower(): date})
+
+ date_formats = [
+ dateformat,
+ MYSQL_TO_MONTH[dateformat],
+ MYSQL_TO_YEAR[dateformat],
+ dateformat.replace('%d', '00'),
+ re.sub('(%m)|(%d)', '00', dateformat),
+ ]
+
+ for df in date_formats:
+ try:
+ date = datetime.strptime(value, df).date()
+ except ValueError:
+ continue
+ if '%d' in df:
+ return ParseSucess({field_name.lower(): date, precision_field.name.lower(): 1})
+ elif '%m' in df or '%b' in df:
+ return ParseSucess({field_name.lower(): date.replace(day=1), precision_field.name.lower(): 2})
+ else:
+ return ParseSucess({field_name.lower(): date.replace(day=1, month=1), precision_field.name.lower(): 3})
+
+ return ParseFailure('badDateFormat', {'value': value, 'format': dateformat})
+
+
+def parse_formatted(collection, uiformatter, table: Table, field: Union[Field, Relationship], value: str) -> ParseResult:
+ try:
+ parsed = uiformatter.parse(value)
+ except FormatMismatch as e:
+ return ParseFailure('formatMismatch', {'value': e.value, 'formatter': e.formatter})
+
+ if uiformatter.needs_autonumber(parsed):
+ canonicalized = uiformatter.autonumber_now(
+ collection, getattr(models, table.django_name), parsed)
+ else:
+ canonicalized = uiformatter.canonicalize(parsed)
+
+ if hasattr(field, 'length') and field.length is not None and len(canonicalized) > field.length:
+ return ParseFailure('valueTooLong', {'maxLength': field.length})
+
+ return ParseSucess({field.name.lower(): canonicalized})
+
+
+def parse_agenttype(value: str) -> ParseResult:
+ value = value.capitalize()
+ try:
+ agenttype = agent_types.index(value)
+ except ValueError:
+ return ParseFailure('failedParsingAgentType', {'badType': value, 'validTypes': agent_types})
+ return ParseSucess({'agenttype': agenttype})
+
+
+def is_latlong(table: Table, field: Field) -> bool:
+ return table.name == 'Locality' \
+ and field.name in ('latitude1', 'longitude1', 'latitude2', 'longitude2')
+
+
+def is_agenttype(table: Table, field: Field) -> bool:
+ return table.name == "Agent" and field.name.lower() == 'agenttype'
+
+
+def parse_latlong(field: Field, value: str) -> ParseResult:
+ parsed = parse_coord(value)
+
+ if parsed is None:
+ return ParseFailure('coordinateBadFormat', {'value': value})
+
+ coord, unit = parsed
+ if field.name.startswith('lat') and abs(coord) > 90:
+ return ParseFailure("latitudeOutOfRange", {'value': value})
+
+ if field.name.startswith('long') and abs(coord) > 180:
+ return ParseFailure('longitudeOutOfRange', {'value': value})
+
+ return ParseSucess({field.name.lower(): coord,
+ 'originallatlongunit': unit,
+ field.name.lower().replace('itude', '') + 'text': parse_string(value)})
+
+
+def parse_coord(value: str) -> Optional[Tuple[float, int]]:
+ for p in LATLONG_PARSER_DEFS:
+ match = re.compile(p.regex, re.I).match(value)
+ if match and match.group(1):
+ try:
+ # relies on signed zeros in floats
+ # see https://docs.python.org/3/library/math.html#math.copysign
+ comps = [float(match.group(i)) for i in p.comp_groups]
+ except ValueError:
+ continue
+ result, divisor = 0.0, 1
+ for comp in comps:
+ result += abs(comp) / divisor
+ divisor *= 60
+ result = math.copysign(result, comps[0])
+ if match.group(p.dir_group).lower() in ("s", "w"):
+ result = -result
+ return (result, p.unit)
+ return None
+
+
+class LatLongParserDef(NamedTuple):
+ regex: str
+ comp_groups: List[int]
+ dir_group: int
+ unit: int
+
+
+LATLONG_PARSER_DEFS = [
+ LatLongParserDef(
+ r'^(-?\d{0,3}(\.\d*)?)[^\d\.nsew]*([nsew]?)$',
+ [1],
+ 3,
+ 0
+ ),
+
+ LatLongParserDef(
+ r'^(-?\d{1,3})[^\d\.]+(\d{0,2}(\.\d*)?)[^\d\.nsew]*([nsew]?)$',
+ [1, 2],
+ 4,
+ 2
+ ),
+
+ LatLongParserDef(
+ r'^(-?\d{1,3})[^\d\.]+(\d{1,2})[^\d\.]+(\d{0,2}(\.\d*)?)[^\d\.nsew]*([nsew]?)$',
+ [1, 2, 3],
+ 5,
+ 1
+ ),
+]
diff --git a/specifyweb/specify/record_merging.py b/specifyweb/specify/record_merging.py
index 9f5f1e59146..f9313d9b322 100644
--- a/specifyweb/specify/record_merging.py
+++ b/specifyweb/specify/record_merging.py
@@ -4,7 +4,6 @@
import json
from itertools import groupby
-import re
from typing import Any, Callable, Dict, List, Optional
import traceback
@@ -12,7 +11,6 @@
from django.db import IntegrityError, transaction, models
from specifyweb.notifications.models import Message, Spmerging
from django.db.models import Q
-from django.db.models.deletion import ProtectedError
from specifyweb.businessrules.exceptions import BusinessRuleException
from specifyweb.celery_tasks import LogErrorsTask, app
@@ -384,10 +382,10 @@ def progress(cur: int, additional_total: int=0) -> None:
merge_record = Spmerging.objects.get(id=merge_id)
if response.status_code != 204:
self.update_state(state='FAILED', meta={'current': current, 'total': total})
- merge_record.mergingstatus = 'FAILED'
+ merge_record.status = 'FAILED'
else:
self.update_state(state='SUCCEEDED', meta={'current': total, 'total': total})
- merge_record.mergingstatus = 'SUCCEEDED'
+ merge_record.status = 'SUCCEEDED'
merge_record.response = response.content.decode()
merge_record.save()
diff --git a/specifyweb/specify/sp7_build_models.py b/specifyweb/specify/sp7_build_models.py
index 612f1c2c457..f53915b1f9d 100644
--- a/specifyweb/specify/sp7_build_models.py
+++ b/specifyweb/specify/sp7_build_models.py
@@ -273,4 +273,4 @@ def generate_build_model_functions_code() -> str:
def build_model_code(module, datamodel, table_name):
table = datamodel.get_table(table_name)
model_code = generate_model_class_code(table, datamodel)
- return model_code
\ No newline at end of file
+ return model_code
diff --git a/specifyweb/specify/uiformatters.py b/specifyweb/specify/uiformatters.py
index 7a1958e87d1..3de7fe8fac4 100644
--- a/specifyweb/specify/uiformatters.py
+++ b/specifyweb/specify/uiformatters.py
@@ -68,6 +68,10 @@ def get_autonumber_group_filter(model, collection, format_name: str):
return default
class FormatMismatch(ValueError):
+ def __init__(self, *args: object, value: str, formatter: str) -> None:
+ super().__init__(*args)
+ self.value = value
+ self.formatter = formatter
pass
class UIFormatter(NamedTuple):
@@ -83,7 +87,7 @@ def parse_regexp(self) -> str:
def parse(self, value: str) -> Sequence[str]:
match = re.match(self.parse_regexp(), value)
if match is None:
- raise FormatMismatch("value {} doesn't match formatter {}". format(repr(value), self.value()))
+ raise FormatMismatch("value {} doesn't match formatter {}".format(repr(value), self.value()), value=repr(value), formatter=self.value())
return match.groups()
def value(self) -> str:
diff --git a/specifyweb/specify/update_locality.py b/specifyweb/specify/update_locality.py
new file mode 100644
index 00000000000..f28818547f3
--- /dev/null
+++ b/specifyweb/specify/update_locality.py
@@ -0,0 +1,504 @@
+import json
+import traceback
+
+from typing import get_args as get_typing_args, Any, Dict, List, Tuple, Literal, Optional, NamedTuple, Union, Callable, TypedDict
+from datetime import datetime
+from django.db import transaction
+from django.core.serializers.json import DjangoJSONEncoder
+from celery.exceptions import Ignore
+
+import specifyweb.specify.models as spmodels
+
+from specifyweb.celery_tasks import LogErrorsTask, app
+from specifyweb.specify.datamodel import datamodel
+from specifyweb.notifications.models import LocalityUpdate, LocalityUpdateRowResult, Message
+from specifyweb.specify.parse import ParseFailureKey, parse_field as _parse_field, ParseFailure as BaseParseFailure, ParseSucess as BaseParseSuccess
+
+LocalityParseErrorMessageKey = Literal[
+ 'guidHeaderNotProvided',
+ 'noLocalityMatchingGuid',
+ 'multipleLocalitiesWithGuid',
+]
+
+# constructs a list with the string literals defined in the
+# base ParseFailureKey and LocalityParseErrorMessageKey types
+localityParseErrorMessages: List[LocalityParseErrorMessageKey] = list(
+ set(get_typing_args(LocalityParseErrorMessageKey)) | set(get_typing_args(ParseFailureKey)))
+
+updatable_locality_fields = ['latitude1', 'longitude1', 'datum']
+updatable_geocoorddetail_fields = [
+ field.name.lower() for field in datamodel.get_table_strict('Geocoorddetail').fields]
+
+UpdateModel = Literal['Locality', 'Geocoorddetail']
+
+localityupdate_parse_success = {
+ "type": "array",
+ "items": {
+ "type": "object",
+ "properties": {
+ "locality": {
+ "type": "object"
+ },
+ "geocoorddetail": {
+ "oneOf": [
+ {
+ "type": "object"
+ },
+ {
+ "type": "null"
+ }
+ ]
+ },
+ "locality_id": {
+ "type": "number",
+ "minimum": 0
+ },
+ "row_number": {
+ "type": "number",
+ "minimum": 1
+ }
+ },
+ "required": ["locality", "geocoorddetail", "locality_id", "row_number"],
+ "additionalProperties": False
+ }
+}
+
+localityupdate_parse_error = {
+ "type": "array",
+ "items": {
+ "type": "object",
+ "properties": {
+ "message": {
+ "description": "Keys for errors which occurred during parsing",
+ "type": "string",
+ "enum": localityParseErrorMessages
+ },
+ "field": {
+ "description": "The field name which had the parsing error",
+ "type": "string"
+ },
+ "payload": {
+ "description": "An object containing data relating to the error",
+ "type": "object",
+ "example": {'badType': 'Preson', 'validTypes': ['Organization', 'Person', 'Other', 'Group']}
+ },
+ "rowNumber": {
+ "type": "integer",
+ "minimum": 1
+ }
+ },
+ "required": ["message", "field", "payload", "rowNumber"],
+ "additionalProperties": False
+ }
+}
+
+Progress = Callable[[str, int, int], None]
+
+
+class LocalityUpdateStatus:
+ PENDING = 'PENDING'
+ PARSING = 'PARSING'
+ PARSED = 'PARSED'
+ PROGRESS = 'PROGRESS'
+ SUCCEEDED = 'SUCCEEDED'
+ ABORTED = 'ABORTED'
+ PARSE_FAILED = 'PARSE_FAILED'
+ FAILED = 'FAILED'
+
+
+class LocalityUpdateTask(LogErrorsTask):
+ def on_failure(self, exc, task_id, args, kwargs, einfo):
+ # with transaction.atomic():
+ # locality_update = LocalityUpdate.objects.get(taskid=task_id)
+
+ # Message.objects.create(user=locality_update.specifyuser, content=json.dumps({
+ # 'type': 'localityupdate-failed',
+ # 'taskid': task_id,
+ # 'traceback': str(einfo.traceback)
+ # }))
+ # locality_update.status = LocalityUpdateStatus.FAILED
+ # locality_update.save()
+
+ return super().on_failure(exc, task_id, args, kwargs, einfo)
+
+
+@app.task(base=LocalityUpdateTask, bind=True)
+def update_locality_task(self, collection_id: int, column_headers: List[str], data: List[List[str]], create_recordset: bool) -> None:
+ def progress(state, current: int, total: int):
+ self.update_state(state=state, meta={
+ 'current': current, 'total': total})
+
+ collection = spmodels.Collection.objects.get(id=collection_id)
+
+ with transaction.atomic():
+ results = upload_locality_set(
+ collection, column_headers, data, progress)
+
+ lu = resolve_localityupdate_result(
+ self.request.id, results, collection, create_recordset)
+
+ if results['type'] == 'ParseError':
+ self.update_state(state=LocalityUpdateStatus.PARSE_FAILED, meta={
+ "errors": [error.to_json() for error in results["errors"]]})
+
+ Message.objects.create(user=lu.specifyuser, content=json.dumps({
+ 'type': 'localityupdate-parse-failed',
+ 'taskid': lu.taskid,
+ 'errors': [error.to_json() for error in results["errors"]]
+ }))
+ elif results['type'] == 'Uploaded':
+ recordset_id = None if lu.recordset is None else lu.recordset.pk
+ localitites = []
+ geocoorddetails = []
+ for row in results["results"]:
+ localitites.append(row["locality"])
+ if row["geocoorddetail"]:
+ geocoorddetails.append(row["geocoorddetail"])
+ self.update_state(state=LocalityUpdateStatus.SUCCEEDED, meta={
+ "recordsetid": recordset_id, "localities": localitites, "geocoorddetails": geocoorddetails})
+
+ Message.objects.create(user=lu.specifyuser, content=json.dumps({
+ 'type': 'localityupdate-succeeded',
+ 'taskid': lu.taskid,
+ 'recordsetid': recordset_id,
+ "localities": localitites,
+ "geocoorddetails": geocoorddetails
+ }))
+
+ # prevent Celery from overriding the State of the Task
+ raise Ignore()
+
+
+@app.task(base=LocalityUpdateTask, bind=True)
+def parse_locality_task(self, collection_id: int, column_headers: List[str], data: List[List[str]]):
+ def progress(state, current: int, total: int):
+ self.update_state(state=state, meta={
+ 'current': current, 'total': total})
+
+ collection = spmodels.Collection.objects.get(id=collection_id)
+
+ with transaction.atomic():
+ to_upload, errors = parse_locality_set(
+ collection, column_headers, data, progress)
+
+ lu = resolve_localityupdate_result(
+ self.request.id, (to_upload, errors), collection)
+
+ if lu.status == LocalityUpdateStatus.PARSE_FAILED:
+ self.update_state(state=LocalityUpdateStatus.PARSE_FAILED, meta={
+ "errors": [error.to_json() for error in errors]})
+
+ Message.objects.create(user=lu.specifyuser, content=json.dumps({
+ 'type': 'localityupdate-parse-failed',
+ 'taskid': lu.taskid,
+ 'errors': [error.to_json() for error in errors]
+ }))
+
+ elif lu.status == LocalityUpdateStatus.PARSED:
+ localitites = len(to_upload)
+ geocoorddetails = 0
+ for parsed in to_upload:
+ if parsed['geocoorddetail'] is not None:
+ geocoorddetails += 1
+
+ self.update_state(state=LocalityUpdateStatus.PARSED, meta={
+ "localitites": localitites,
+ "geocoorddetails": geocoorddetails
+ })
+ Message.objects.create(user=lu.specifyuser, content=json.dumps({
+ 'type': 'localityupdate-parse-succeeded',
+ 'taskid': lu.taskid,
+ "localitites": localitites,
+ "geocoorddetails": geocoorddetails
+ }))
+
+ # prevent Celery from overriding the State of the Task
+ raise Ignore()
+
+
+class JSONParseError(TypedDict):
+ message: str
+ field: str
+ payload: Dict[str, Any]
+ rowNumber: int
+
+
+class ParseError(NamedTuple):
+ message: Union[ParseFailureKey, LocalityParseErrorMessageKey]
+ field: Optional[str]
+ payload: Optional[Dict[str, Any]]
+ row_number: Optional[int]
+
+ @classmethod
+ def from_parse_failure(cls, parse_failure: BaseParseFailure, field: str, row_number: int):
+ return cls(message=parse_failure.message, field=field, payload=parse_failure.payload, row_number=row_number)
+
+ def to_json(self) -> JSONParseError:
+ return {"message": self.message, "field": self.field, "payload": self.payload, "rowNumber": self.row_number}
+
+
+class ParsedRow(TypedDict):
+ row_number: int
+ locality: Dict[str, Any]
+ geocoorddetail: Optional[Dict[str, Any]]
+ locality_id: int
+
+
+class ParseSuccess(NamedTuple):
+ to_upload: Dict[str, Any]
+ model: UpdateModel
+ locality_id: Optional[int]
+ row_number: Optional[int]
+
+ @classmethod
+ def from_base_parse_success(cls, parse_success: BaseParseSuccess, model: UpdateModel, locality_id: Optional[int], row_number: int):
+ return cls(parse_success.to_upload, model, locality_id, row_number)
+
+
+class UploadSuccessRow(TypedDict):
+ locality: int
+ geocoorddetail: Optional[int]
+
+
+class UploadSuccess(TypedDict):
+ type: Literal["Uploaded"]
+ results: List[UploadSuccessRow]
+
+
+class UploadParseError(TypedDict):
+ type: Literal["ParseError"]
+ errors: List[ParseError]
+
+
+@transaction.atomic
+def resolve_localityupdate_result(taskid: str, results: Union[Tuple[List[ParsedRow], List[ParseError]], Union[UploadSuccess, UploadParseError]], collection, create_recordset: bool = False) -> LocalityUpdate:
+
+ lu = LocalityUpdate.objects.get(taskid=taskid)
+
+ lu.results.get_queryset().delete()
+
+ # the results come from parse_locality_set
+ if isinstance(results, tuple):
+ to_upload, errors = results
+ if len(errors) > 0:
+ lu.status = LocalityUpdateStatus.PARSE_FAILED
+ for error in errors:
+ result = error.to_json()
+ LocalityUpdateRowResult.objects.create(
+ localityupdate=lu,
+ rownumber=result["rowNumber"],
+ result=json.dumps(result, cls=DjangoJSONEncoder)
+ )
+ else:
+ lu.status = LocalityUpdateStatus.PARSED
+ for parsed in to_upload:
+ LocalityUpdateRowResult.objects.create(
+ localityupdate=lu,
+ rownumber=parsed["row_number"],
+ result=json.dumps(parsed, cls=DjangoJSONEncoder)
+ )
+
+ # the results come from upload_locality_set
+ else:
+ if results['type'] == 'ParseError':
+ lu.status = LocalityUpdateStatus.PARSE_FAILED
+ for error in results['errors']:
+ result = error.to_json()
+ LocalityUpdateRowResult.objects.create(
+ localityupdate=lu,
+ rownumber=error.row_number,
+ result=json.dumps(result, cls=DjangoJSONEncoder)
+ )
+
+ elif results['type'] == 'Uploaded':
+ lu.status = LocalityUpdateStatus.SUCCEEDED
+ localities = []
+ for index, row in enumerate(results['results']):
+ row_number = index + 1
+ localities.append(row['locality'])
+
+ LocalityUpdateRowResult.objects.create(
+ localityupdate=lu,
+ rownumber=row_number,
+ result=json.dumps(row, cls=DjangoJSONEncoder)
+ )
+
+ lu.recordset = create_localityupdate_recordset(
+ collection, lu.specifyuser, localities) if create_recordset else None
+
+ lu.save()
+
+ return lu
+
+
+def parse_locality_set(collection, raw_headers: List[str], data: List[List[str]], progress: Optional[Progress] = None) -> Tuple[List[ParsedRow], List[ParseError]]:
+ errors: List[ParseError] = []
+ to_upload: List[ParsedRow] = []
+
+ headers = [header.strip().lower() for header in raw_headers]
+
+ if 'guid' not in headers:
+ errors.append(ParseError(message='guidHeaderNotProvided',
+ field=None, payload=None, row_number=None))
+ return to_upload, errors
+
+ guid_index = headers.index('guid')
+ updatable_locality_fields_index = [{'field': field, 'index': headers.index(
+ field)} for field in headers if field.lower() in updatable_locality_fields]
+
+ geocoorddetail_fields_index = [{'field': field, 'index': headers.index(
+ field)} for field in headers if field.lower() in updatable_geocoorddetail_fields]
+
+ processed = 0
+ total = len(data)
+
+ for row_number, row in enumerate(data, start=1):
+ guid = row[guid_index]
+ locality_query = spmodels.Locality.objects.filter(guid=guid)
+ if len(locality_query) == 0:
+ errors.append(ParseError(message='noLocalityMatchingGuid', field='guid',
+ payload={'guid': guid}, row_number=row_number))
+
+ if len(locality_query) > 1:
+ errors.append(ParseError(message='multipleLocalitiesWithGuid', field=None, payload={'guid': guid, 'localityIds': list(
+ locality.id for locality in locality_query)}, row_number=row_number))
+
+ locality_values = [{'field': dict['field'], 'value': row[dict['index']].strip()}
+ for dict in updatable_locality_fields_index]
+
+ geocoorddetail_values = [{'field': dict['field'], 'value': row[dict['index']].strip()}
+ for dict in geocoorddetail_fields_index]
+
+ locality_id: Optional[int] = None if len(
+ locality_query) != 1 else locality_query[0].id
+
+ parsed_locality_fields = [parse_field(
+ collection, 'Locality', dict['field'], dict['value'], locality_id, row_number) for dict in locality_values if dict['value'].strip() != ""]
+
+ parsed_geocoorddetail_fields = [parse_field(
+ collection, 'Geocoorddetail', dict["field"], dict['value'], locality_id, row_number) for dict in geocoorddetail_values if dict['value'].strip() != ""]
+
+ parsed_row, parsed_errors = merge_parse_results(
+ [*parsed_locality_fields, *parsed_geocoorddetail_fields], locality_id, row_number)
+
+ errors.extend(parsed_errors)
+ to_upload.append(parsed_row)
+
+ if progress is not None:
+ processed += 1
+ progress(LocalityUpdateStatus.PARSING, processed, total)
+
+ return to_upload, errors
+
+
+def parse_field(collection, table_name: UpdateModel, field_name: str, field_value: str, locality_id: Optional[int], row_number: int):
+ parsed = _parse_field(collection, table_name, field_name, field_value)
+
+ if isinstance(parsed, BaseParseFailure):
+ return ParseError.from_parse_failure(parsed, field_name, row_number)
+ else:
+ return ParseSuccess.from_base_parse_success(parsed, table_name, locality_id, row_number)
+
+
+def merge_parse_results(results: List[Union[ParseSuccess, ParseError]], locality_id: int, row_number: int) -> Tuple[ParsedRow, List[ParseError]]:
+ to_upload: ParsedRow = {
+ "locality_id": locality_id,
+ "row_number": row_number,
+ "locality": {},
+ "geocoorddetail": {}
+ }
+ errors = []
+ for result in results:
+ if isinstance(result, ParseError):
+ errors.append(result)
+ else:
+ to_upload[result.model.lower()].update(result.to_upload)
+
+ if len(to_upload['geocoorddetail']) == 0:
+ to_upload['geocoorddetail'] = None
+
+ return to_upload, errors
+
+
+def upload_locality_set(collection, column_headers: List[str], data: List[List[str]], progress: Optional[Progress] = None) -> Union[UploadSuccess, UploadParseError]:
+ to_upload, errors = parse_locality_set(
+ collection, column_headers, data, progress)
+
+ if len(errors) > 0:
+ return {
+ "type": "ParseError",
+ "errors": errors
+ }
+
+ return upload_from_parsed(to_upload, progress)
+
+
+def upload_from_parsed(uploadables: List[ParsedRow], progress: Optional[Progress] = None) -> UploadSuccess:
+ processed = 0
+ total = len(uploadables)
+
+ uploaded: List[UploadSuccessRow] = [
+ {"locality": None, "geocoorddetail": None} for _ in range(total)]
+
+ with transaction.atomic():
+ for parsed_row in uploadables:
+ locality_id = parsed_row["locality_id"]
+
+ if locality_id is None:
+ raise ValueError(
+ f"No matching Locality found on row {parsed_row['row_number']}")
+
+ locality = spmodels.Locality.objects.get(id=locality_id)
+
+ # Queryset.update() is not used here as it does not send pre/post save signals
+ for field, value in parsed_row['locality'].items():
+ setattr(locality, field, value)
+ locality.save()
+ uploaded[parsed_row['row_number'] -
+ 1]["locality"] = locality_id
+
+ if parsed_row['geocoorddetail'] is not None:
+ locality.geocoorddetails.get_queryset().delete()
+ geoCoordDetail = spmodels.Geocoorddetail.objects.create(
+ **parsed_row['geocoorddetail'])
+ geoCoordDetail.locality = locality
+ geoCoordDetail.save()
+ uploaded[parsed_row["row_number"] -
+ 1]["geocoorddetail"] = geoCoordDetail.pk
+
+ if progress is not None:
+ processed += 1
+ progress(LocalityUpdateStatus.PROGRESS, processed, total)
+
+ return {
+ "type": "Uploaded",
+ "results": uploaded
+ }
+
+
+# Example: Wed Jun 07 2023
+DATE_FORMAT = r"%a %b %d %Y"
+
+
+def create_localityupdate_recordset(collection, specifyuser, locality_ids: List[int]):
+
+ locality_table_id = datamodel.get_table_strict('Locality').tableId
+
+ date_as_string = datetime.now().strftime(DATE_FORMAT)
+
+ with transaction.atomic():
+ rs = spmodels.Recordset.objects.create(
+ collectionmemberid=collection.id,
+ dbtableid=locality_table_id,
+ name=f"{date_as_string} Locality Update",
+ specifyuser=specifyuser,
+ type=0,
+ version=0
+ )
+ for locality_id in locality_ids:
+ spmodels.Recordsetitem.objects.create(
+ recordid=locality_id,
+ recordset=rs
+ )
+
+ return rs
diff --git a/specifyweb/specify/urls.py b/specifyweb/specify/urls.py
index aa142071550..1a8a6a47301 100644
--- a/specifyweb/specify/urls.py
+++ b/specifyweb/specify/urls.py
@@ -40,6 +40,14 @@
url(r'^repair/$', tree_views.repair_tree),
])),
+ # locality set import endpoints
+ url(r'^localityset/', include([
+ url(r'^parse/$', views.parse_locality_set),
+ url(r'^import/$', views.upload_locality_set),
+ url(r'^status/(?P<taskid>[0-9a-fA-F-]+)/$', views.localityupdate_status),
+ url(r'^abort/(?P<taskid>[0-9a-fA-F-]+)/$', views.abort_localityupdate_task),
+ ])),
+
# generates Sp6 master key
url(r'^master_key/$', master_key.master_key),
diff --git a/specifyweb/specify/views.py b/specifyweb/specify/views.py
index 345ab7ac656..0abd0c45fc3 100644
--- a/specifyweb/specify/views.py
+++ b/specifyweb/specify/views.py
@@ -5,14 +5,13 @@
import json
import mimetypes
from functools import wraps
-from typing import Callable, Union
+from typing import Union, List, Tuple, Dict, Any
from uuid import uuid4
from django import http
from django.conf import settings
from django.db import router, transaction, connection
-from specifyweb.notifications.models import Message, Spmerging
-from django.db.models import Q
+from specifyweb.notifications.models import Message, Spmerging, LocalityUpdate
from django.db.models.deletion import Collector
from django.views.decorators.cache import cache_control
from django.views.decorators.http import require_POST
@@ -20,14 +19,11 @@
from specifyweb.middleware.general import require_GET, require_http_methods
from specifyweb.permissions.permissions import PermissionTarget, \
PermissionTargetAction, PermissionsException, check_permission_targets, table_permissions_checker
-from specifyweb.celery_tasks import app
+from specifyweb.celery_tasks import app, CELERY_TASK_STATE
from specifyweb.specify.record_merging import record_merge_fx, record_merge_task, resolve_record_merge_response
-from specifyweb.specify.tree_extras import is_instance_of_tree_def_item
+from specifyweb.specify.update_locality import localityupdate_parse_success, localityupdate_parse_error, parse_locality_set as _parse_locality_set, upload_locality_set as _upload_locality_set, create_localityupdate_recordset, update_locality_task, parse_locality_task, LocalityUpdateStatus
from . import api, models as spmodels
-from .build_models import orderings
from .specify_jar import specify_jar
-from celery.utils.log import get_task_logger # type: ignore
-logger = get_task_logger(__name__)
def login_maybe_required(view):
@@ -92,6 +88,7 @@ def raise_error(request):
raise Exception('This error is a test. You may now return to your regularly '
'scheduled hacking.')
+
@login_maybe_required
@require_http_methods(['GET', 'HEAD'])
def delete_blockers(request, model, id):
@@ -522,18 +519,18 @@ def record_merge(
if background:
# Check if another merge is still in progress
- cur_merges = Spmerging.objects.filter(mergingstatus='MERGING')
+ cur_merges = Spmerging.objects.filter(status='MERGING')
for cur_merge in cur_merges:
cur_task_id = cur_merge.taskid
cur_result = record_merge_task.AsyncResult(cur_task_id)
if cur_result is not None:
- cur_merge.mergingstatus = 'FAILED'
+ cur_merge.status = 'FAILED'
cur_merge.save()
elif cur_result.state == 'MERGING':
return http.HttpResponseNotAllowed(
'Another merge process is still running on the system, please try again later.')
else:
- cur_merge.mergingstatus = cur_result.state
+ cur_merge.status = cur_result.state
cur_merge.save()
# Create task id and a Spmerging record
@@ -541,7 +538,7 @@ def record_merge(
merge = Spmerging.objects.create(
name="Merge_" + model_name + "_" + new_model_id,
taskid=task_id,
- mergingstatus="MERGING",
+ status="MERGING",
table=model_name.title(),
newrecordid=new_model_id,
newrecordata=json.dumps(new_record_data),
@@ -604,17 +601,6 @@ def record_merge(
return response
-CELERY_MERGE_STATUS_MAP = {
- 'PENDING': 'PENDING',
- 'STARTED': 'MERGING',
- 'SUCCESS': 'SUCCEEDED',
- 'FAILURE': 'FAILED',
- 'RETRY': 'MERGING',
- 'REVOKED': 'FAILED',
- 'REJECTED': 'FAILED'
-}
-
-
@openapi(schema={
'get': {
"responses": {
@@ -667,6 +653,9 @@ def record_merge(
}
}
},
+ '404': {
+ 'description': 'The spmerging object with task id was not found',
+ },
}
},
})
@@ -680,23 +669,10 @@ def merging_status(request, merge_id: int) -> http.HttpResponse:
except Spmerging.DoesNotExist:
return http.HttpResponseNotFound(f'The merge task id is not found: {merge_id}')
- task_status = merge.mergingstatus
- task_progress = None
-
- try:
- result = record_merge_task.AsyncResult(merge.taskid)
- task_progress = result.info if isinstance(
- result.info, dict) else repr(result.info)
-
- # Update task status if necessary
- if result.state not in ['PENDING', 'STARTED', 'SUCCESS', 'RETRY']:
- task_status = CELERY_MERGE_STATUS_MAP.get(
- result.state, task_status)
- except Exception:
- pass
+ result = record_merge_task.AsyncResult(merge.taskid)
status = {
- 'taskstatus': merge.mergingstatus,
+ 'taskstatus': merge.status,
'response': merge.response,
'taskprogress': result.info if isinstance(result.info, dict) else repr(result.info),
'taskid': merge.taskid
@@ -751,7 +727,7 @@ def abort_merge_task(request, merge_id: int) -> http.HttpResponse:
app.control.revoke(merge.taskid, terminate=True)
# Updating the merging status
- merge.mergingstatus = 'ABORTED'
+ merge.status = 'ABORTED'
merge.save()
# Send notification the the megre task has been aborted
@@ -768,3 +744,623 @@ def abort_merge_task(request, merge_id: int) -> http.HttpResponse:
else:
return http.HttpResponse(f'Task {merge.taskid} is not running and cannot be aborted.')
+
+
+@openapi(schema={
+ 'post': {
+ "requestBody": {
+ "required": True,
+ "content": {
+ "application/json": {
+ "schema": {
+ "type": "object",
+ "properties": {
+ "columnHeaders": {
+ "type": "array",
+ "items": {
+ "type": "string"
+ }
+ },
+ "data": {
+ "type": "array",
+ "items": {
+ "type": "array",
+ "items": {
+ "type": "string"
+ }
+ }
+ },
+ "createRecordSet": {
+ "type": "boolean",
+ "description": "When True, creates a recordset in the logged-in collection for the logged-in user with the matched/updated localities if the upload succeeds",
+ "default": True
+ },
+ "runInBackground": {
+ "type": "boolean",
+ "description": "Whether the task should be ran in the background. Defaults to True",
+ "default": False
+ }
+ },
+ "required": ["columnHeaders", "data"],
+ "additionalProperties": False
+ }
+ }
+ }
+ },
+ "responses": {
+ "200": {
+ "description": "Task finished synchronously",
+ "content": {
+ "application/json": {
+ "schema": {
+ "oneOf": [
+ {
+ "type": "object",
+ "properties": {
+ "type": {
+ "type": "string",
+ "enum": ["ParseError"]
+ },
+ "errors": localityupdate_parse_error
+ },
+ "required": ["type", "errors"],
+ "additionalProperties": False
+ },
+ {
+ "type": "object",
+ "properties": {
+ "type": {
+ "type": "string",
+ "enum": ["Uploaded"]
+ },
+ "recordsetid": {
+ "oneOf": [
+ {
+ "type": "string"
+ },
+ {
+ "type": "null"
+ }
+ ]
+ },
+ "localities": {
+ "type": "array",
+ "description": "An array of matched/updated Locality IDs",
+ "items": {
+ "type": "number"
+ }
+ },
+ "geocoorddetails": {
+ "type": "array",
+ "description": "An array of created GeoCoordDetail IDs",
+ "items": {
+ "type": "number"
+ }
+ }
+ },
+ "required": ["type", "recordsetid", "localities", "geocoorddetails"],
+ "additionalProperties": False
+ }
+ ]
+ }
+ }
+ }
+ },
+ "201": {
+ "description": "Task started by the worker. Returns the newly created ID of the task",
+ "content": {
+ "text/plain": {
+ "schema": {
+ "type": "string",
+ "maxLength": 36,
+ "example": "7d34dbb2-6e57-4c4b-9546-1fe7bec1acca",
+ }
+ }
+ }
+ },
+ "403": {
+ "description": "Insufficient rights to upload the Locality Data Set. Logged-in user must be an admin"
+ }
+ }
+ },
+})
+@login_maybe_required
+@require_POST
+def upload_locality_set(request: http.HttpRequest):
+
+ if not request.specify_user.is_admin():
+ return http.HttpResponseForbidden('Specifyuser must be an institution admin')
+
+ request_data = json.loads(request.body)
+
+ column_headers = request_data["columnHeaders"]
+ data = request_data["data"]
+ create_recordset = request_data.get("createRecordSet", True)
+ run_in_background = request_data.get("runInBackground", False)
+
+ resolved_upload_function = start_locality_set_background if run_in_background else upload_locality_set_foreground
+
+ result = resolved_upload_function(request.specify_collection, request.specify_user,
+ request.specify_user_agent, column_headers, data, create_recordset)
+
+ return http.JsonResponse(result, status=201 if run_in_background else 200, safe=False)
+
+
+def start_locality_set_background(collection, specify_user, agent, column_headers: List[str], data: List[List[str]], create_recordset: bool = False, parse_only: bool = False) -> str:
+ task_id = str(uuid4())
+ args = [collection.id, column_headers, data]
+ if not parse_only:
+ args.append(create_recordset)
+ task_function = parse_locality_task.apply_async if parse_only else update_locality_task.apply_async
+
+ task = task_function(args, task_id=task_id)
+
+ lu = LocalityUpdate.objects.create(
+ taskid=task.id,
+ status=LocalityUpdateStatus.PENDING,
+ collection=collection,
+ specifyuser=specify_user,
+ createdbyagent=agent,
+ modifiedbyagent=agent,
+ )
+
+ Message.objects.create(user=specify_user, content=json.dumps({
+ 'type': 'localityupdate-starting',
+ 'taskid': task.id
+ }))
+
+ return task.id
+
+
+def upload_locality_set_foreground(collection, specify_user, agent, column_headers: List[str], data: List[List[str]], create_recordset: bool):
+ result = _upload_locality_set(collection, column_headers, data)
+
+ if result["type"] == 'ParseError':
+ return result
+
+ localities = [row["locality"] for row in result["results"]]
+
+ recordset = create_localityupdate_recordset(
+ collection, specify_user, localities) if create_recordset else None
+
+ result["recordsetid"] = None if recordset is None else recordset.pk
+
+ return result
+
+
+@openapi(schema={
+ 'get': {
+ "responses": {
+ "200": {
+ "description": "Data fetched successfully",
+ "content": {
+ "application/json": {
+ "schema": {
+ "oneOf": [
+ {
+ "type": "object",
+ "properties": {
+ "taskstatus": {
+ "type": "string",
+ "enum": [LocalityUpdateStatus.PENDING, LocalityUpdateStatus.ABORTED]
+ },
+ "taskinfo": {
+ "type": "string",
+ },
+ },
+ "required": ["taskstatus", "taskinfo"],
+ "additionalProperties": False
+ },
+ {
+ "type": "object",
+ "properties": {
+ "taskstatus": {
+ "type": "string",
+ "enum": [LocalityUpdateStatus.PROGRESS, LocalityUpdateStatus.PARSING]
+ },
+ "taskinfo": {
+ "type": "object",
+ "properties": {
+ "current": {
+ "type": "number",
+ "example": 4,
+ },
+ "total": {
+ "type": "number",
+ "example": 20,
+ }
+ }
+ },
+ },
+ "required": ["taskstatus", "taskinfo"],
+ "additionalProperties": False
+ },
+ {
+ "type": "object",
+ "properties": {
+ "taskstatus": {
+ "type": "string",
+ "enum": [LocalityUpdateStatus.PARSED]
+ },
+ "taskinfo": {
+ "type": "object",
+ "properties": {
+ "rows": {
+ "type": "array",
+ "items": {
+ "type": "object",
+ "properties": {
+ "locality": {
+ "type": "object"
+ },
+ "geocoorddetail": {
+ "oneOf": [
+ {
+ "type": "null"
+ },
+ {
+ "type": "object"
+ }
+ ]
+ },
+ "locality_id": {
+ "description": "The ID of the matched Locality",
+ "type": "number"
+ },
+ "row_number": {
+ "type": "number"
+ }
+ },
+ "required": ["locality", "geocoorddetail"]
+ }
+ }
+ }
+ },
+ },
+ "required": ["taskstatus", "taskinfo"],
+ "additionalProperties": False
+ },
+ {
+ "type": "object",
+ "properties": {
+ "taskstatus": {
+ "type": "string",
+ "enum": [LocalityUpdateStatus.SUCCEEDED]
+ },
+ "taskinfo": {
+ "type": "object",
+ "properties": {
+ "recordsetid": {
+ "oneOf": [
+ {
+ "type": "number"
+ },
+ {
+ "type": "null"
+ }
+ ]
+ },
+ "localities": {
+ "type": "array",
+ "description": "An array of matched/updated Locality IDs",
+ "items": {
+ "type": "number"
+ }
+ },
+ "geocoorddetails": {
+ "type": "array",
+ "description": "An array of created GeoCoordDetail IDs",
+ "items": {
+ "type": "number"
+ }
+ }
+ },
+ "required": ["recordsetid", "localities", "geocoorddetails"],
+ "additionalProperties": False
+ }
+ },
+ "required": ["taskstatus", "taskinfo"],
+ "additionalProperties": False
+ },
+ {
+ "type": "object",
+ "properties": {
+ "taskstatus": {
+ "type": "string",
+ "enum": [LocalityUpdateStatus.PARSE_FAILED]
+ },
+ "taskinfo": {
+ "type": "object",
+ "properties": {
+ "errors": localityupdate_parse_error
+ }
+ }
+ },
+ "required": ["taskstatus", "taskinfo"],
+ "additionalProperties": False
+ },
+ {
+ "type": "object",
+ "properties": {
+ "taskstatus": {
+ "type": "string",
+ "enum": [LocalityUpdateStatus.FAILED]
+ },
+ "taskinfo": {
+ "type": "object",
+ "properties": {
+ "error": {
+ "type": "string"
+ },
+ "traceback": {
+ "type": "string"
+ }
+ }
+ }
+ },
+ "required": ["taskstatus", "taskinfo"],
+ "additionalProperties": False
+ }
+ ]
+ }
+ }
+ }
+ },
+ "404": {
+ "description": 'The localityupdate object with task id was not found',
+ "content": {
+ "text/plain": {
+ "schema": {
+ "type": "string",
+ "example": "The localityupdate with task id '7d34dbb2-6e57-4c4b-9546-1fe7bec1acca' was not found"
+ }
+ }
+ }
+ }
+ }
+ },
+})
+@require_GET
+def localityupdate_status(request: http.HttpRequest, taskid: str):
+ try:
+ locality_update = LocalityUpdate.objects.get(taskid=taskid)
+ except LocalityUpdate.DoesNotExist:
+ return http.HttpResponseNotFound(f"The localityupdate with task id '{taskid}' was not found")
+
+ result = update_locality_task.AsyncResult(locality_update.taskid)
+
+ resolved_state = LocalityUpdateStatus.ABORTED if result.state == CELERY_TASK_STATE.REVOKED else LocalityUpdateStatus.FAILED if result.state == CELERY_TASK_STATE.FAILURE else result.state
+
+ status = {
+ 'taskstatus': resolved_state,
+ 'taskinfo': result.info if isinstance(result.info, dict) else repr(result.info)
+ }
+
+ if resolved_state == LocalityUpdateStatus.FAILED:
+ status["taskinfo"] = {
+ 'error': str(result.result),
+ 'traceback': str(result.traceback)
+ }
+
+ elif locality_update.status == LocalityUpdateStatus.PARSE_FAILED:
+
+ status["taskstatus"] = LocalityUpdateStatus.PARSE_FAILED
+
+ if isinstance(result.info, dict) and 'errors' in result.info.keys():
+ errors = result.info["errors"]
+ else:
+ results = locality_update.results.all()
+ errors = [json.loads(error.result) for error in results]
+
+ status["taskinfo"] = {"errors": errors}
+
+ elif locality_update.status == LocalityUpdateStatus.PARSED:
+ status["taskstatus"] = LocalityUpdateStatus.PARSED
+
+ results = locality_update.results.all()
+ rows = [json.loads(row.result) for row in results]
+
+ status["taskinfo"] = {
+ "rows": rows
+ }
+
+ elif locality_update.status == LocalityUpdateStatus.SUCCEEDED:
+ status["taskstatus"] = LocalityUpdateStatus.SUCCEEDED
+ recordset_id = locality_update.recordset.id if locality_update.recordset is not None else None
+ if isinstance(result.info, dict) and resolved_state == LocalityUpdateStatus.SUCCEEDED:
+ result = {
+ "recordsetid": recordset_id,
+ "localities": result.info["localities"],
+ "geocoorddetails": result.info["geocoorddetails"]
+ }
+ else:
+ results = locality_update.results.all()
+ localitites = []
+ geocoorddetails = []
+ for row in results:
+ parsed = json.loads(row.result)
+ localitites.append(parsed["locality"])
+ if parsed["geocoorddetail"] is not None:
+ geocoorddetails.append(parsed["geocoorddetail"])
+ result = {
+ "recordsetid": recordset_id,
+ "localities": localitites,
+ "geocoorddetails": geocoorddetails
+ }
+
+ status["taskinfo"] = result
+
+ return http.JsonResponse(status, safe=False)
+
+
+@openapi(schema={
+ 'post': {
+ 'responses': {
+ '200': {
+ 'description': 'The task has been successfully aborted or it is not running and cannot be aborted',
+ 'content': {
+ 'application/json': {
+ 'schema': {
+ 'type': 'object',
+ 'properties': {
+ 'type': {
+ 'type': 'string',
+ "enum": ["ABORTED", "NOT_RUNNING"]
+ },
+ 'message': {
+ 'type': 'string',
+ 'description': 'Response message about the status of the task'
+ },
+ },
+ "required": ["type", "message"],
+ "additionalProperties": False
+ },
+ },
+ },
+ },
+ "404": {
+ "description": 'The localityupdate object with task id was not found',
+ "content": {
+ "text/plain": {
+ "schema": {
+ "type": "string",
+ "example": "The localityupdate with task id '7d34dbb2-6e57-4c4b-9546-1fe7bec1acca' was not found"
+ }
+ }
+ }
+ }
+ },
+ },
+})
+@require_POST
+@login_maybe_required
+def abort_localityupdate_task(request: http.HttpRequest, taskid: str):
+ "Aborts the locality update task currently running and matching the given task ID"
+
+ try:
+ locality_update = LocalityUpdate.objects.get(taskid=taskid)
+ except LocalityUpdate.DoesNotExist:
+ return http.HttpResponseNotFound(f"The localityupdate with task id '{taskid}' was not found")
+
+ task = update_locality_task.AsyncResult(locality_update.taskid)
+
+ result = {
+ "type": None,
+ "message": None
+ }
+
+ if task.state in [LocalityUpdateStatus.PENDING, LocalityUpdateStatus.PARSING, LocalityUpdateStatus.PROGRESS]:
+ app.control.revoke(locality_update.taskid, terminate=True)
+
+ locality_update.status = LocalityUpdateStatus.ABORTED
+ locality_update.save()
+
+ Message.objects.create(user=request.specify_user, content=json.dumps({
+ 'type': 'localityupdate-aborted',
+ 'taskid': taskid
+ }))
+ result["type"] = "ABORTED"
+ result["message"] = f'Task {locality_update.taskid} has been aborted.'
+
+ else:
+ result["type"] = "NOT_RUNNING"
+ result["message"] = 'Task %s is not running and cannot be aborted' % locality_update.taskid
+
+ return http.JsonResponse(result, safe=False)
+
+
+@openapi(schema={
+ "post": {
+ "requestBody": {
+ "required": True,
+ "content": {
+ "application/json": {
+ "schema": {
+ "type": "object",
+ "properties": {
+ "columnHeaders": {
+ "type": "array",
+ "items": {
+ "type": "string"
+ }
+ },
+ "data": {
+ "type": "array",
+ "items": {
+ "type": "array",
+ "items": {
+ "type": "string"
+ }
+ }
+ },
+ "runInBackground": {
+ "type": "boolean",
+ "description": "Whether the task should be ran in the background. Defaults to True",
+ "default": False
+ }
+ },
+ "required": ["columnHeaders", "data"],
+ "additionalProperties": False
+ }
+ }
+ }
+ },
+ "responses": {
+ "200": {
+ "description": "Successful response returned by worker",
+ "content": {
+ "application/json": {
+ "schema": localityupdate_parse_success
+ }
+ }
+ },
+ "201": {
+ "description": "Task started by the worker. Returns the newly created ID of the task",
+ "content": {
+ "text/plain": {
+ "schema": {
+ "type": "string",
+ "maxLength": 36,
+ "example": "7d34dbb2-6e57-4c4b-9546-1fe7bec1acca",
+ }
+ }
+ }
+ },
+ "422": {
+ "description": "Locality Import Set not parsed successfully",
+ "content": {
+ "application/json": {
+ "schema": localityupdate_parse_error
+ }
+ }
+ }
+ }
+ }
+})
+@login_maybe_required
+@require_POST
+def parse_locality_set(request: http.HttpRequest):
+ """Parse a locality set without making any database changes and return the results
+ """
+ request_data = json.loads(request.body)
+
+ column_headers = request_data["columnHeaders"]
+ data = request_data["data"]
+ run_in_background = request_data.get("runInBackground", False)
+ if not run_in_background:
+ status, result = parse_locality_set_foreground(
+ request.specify_collection, column_headers, data)
+ else:
+ status, result = 201, start_locality_set_background(
+ request.specify_collection, request.specify_user, request.specify_user_agent, column_headers, data, False, True)
+ return http.JsonResponse(result, status=status, safe=False)
+
+
+def parse_locality_set_foreground(collection, column_headers: List[str], data: List[List[str]]) -> Tuple[int, Dict[str, Any]]:
+ parsed, errors = _parse_locality_set(
+ collection, column_headers, data)
+
+ if len(errors) > 0:
+ return 422, [error.to_json() for error in errors]
+
+ return 200, parsed
diff --git a/specifyweb/stored_queries/tests.py b/specifyweb/stored_queries/tests.py
index aa15ddf70f2..f494b548652 100644
--- a/specifyweb/stored_queries/tests.py
+++ b/specifyweb/stored_queries/tests.py
@@ -803,7 +803,7 @@ class SQLAlchemyModelTest(TestCase):
def test_sqlalchemy_model_errors(self):
for table in spmodels.datamodel.tables:
table_errors = test_sqlalchemy_model(table)
- self.assertTrue(len(table_errors) == 0 or table.name in expected_errors)
+ self.assertTrue(len(table_errors) == 0 or table.name in expected_errors, f"Did not find {table.name}. Has errors: {table_errors}")
if 'not_found' in table_errors:
table_errors['not_found'] = sorted(table_errors['not_found'])
if table_errors:
diff --git a/specifyweb/workbench/upload/parsing.py b/specifyweb/workbench/upload/parsing.py
index 8ee6372e600..80ba608123c 100644
--- a/specifyweb/workbench/upload/parsing.py
+++ b/specifyweb/workbench/upload/parsing.py
@@ -1,37 +1,37 @@
-
import logging
-import math
-import re
-from datetime import datetime
-from decimal import Decimal
from typing import Dict, Any, Optional, List, NamedTuple, Tuple, Union, NoReturn
from django.core.exceptions import ObjectDoesNotExist
-from specifyweb.specify import models
-from specifyweb.specify.datamodel import datamodel, Table
-from specifyweb.specify.uiformatters import FormatMismatch
-from specifyweb.stored_queries.format import MYSQL_TO_YEAR, MYSQL_TO_MONTH
+from specifyweb.specify.datamodel import datamodel
from .column_options import ExtendedColumnOptions
+from specifyweb.specify.parse import parse_field, is_latlong, ParseSucess, ParseFailure
Row = Dict[str, str]
Filter = Dict[str, Any]
logger = logging.getLogger(__name__)
+
class PicklistAddition(NamedTuple):
picklist: Any
column: str
value: str
-class ParseFailure(NamedTuple):
+
+class WorkBenchParseFailure(NamedTuple):
message: str
payload: Dict[str, Union[str, int, List[str], List[int]]]
column: str
+ @classmethod
+ def from_parse_failure(cls, pf: ParseFailure, column: str):
+ return cls(message=pf.message, payload=pf.payload, column=column)
+
def to_json(self) -> List:
return list(self)
+
class ParseResult(NamedTuple):
filter_on: Filter
upload: Dict[str, Any]
@@ -39,27 +39,35 @@ class ParseResult(NamedTuple):
column: str
missing_required: Optional[str]
+ @classmethod
+ def from_parse_success(cls, ps: ParseSucess, filter_on: Filter, add_to_picklist: Optional[PicklistAddition], column: str, missing_required: Optional[str]):
+ return cls(filter_on=filter_on, upload=ps.to_upload, add_to_picklist=add_to_picklist, column=column, missing_required=missing_required)
+
def match_key(self) -> str:
from .uploadable import filter_match_key
return filter_match_key(self.filter_on)
+
def filter_and_upload(f: Filter, column: str) -> ParseResult:
return ParseResult(f, f, None, column, None)
-def parse_many(collection, tablename: str, mapping: Dict[str, ExtendedColumnOptions], row: Row) -> Tuple[List[ParseResult], List[ParseFailure]]:
+
+def parse_many(collection, tablename: str, mapping: Dict[str, ExtendedColumnOptions], row: Row) -> Tuple[List[ParseResult], List[WorkBenchParseFailure]]:
results = [
- parse_value(collection, tablename, fieldname, row[colopts.column], colopts)
+ parse_value(collection, tablename, fieldname,
+ row[colopts.column], colopts)
for fieldname, colopts in mapping.items()
]
return (
[r for r in results if isinstance(r, ParseResult)],
- [r for r in results if isinstance(r, ParseFailure)]
+ [r for r in results if isinstance(r, WorkBenchParseFailure)]
)
-def parse_value(collection, tablename: str, fieldname: str, value_in: str, colopts: ExtendedColumnOptions) -> Union[ParseResult, ParseFailure]:
+
+def parse_value(collection, tablename: str, fieldname: str, value_in: str, colopts: ExtendedColumnOptions) -> Union[ParseResult, WorkBenchParseFailure]:
required_by_schema = colopts.schemaitem and colopts.schemaitem.isrequired
- result: Union[ParseResult, ParseFailure]
+ result: Union[ParseResult, WorkBenchParseFailure]
was_blank = value_in.strip() == ""
if was_blank:
if colopts.default is None:
@@ -68,13 +76,16 @@ def parse_value(collection, tablename: str, fieldname: str, value_in: str, colop
"field is required by schema config" if required_by_schema else
None
)
- result = ParseResult({fieldname: None}, {}, None, colopts.column, missing_required)
+ result = ParseResult({fieldname: None}, {},
+ None, colopts.column, missing_required)
else:
- result = _parse(collection, tablename, fieldname, colopts, colopts.default)
+ result = _parse(collection, tablename, fieldname,
+ colopts, colopts.default)
else:
- result = _parse(collection, tablename, fieldname, colopts, value_in.strip())
+ result = _parse(collection, tablename, fieldname,
+ colopts, value_in.strip())
- if isinstance(result, ParseFailure):
+ if isinstance(result, WorkBenchParseFailure):
return result
if colopts.matchBehavior == "ignoreAlways":
@@ -90,18 +101,16 @@ def parse_value(collection, tablename: str, fieldname: str, value_in: str, colop
assertNever(colopts.matchBehavior)
-def _parse(collection, tablename: str, fieldname: str, colopts: ExtendedColumnOptions, value: str) -> Union[ParseResult, ParseFailure]:
- if tablename.lower() == 'agent' and fieldname.lower() == 'agenttype':
- return parse_agenttype(value, colopts.column)
-
+def _parse(collection, tablename: str, fieldname: str, colopts: ExtendedColumnOptions, value: str) -> Union[ParseResult, WorkBenchParseFailure]:
table = datamodel.get_table_strict(tablename)
field = table.get_field_strict(fieldname)
if colopts.picklist:
- result = parse_with_picklist(collection, colopts.picklist, fieldname, value, colopts.column)
+ result = parse_with_picklist(
+ collection, colopts.picklist, fieldname, value, colopts.column)
if result is not None:
if isinstance(result, ParseResult) and hasattr(field, 'length') and field.length is not None and len(result.upload[fieldname]) > field.length:
- return ParseFailure(
+ return WorkBenchParseFailure(
'pickListValueTooLong',
{
'pickList': colopts.picklist.name,
@@ -111,247 +120,51 @@ def _parse(collection, tablename: str, fieldname: str, colopts: ExtendedColumnOp
)
return result
- if field.is_relationship:
- return parse_integer(fieldname, value, colopts.column)
-
- if colopts.uiformatter:
- try:
- parsed = colopts.uiformatter.parse(value)
- except FormatMismatch as e:
- return ParseFailure(e.args[0], {}, colopts.column)
-
- if colopts.uiformatter.needs_autonumber(parsed):
- canonicalized = colopts.uiformatter.autonumber_now(collection, getattr(models, tablename.capitalize()), parsed)
- else:
- canonicalized = colopts.uiformatter.canonicalize(parsed)
-
- if hasattr(field, 'length') and field.length is not None and len(canonicalized) > field.length:
- return ParseFailure('valueTooLong', {'maxLength': field.length if field.length is not None else 0}, colopts.column)
-
- return filter_and_upload({fieldname: canonicalized}, colopts.column)
-
- if is_latlong(table, field):
- return parse_latlong(field, value, colopts.column)
+ parsed = parse_field(collection, tablename, fieldname, value)
- if field.is_temporal():
- return parse_date(table, fieldname, colopts.dateformat or "%Y-%m-%d", value, colopts.column)
+ if is_latlong(table, field) and isinstance(parsed, ParseSucess):
+ coord_text_field = field.name.replace('itude', '') + 'text'
+ filter_on = {coord_text_field: parsed.to_upload[coord_text_field]}
+ return ParseResult.from_parse_success(parsed, filter_on, None, colopts.column, None)
- if field.type == "java.lang.Boolean":
- return parse_boolean(fieldname, value, colopts.column)
-
- if field.type == 'java.math.BigDecimal':
- return parse_decimal(fieldname, value, colopts.column)
-
- if field.type in ('java.lang.Float', 'java.lang.Double'):
- return parse_float(fieldname, value, colopts.column)
-
- if field.type in ('java.lang.Integer', 'java.lang.Long', 'java.lang.Byte', 'java.lang.Short'):
- return parse_integer(fieldname, value, colopts.column)
-
- if hasattr(field, 'length') and field.length is not None and len(value) > field.length:
- return ParseFailure('valueTooLong', {'maxLength': field.length if field.length is not None else 0}, colopts.column)
+ if isinstance(parsed, ParseFailure):
+ return WorkBenchParseFailure.from_parse_failure(parsed, colopts.column)
+ else:
+ return ParseResult.from_parse_success(parsed, parsed.to_upload, None, colopts.column, None)
- return filter_and_upload({fieldname: value}, colopts.column)
-def parse_boolean(fieldname: str, value: str, column: str) -> Union[ParseResult, ParseFailure]:
- if value.lower() in ["yes", "true"]:
- result = True
- elif value.lower() in ["no", "false"]:
- result = False
- else:
- return ParseFailure(
- 'failedParsingBoolean',
- {'value': value},
- column
- )
-
- return filter_and_upload({fieldname: result}, column)
-
-def parse_decimal(fieldname: str, value: str, column) -> Union[ParseResult, ParseFailure]:
- try:
- result = Decimal(value)
- except Exception as e:
- return ParseFailure(
- 'failedParsingDecimal',
- {'value': value},
- column
- )
-
- return filter_and_upload({fieldname: result}, column)
-
-def parse_float(fieldname: str, value: str, column) -> Union[ParseResult, ParseFailure]:
- try:
- result = float(value)
- except ValueError as e:
- return ParseFailure('failedParsingFloat', {'value': value}, column)
-
- return filter_and_upload({fieldname: result}, column)
-
-def parse_integer(fieldname: str, value: str, column: str) -> Union[ParseResult, ParseFailure]:
- try:
- result = int(value)
- except ValueError as e:
- return ParseFailure('failedParsingDecimal', {'value': value}, column)
-
- return filter_and_upload({fieldname: result}, column)
-
-def parse_with_picklist(collection, picklist, fieldname: str, value: str, column: str) -> Union[ParseResult, ParseFailure, None]:
- if picklist.type == 0: # items from picklistitems table
+def parse_with_picklist(collection, picklist, fieldname: str, value: str, column: str) -> Union[ParseResult, WorkBenchParseFailure, None]:
+ if picklist.type == 0: # items from picklistitems table
try:
item = picklist.picklistitems.get(title=value)
return filter_and_upload({fieldname: item.value}, column)
except ObjectDoesNotExist:
if picklist.readonly:
- return ParseFailure(
+ return WorkBenchParseFailure(
'failedParsingPickList',
{'value': value},
column
)
else:
return filter_and_upload({fieldname: value}, column)._replace(
- add_to_picklist=PicklistAddition(picklist=picklist, column=column, value=value)
+ add_to_picklist=PicklistAddition(
+ picklist=picklist, column=column, value=value)
)
return filter_and_upload({fieldname: value})
- elif picklist.type == 1: # items from rows in some table
+ elif picklist.type == 1: # items from rows in some table
# we ignore this type of picklist because it is primarily used to choose many-to-one's on forms
# so it is not expected to appear on actual fields
return None
- elif picklist.type == 2: # items from a field in some table
+ elif picklist.type == 2: # items from a field in some table
# this picklist type is rarely used and seems mostly for convenience on forms to allow
# quickly selecting existing values from other rows in the same table. e.g. moleculeType
return None
else:
- raise NotImplementedError("unknown picklist type {}".format(picklist.type))
-
-def parse_agenttype(value: str, column: str) -> Union[ParseResult, ParseFailure]:
- agenttypes = ['Organization', 'Person', 'Other', 'Group']
-
- value = value.capitalize()
- try:
- agenttype = agenttypes.index(value)
- except ValueError:
- return ParseFailure('failedParsingAgentType', {'badType': value, 'validTypes': agenttypes}, column)
- return filter_and_upload({'agenttype': agenttype}, column)
-
-def parse_date(table: Table, fieldname: str, dateformat: str, value: str, column: str) -> Union[ParseResult, ParseFailure]:
- if re.search('[0-9]{4}', value) is None:
- return ParseFailure('invalidYear',{'value':value}, column)
-
- dateformat = dateformat.replace('%y', '%Y')
- precision_field = table.get_field(fieldname + 'precision')
- if precision_field is None:
- try:
- date = datetime.strptime(value, dateformat).date()
- except ValueError:
- return ParseFailure('badDateFormat', {'value':value,'format':dateformat}, column)
- return filter_and_upload({fieldname: date}, column)
-
- date_formats = [
- dateformat,
- MYSQL_TO_MONTH[dateformat],
- MYSQL_TO_YEAR[dateformat],
- dateformat.replace('%d', '00'),
- re.sub('(%m)|(%d)', '00', dateformat),
- ]
-
- for df in date_formats:
- try:
- date = datetime.strptime(value, df).date()
- except ValueError:
- continue
- if '%d' in df:
- return filter_and_upload({fieldname: date, precision_field.name.lower(): 1}, column)
- elif '%m' in df or '%b' in df:
- return filter_and_upload({fieldname: date.replace(day=1), precision_field.name.lower(): 2}, column)
- else:
- return filter_and_upload({fieldname: date.replace(day=1, month=1), precision_field.name.lower(): 3}, column)
-
- return ParseFailure('badDateFormat', {'value':value, 'format':dateformat}, column)
-
-def parse_string(value: str) -> Optional[str]:
- result = value.strip()
- if result == "":
- return None
- return result
-
-def is_latlong(table, field) -> bool:
- return table.name == 'Locality' \
- and field.name in ('latitude1', 'longitude1', 'latitude2', 'longitude2')
-
-def parse_latlong(field, value: str, column: str) -> Union[ParseResult, ParseFailure]:
- parsed = parse_coord(value)
-
- if parsed is None:
- return ParseFailure('coordinateBadFormat', {'value':value}, column)
-
- coord, unit = parsed
- if field.name.startswith('lat') and abs(coord) >= 90:
- return ParseFailure('latitudeOutOfRange', {'value':value}, column)
-
- if field.name.startswith('long') and abs(coord) >= 180:
- return ParseFailure('longitudeOutOfRange', {'value': value}, column)
-
- text_filter = {field.name.replace('itude', '') + 'text': parse_string(value)}
- return ParseResult(
- text_filter,
- {field.name: coord, 'originallatlongunit': unit, **text_filter},
- None,
- column,
- None
- )
-
-
-def parse_coord(value: str) -> Optional[Tuple[float, int]]:
- for p in LATLONG_PARSER_DEFS:
- match = re.compile(p.regex, re.I).match(value)
- if match and match.group(1):
- try:
- # relies on signed zeros in floats
- # see https://docs.python.org/3/library/math.html#math.copysign
- comps = [float(match.group(i)) for i in p.comp_groups]
- except ValueError:
- continue
- result, divisor = 0.0, 1
- for comp in comps:
- result += abs(comp) / divisor
- divisor *= 60
- result = math.copysign(result, comps[0])
- if match.group(p.dir_group).lower() in ("s", "w"):
- result = -result
- return (result, p.unit)
- return None
-
-class LatLongParserDef(NamedTuple):
- regex: str
- comp_groups: List[int]
- dir_group: int
- unit: int
-
-LATLONG_PARSER_DEFS = [
- LatLongParserDef(
- r'^(-?\d{0,3}(\.\d*)?)[^\d\.nsew]*([nsew]?)$',
- [1],
- 3,
- 0
- ),
-
- LatLongParserDef(
- r'^(-?\d{1,3})[^\d\.]+(\d{0,2}(\.\d*)?)[^\d\.nsew]*([nsew]?)$',
- [1, 2],
- 4,
- 2
- ),
-
- LatLongParserDef(
- r'^(-?\d{1,3})[^\d\.]+(\d{1,2})[^\d\.]+(\d{0,2}(\.\d*)?)[^\d\.nsew]*([nsew]?)$',
- [1, 2, 3],
- 5,
- 1
- ),
-]
+ raise NotImplementedError(
+ "unknown picklist type {}".format(picklist.type))
def assertNever(x: NoReturn) -> NoReturn:
diff --git a/specifyweb/workbench/upload/tests/test_upload_results_json.py b/specifyweb/workbench/upload/tests/test_upload_results_json.py
index 27365990761..539421b8fb5 100644
--- a/specifyweb/workbench/upload/tests/test_upload_results_json.py
+++ b/specifyweb/workbench/upload/tests/test_upload_results_json.py
@@ -69,7 +69,7 @@ def testUploadResultExplicit(self):
columns=['report info column 1', 'report info column 2'],
treeInfo=None
))
- parse_failure: ParseFailure = ParseFailure(
+ parse_failure: WorkBenchParseFailure = WorkBenchParseFailure(
message='parse failure message',
payload={'parse failure payload key 1': 'parse failure payload value 1', 'parse failure payload key 2': 'parse failure payload value 2'},
column='parse failure column')
diff --git a/specifyweb/workbench/upload/tests/testparsing.py b/specifyweb/workbench/upload/tests/testparsing.py
index d8156817035..a3a4f775c4e 100644
--- a/specifyweb/workbench/upload/tests/testparsing.py
+++ b/specifyweb/workbench/upload/tests/testparsing.py
@@ -11,14 +11,15 @@
from specifyweb.specify.datamodel import datamodel
from specifyweb.stored_queries.format import LDLM_TO_MYSQL, MYSQL_TO_MONTH, \
MYSQL_TO_YEAR
+from specifyweb.specify.parse import parse_coord, parse_date, ParseFailure, ParseSucess
from .base import UploadTestsBase, get_table
from ..column_options import ColumnOptions
-from ..parsing import parse_coord, parse_date, ParseResult as PR
+from ..parsing import ParseResult as PR
from ..treerecord import TreeRecord
from ..upload import do_upload, do_upload_csv
from ..upload_plan_schema import parse_column_options
from ..upload_result import Uploaded, Matched, NullRecord, ParseFailures, \
- ParseFailure
+ WorkBenchParseFailure
from ..upload_results_schema import schema as upload_results_schema
from ..upload_table import UploadTable
@@ -27,54 +28,54 @@
class DateParsingTests(unittest.TestCase):
def test_bad1(self) -> None:
- result = parse_date(co, 'catalogeddate', '%d/%m/%Y', 'foobar', 'catdate')
- self.assertEqual(ParseFailure(message='invalidYear', payload={'value':'foobar'}, column='catdate'), result)
+ result = parse_date(co, 'catalogeddate', '%d/%m/%Y', 'foobar')
+ self.assertEqual(ParseFailure(message='invalidYear', payload={'value':'foobar'}), result)
def test_bad2(self) -> None:
- result = parse_date(co, 'catalogeddate', '%d/%m/%Y', '1978-7-24', 'catdate')
- self.assertEqual(ParseFailure(message='badDateFormat', payload={'value':'1978-7-24', 'format':'%d/%m/%Y'}, column='catdate'), result)
+ result = parse_date(co, 'catalogeddate', '%d/%m/%Y', '1978-7-24')
+ self.assertEqual(ParseFailure(message='badDateFormat', payload={'value':'1978-7-24', 'format':'%d/%m/%Y'}), result)
@given(st.dates(min_value=date(1000,1,1)), st.sampled_from([f for f in LDLM_TO_MYSQL.values() if '%Y' in f]))
def test_full_date(self, date, format) -> None:
datestr = date.strftime(format)
- result = parse_date(co, 'catalogeddate', format, datestr, 'catdate')
- self.assertIsInstance(result, PR)
- assert isinstance(result, PR)
- self.assertEqual({'catalogeddate': date, 'catalogeddateprecision': 1}, result.upload)
+ result = parse_date(co, 'catalogeddate', format, datestr)
+ self.assertIsInstance(result, ParseSucess)
+ assert isinstance(result, ParseSucess)
+ self.assertEqual({'catalogeddate': date, 'catalogeddateprecision': 1}, result.to_upload)
@given(st.dates(min_value=date(1000,1,1)), st.sampled_from([f for f in LDLM_TO_MYSQL.values() if '%Y' in f]))
def test_month(self, date, format) -> None:
datestr = date.strftime(MYSQL_TO_MONTH[format])
- result = parse_date(co, 'catalogeddate', format, datestr, 'catdate')
- self.assertIsInstance(result, PR)
- assert isinstance(result, PR)
- self.assertEqual({'catalogeddate': date.replace(day=1), 'catalogeddateprecision': 2}, result.upload)
+ result = parse_date(co, 'catalogeddate', format, datestr)
+ self.assertIsInstance(result, ParseSucess)
+ assert isinstance(result, ParseSucess)
+ self.assertEqual({'catalogeddate': date.replace(day=1), 'catalogeddateprecision': 2}, result.to_upload)
@given(st.dates(min_value=date(1000,1,1)), st.sampled_from([f for f in LDLM_TO_MYSQL.values() if '%Y' in f]))
def test_year(self, date, format) -> None:
datestr = date.strftime(MYSQL_TO_YEAR[format])
- result = parse_date(co, 'catalogeddate', format, datestr, 'catdate')
- self.assertIsInstance(result, PR)
- assert isinstance(result, PR)
- self.assertEqual({'catalogeddate': date.replace(day=1, month=1), 'catalogeddateprecision': 3}, result.upload)
+ result = parse_date(co, 'catalogeddate', format, datestr)
+ self.assertIsInstance(result, ParseSucess)
+ assert isinstance(result, ParseSucess)
+ self.assertEqual({'catalogeddate': date.replace(day=1, month=1), 'catalogeddateprecision': 3}, result.to_upload)
@given(st.dates(min_value=date(1000,1,1)), st.sampled_from([f for f in LDLM_TO_MYSQL.values() if '%Y' in f]))
def test_zero_day(self, date, format) -> None:
datestr = date.strftime(re.sub('%d', '00', format))
self.assertTrue('00' in datestr)
- result = parse_date(co, 'catalogeddate', format, datestr, 'catdate')
- self.assertIsInstance(result, PR)
- assert isinstance(result, PR)
- self.assertEqual({'catalogeddate': date.replace(day=1), 'catalogeddateprecision': 2}, result.upload)
+ result = parse_date(co, 'catalogeddate', format, datestr)
+ self.assertIsInstance(result, ParseSucess)
+ assert isinstance(result, ParseSucess)
+ self.assertEqual({'catalogeddate': date.replace(day=1), 'catalogeddateprecision': 2}, result.to_upload)
@given(st.dates(min_value=date(1000,1,1)), st.sampled_from([f for f in LDLM_TO_MYSQL.values() if '%Y' in f and '%b' not in f]))
def test_zero_month(self, date, format) -> None:
datestr = date.strftime(re.sub('(%d)|(%m)', '00', format))
self.assertIn('00', datestr)
- result = parse_date(co, 'catalogeddate', format, datestr, 'catdate')
- self.assertIsInstance(result, PR)
- assert isinstance(result, PR)
- self.assertEqual({'catalogeddate': date.replace(day=1,month=1), 'catalogeddateprecision': 3}, result.upload)
+ result = parse_date(co, 'catalogeddate', format, datestr)
+ self.assertIsInstance(result, ParseSucess)
+ assert isinstance(result, ParseSucess)
+ self.assertEqual({'catalogeddate': date.replace(day=1,month=1), 'catalogeddateprecision': 3}, result.to_upload)
class ParsingTests(UploadTestsBase):
def setUp(self) -> None:
@@ -279,7 +280,7 @@ def test_readonly_picklist(self) -> None:
result2 = results[2].record_result
assert isinstance(result2, ParseFailures)
- self.assertEqual([ParseFailure(
+ self.assertEqual([WorkBenchParseFailure(
message='failedParsingPickList',
payload={'value': 'Hon.'},
column='title'
@@ -343,7 +344,7 @@ def test_multiple_parsing_errors_reported(self) -> None:
failed_result = upload_results[0].record_result
self.assertIsInstance(failed_result, ParseFailures)
assert isinstance(failed_result, ParseFailures) # make typechecker happy
- self.assertEqual([ParseFailure(message='invalidYear', payload={'value':'foobar'}, column='Start Date Collected'), ParseFailure(message='invalidYear', payload={'value': 'bad date'}, column='ID Date')], failed_result.failures)
+ self.assertEqual([WorkBenchParseFailure(message='invalidYear', payload={'value':'foobar'}, column='Start Date Collected'), WorkBenchParseFailure(message='invalidYear', payload={'value': 'bad date'}, column='ID Date')], failed_result.failures)
def test_out_of_range_lat_long(self) -> None:
reader = csv.DictReader(io.StringIO(
@@ -354,7 +355,7 @@ def test_out_of_range_lat_long(self) -> None:
failed_result = upload_results[0].record_result
self.assertIsInstance(failed_result, ParseFailures)
assert isinstance(failed_result, ParseFailures) # make typechecker happy
- self.assertEqual([ParseFailure(message='latitudeOutOfRange', payload={'value':'128° 06.07\' N'}, column='Latitude1'), ParseFailure(message='longitudeOutOfRange', payload={'value': '191° 02.42\' W'}, column='Longitude1')], failed_result.failures)
+ self.assertEqual([WorkBenchParseFailure(message='latitudeOutOfRange', payload={'value':'128° 06.07\' N'}, column='Latitude1'), WorkBenchParseFailure(message='longitudeOutOfRange', payload={'value': '191° 02.42\' W'}, column='Longitude1')], failed_result.failures)
def test_agent_type(self) -> None:
plan = UploadTable(
@@ -387,7 +388,7 @@ def test_agent_type(self) -> None:
result2 = results[2].record_result
assert isinstance(result2, ParseFailures)
- self.assertEqual([ParseFailure(message='failedParsingAgentType',payload={'badType':'Extra terrestrial','validTypes':['Organization', 'Person', 'Other', 'Group']}, column='agenttype')], result2.failures)
+ self.assertEqual([WorkBenchParseFailure(message='failedParsingAgentType',payload={'badType':'Extra terrestrial','validTypes':['Organization', 'Person', 'Other', 'Group']}, column='agenttype')], result2.failures)
result3 = results[3].record_result
assert isinstance(result3, Uploaded)
@@ -412,7 +413,7 @@ def test_tree_cols_without_name(self) -> None:
results = do_upload(self.collection, data, plan, self.agent.id)
self.assertIsInstance(results[0].record_result, Uploaded)
- self.assertEqual(results[1].record_result, ParseFailures(failures=[ParseFailure(message='invalidPartialRecord', payload={'column':'Species'}, column='Species Author')]))
+ self.assertEqual(results[1].record_result, ParseFailures(failures=[WorkBenchParseFailure(message='invalidPartialRecord', payload={'column':'Species'}, column='Species Author')]))
def test_value_too_long(self) -> None:
plan = TreeRecord(
@@ -431,7 +432,7 @@ def test_value_too_long(self) -> None:
self.assertIsInstance(results[0].record_result, Uploaded)
self.assertIsInstance(results[1].record_result, Uploaded)
- self.assertEqual(results[2].record_result, ParseFailures(failures=[ParseFailure(message='valueTooLong', payload={'maxLength': 128}, column='Species Author')]))
+ self.assertEqual(results[2].record_result, ParseFailures(failures=[WorkBenchParseFailure(message='valueTooLong', payload={'field': 'author', 'maxLength': 128}, column='Species Author')]))
class MatchingBehaviorTests(UploadTestsBase):
@@ -795,7 +796,7 @@ def test_wbcols_with_null_disallowed(self) -> None:
validate([result.to_json()], upload_results_schema)
self.assertIsInstance(results[0].record_result, Uploaded)
- self.assertEqual(results[1].record_result, ParseFailures(failures=[ParseFailure(message='field is required by upload plan mapping', payload={}, column='firstname')]))
+ self.assertEqual(results[1].record_result, ParseFailures(failures=[WorkBenchParseFailure(message='field is required by upload plan mapping', payload={}, column='firstname')]))
self.assertIsInstance(results[2].record_result, Uploaded)
def test_wbcols_with_null_disallowed_and_ignoreWhenBlank(self) -> None:
@@ -822,7 +823,7 @@ def test_wbcols_with_null_disallowed_and_ignoreWhenBlank(self) -> None:
validate([result.to_json()], upload_results_schema)
self.assertIsInstance(results[0].record_result, Uploaded)
- self.assertEqual(results[1].record_result, ParseFailures(failures=[ParseFailure(message='field is required by upload plan mapping', payload={}, column='firstname')]))
+ self.assertEqual(results[1].record_result, ParseFailures(failures=[WorkBenchParseFailure(message='field is required by upload plan mapping', payload={}, column='firstname')]))
self.assertIsInstance(results[2].record_result, Uploaded)
self.assertIsInstance(results[3].record_result, Matched)
self.assertIsInstance(results[4].record_result, Uploaded)
@@ -851,7 +852,7 @@ def test_wbcols_with_null_disallowed_and_ignoreAlways(self) -> None:
validate([result.to_json()], upload_results_schema)
self.assertIsInstance(results[0].record_result, Uploaded)
- self.assertEqual(results[1].record_result, ParseFailures(failures=[ParseFailure(message='field is required by upload plan mapping', payload={}, column='firstname')]))
+ self.assertEqual(results[1].record_result, ParseFailures(failures=[WorkBenchParseFailure(message='field is required by upload plan mapping', payload={}, column='firstname')]))
self.assertIsInstance(results[2].record_result, Uploaded)
self.assertIsInstance(results[3].record_result, Matched)
self.assertIsInstance(results[4].record_result, Matched)
diff --git a/specifyweb/workbench/upload/treerecord.py b/specifyweb/workbench/upload/treerecord.py
index 14fc286bdab..d34f7619aa5 100644
--- a/specifyweb/workbench/upload/treerecord.py
+++ b/specifyweb/workbench/upload/treerecord.py
@@ -11,7 +11,7 @@
from specifyweb.businessrules.exceptions import BusinessRuleException
from specifyweb.specify import models
from .column_options import ColumnOptions, ExtendedColumnOptions
-from .parsing import ParseResult, ParseFailure, parse_many, filter_and_upload
+from .parsing import ParseResult, WorkBenchParseFailure, parse_many, filter_and_upload
from .upload_result import UploadResult, NullRecord, NoMatch, Matched, \
MatchedMultiple, Uploaded, ParseFailures, FailedBusinessRule, ReportInfo, \
TreeInfo
@@ -59,7 +59,7 @@ def get_treedefs(self) -> Set:
def bind(self, collection, row: Row, uploadingAgentId: Optional[int], auditor: Auditor, cache: Optional[Dict]=None, row_index: Optional[int] = None) -> Union["BoundTreeRecord", ParseFailures]:
parsedFields: Dict[str, List[ParseResult]] = {}
- parseFails: List[ParseFailure] = []
+ parseFails: List[WorkBenchParseFailure] = []
for rank, cols in self.ranks.items():
nameColumn = cols['name']
presults, pfails = parse_many(collection, self.name, cols, row)
@@ -68,7 +68,7 @@ def bind(self, collection, row: Row, uploadingAgentId: Optional[int], auditor: A
filters = {k: v for result in presults for k, v in result.filter_on.items()}
if filters.get('name', None) is None:
parseFails += [
- ParseFailure('invalidPartialRecord',{'column':nameColumn.column}, result.column)
+ WorkBenchParseFailure('invalidPartialRecord',{'column':nameColumn.column}, result.column)
for result in presults
if any(v is not None for v in result.filter_on.values())
]
@@ -303,7 +303,7 @@ def _upload(self, to_upload: List[TreeDefItemWithParseResults], matched: Union[M
missing_requireds = [
# TODO: there should probably be a different structure for
# missing required fields than ParseFailure
- ParseFailure(r.missing_required, {}, r.column)
+ WorkBenchParseFailure(r.missing_required, {}, r.column)
for tdiwpr in to_upload
for r in tdiwpr.results
if r.missing_required is not None
diff --git a/specifyweb/workbench/upload/upload_result.py b/specifyweb/workbench/upload/upload_result.py
index a993c330387..bda62905e41 100644
--- a/specifyweb/workbench/upload/upload_result.py
+++ b/specifyweb/workbench/upload/upload_result.py
@@ -2,7 +2,7 @@
from typing_extensions import Literal
-from .parsing import ParseFailure
+from .parsing import WorkBenchParseFailure
Failure = Literal["Failure"]
@@ -153,7 +153,7 @@ def json_to_NoMatch(json: Dict) -> NoMatch:
return NoMatch(info=json_to_ReportInfo(r['info']))
class ParseFailures(NamedTuple):
- failures: List[ParseFailure]
+ failures: List[WorkBenchParseFailure]
def get_id(self) -> Failure:
return "Failure"
@@ -163,7 +163,7 @@ def to_json(self):
def json_to_ParseFailures(json: Dict) -> ParseFailures:
r = json['ParseFailures']
- return ParseFailures(failures=[ParseFailure(*i) for i in r['failures']])
+ return ParseFailures(failures=[WorkBenchParseFailure(*i) for i in r['failures']])
class PropagatedFailure(NamedTuple):
def get_id(self) -> Failure:
diff --git a/specifyweb/workbench/upload/upload_table.py b/specifyweb/workbench/upload/upload_table.py
index 70e4c49860e..e361ebb478f 100644
--- a/specifyweb/workbench/upload/upload_table.py
+++ b/specifyweb/workbench/upload/upload_table.py
@@ -8,7 +8,7 @@
from specifyweb.businessrules.exceptions import BusinessRuleException
from specifyweb.specify import models
from .column_options import ColumnOptions, ExtendedColumnOptions
-from .parsing import parse_many, ParseResult, ParseFailure
+from .parsing import parse_many, ParseResult, WorkBenchParseFailure
from .tomany import ToManyRecord, ScopedToManyRecord, BoundToManyRecord
from .upload_result import UploadResult, Uploaded, NoMatch, Matched, \
MatchedMultiple, NullRecord, FailedBusinessRule, ReportInfo, \
@@ -467,7 +467,7 @@ def _do_upload(self, model, toOneResults: Dict[str, UploadResult], info: ReportI
missing_requireds = [
# TODO: there should probably be a different structure for
# missing required fields than ParseFailure
- ParseFailure(parsedField.missing_required, {}, parsedField.column)
+ WorkBenchParseFailure(parsedField.missing_required, {}, parsedField.column)
for parsedField in self.parsedFields
if parsedField.missing_required is not None
]