diff --git a/specifyweb/backend/delete_blockers/views.py b/specifyweb/backend/delete_blockers/views.py index 034a5339b2c..e562390aa76 100644 --- a/specifyweb/backend/delete_blockers/views.py +++ b/specifyweb/backend/delete_blockers/views.py @@ -1,6 +1,7 @@ from django import http from django.db import router, transaction from django.db.models.deletion import Collector +from django.db.models import ForeignKey from specifyweb.middleware.general import require_http_methods from specifyweb.specify.api.crud import ( @@ -43,13 +44,66 @@ def _collect_delete_blockers(obj, using) -> list[dict]: collector.collect([obj]) return flatten([ [ - { - 'table': sub_objs[0].__class__.__name__, - 'field': field.name, - 'ids': [sub_obj.id for sub_obj in sub_objs] - } + _serialize_delete_blocker(field, sub_objs) ] for field, sub_objs in collector.delete_blockers ]) +def _serialize_delete_blocker(field, sub_objs) -> dict: + normalized = _normalize_many_to_many_blocker(field, sub_objs) + if normalized is not None: + return normalized + + return { + 'table': sub_objs[0].__class__.__name__, + 'field': field.name, + 'ids': [sub_obj.id for sub_obj in sub_objs] + } + +def _normalize_many_to_many_blocker(field, sub_objs) -> dict | None: + through_model = sub_objs[0].__class__ + if hasattr(through_model, 'specify_model'): + return None + + foreign_keys = [ + model_field + for model_field in through_model._meta.fields + if isinstance(model_field, ForeignKey) + ] + if len(foreign_keys) != 2: + return None + + other_field = next( + ( + model_field + for model_field in foreign_keys + if model_field.name != field.name + ), + None, + ) + if other_field is None: + return None + + other_model = other_field.related_model + if not hasattr(other_model, 'specify_model'): + return None + + relationship = next( + ( + relationship + for relationship in other_model.specify_model.relationships + if getattr(relationship, 'through_model', None) == through_model.__name__ + and getattr(relationship, 'through_field', 
None) == other_field.name + ), + None, + ) + if relationship is None: + return None + + return { + 'table': other_model.specify_model.name, + 'field': relationship.name, + 'ids': [getattr(sub_obj, other_field.attname) for sub_obj in sub_objs], + } + def flatten(l): return [item for sublist in l for item in sublist] diff --git a/specifyweb/frontend/js_src/lib/components/DataModel/businessRuleDefs.ts b/specifyweb/frontend/js_src/lib/components/DataModel/businessRuleDefs.ts index 5b0fe4fe7d0..d8921c1df49 100644 --- a/specifyweb/frontend/js_src/lib/components/DataModel/businessRuleDefs.ts +++ b/specifyweb/frontend/js_src/lib/components/DataModel/businessRuleDefs.ts @@ -201,9 +201,10 @@ export const businessRuleDefs: MappedBusinessRuleDefs = { return undefined; }, catalogNumber: async (resource): Promise => { - const preferences = await import( - '../Preferences/collectionPreferences' - ).then(({ collectionPreferences }) => collectionPreferences); + const preferences = + await import('../Preferences/collectionPreferences').then( + ({ collectionPreferences }) => collectionPreferences + ); const uniqueCatalogNumberAccrossComponentAndCOPref = preferences.get( 'uniqueCatalogNumberAccrossComponentAndCO', @@ -429,9 +430,10 @@ export const businessRuleDefs: MappedBusinessRuleDefs = { return undefined; }, catalogNumber: async (resource): Promise => { - const preferences = await import( - '../Preferences/collectionPreferences' - ).then(({ collectionPreferences }) => collectionPreferences); + const preferences = + await import('../Preferences/collectionPreferences').then( + ({ collectionPreferences }) => collectionPreferences + ); const uniqueCatalogNumberAccrossComponentAndCOPref = preferences.get( 'uniqueCatalogNumberAccrossComponentAndCO', diff --git a/specifyweb/frontend/js_src/lib/components/ExpressSearchConfig/ExpressSearchConfigDialog.tsx b/specifyweb/frontend/js_src/lib/components/ExpressSearchConfig/ExpressSearchConfigDialog.tsx index 6c5b8a531b5..81cc07e0bd3 100644 --- 
a/specifyweb/frontend/js_src/lib/components/ExpressSearchConfig/ExpressSearchConfigDialog.tsx +++ b/specifyweb/frontend/js_src/lib/components/ExpressSearchConfig/ExpressSearchConfigDialog.tsx @@ -14,7 +14,7 @@ type ExpressSearchConfigDialogProps = { readonly isOpen: boolean; readonly onClose: () => void; readonly onSave?: () => void; -} +}; export function ExpressSearchConfigDialog({ isOpen, @@ -76,7 +76,7 @@ export function ExpressSearchConfigDialog({ isOpen={isOpen} onClose={onClose} > - diff --git a/specifyweb/frontend/js_src/lib/components/ExpressSearchConfig/ResultsOrderingTab.tsx b/specifyweb/frontend/js_src/lib/components/ExpressSearchConfig/ResultsOrderingTab.tsx index 4478fd66f5a..437e55abc1b 100644 --- a/specifyweb/frontend/js_src/lib/components/ExpressSearchConfig/ResultsOrderingTab.tsx +++ b/specifyweb/frontend/js_src/lib/components/ExpressSearchConfig/ResultsOrderingTab.tsx @@ -11,12 +11,17 @@ import { genericTables } from '../DataModel/tables'; function tableLabel(tableName: string): string { return ( - (genericTables[tableName as keyof typeof genericTables]?.label as string | undefined) ?? - camelToHuman(tableName) + (genericTables[tableName as keyof typeof genericTables]?.label as + | string + | undefined) ?? camelToHuman(tableName) ); } -export function ResultsOrderingTab({ config, relatedQueriesDefinitions = [], onChangeConfig }: any) { +export function ResultsOrderingTab({ + config, + relatedQueriesDefinitions = [], + onChangeConfig, +}: any) { const baseTables = config.tables .filter((t: any) => t.searchFields.some((sf: any) => sf.inUse !== false)) .map((t: any) => ({ @@ -29,8 +34,12 @@ export function ResultsOrderingTab({ config, relatedQueriesDefinitions = [], onC const activeQueries = config.relatedQueries .filter((rq: any) => rq.isActive) .map((rq: any) => { - const def = relatedQueriesDefinitions.find((def: any) => def.id === rq.id); - const title = def?.name ? 
getExpressSearchQueryTitle(def.name) : undefined; + const def = relatedQueriesDefinitions.find( + (def: any) => def.id === rq.id + ); + const title = def?.name + ? getExpressSearchQueryTitle(def.name) + : undefined; if (!def || !title || title === String(def.name)) { return undefined; @@ -86,7 +95,9 @@ export function ResultsOrderingTab({ config, relatedQueriesDefinitions = [], onC return (
-

{expressSearchConfigText.configureResultsOrdering()}

+

+ {expressSearchConfigText.configureResultsOrdering()} +

{expressSearchConfigText.reorderResultsOrderingDescription()}

@@ -99,7 +110,10 @@ export function ResultsOrderingTab({ config, relatedQueriesDefinitions = [], onC > {item.label}
- moveItem(index, 'up')}> + moveItem(index, 'up')} + > {icons.chevronUp} { @@ -65,42 +65,43 @@ describe('ExpressSearchConfigEditor', () => { }); expect(onChangeJSON).toHaveBeenCalled(); - const latestConfig = onChangeJSON.mock.calls[onChangeJSON.mock.calls.length - 1][0]; + const latestConfig = + onChangeJSON.mock.calls[onChangeJSON.mock.calls.length - 1][0]; expect(latestConfig.tables[0].tableName).toBe('Agent'); expect(latestConfig.tables[0].searchFields[0].fieldName).toBe('firstName'); }); test('renders loading state initially', async () => { const { getByText } = mount( - ); expect(getByText('Loading...')).toBeInTheDocument(); - + // Wait for it to finish loading to avoid act warnings await act(async () => { - await new Promise(resolve => setTimeout(resolve, 0)); + await new Promise((resolve) => setTimeout(resolve, 0)); }); }); test('renders tabs after data load', async () => { const { findByRole } = mount( - ); - + expect(await findByRole('tablist')).toBeInTheDocument(); }); test('switches tabs correctly', async () => { const { findByText, getByRole, user } = mount( - ); @@ -112,7 +113,7 @@ describe('ExpressSearchConfigEditor', () => { await act(async () => { await user.click(relatedTab); }); - + expect(await findByText('Related Tables Tab')).toBeInTheDocument(); // Click Results Ordering diff --git a/specifyweb/frontend/js_src/lib/components/ExpressSearchConfig/__tests__/RelatedTablesTab.test.tsx b/specifyweb/frontend/js_src/lib/components/ExpressSearchConfig/__tests__/RelatedTablesTab.test.tsx index bacba609689..e096a045a54 100644 --- a/specifyweb/frontend/js_src/lib/components/ExpressSearchConfig/__tests__/RelatedTablesTab.test.tsx +++ b/specifyweb/frontend/js_src/lib/components/ExpressSearchConfig/__tests__/RelatedTablesTab.test.tsx @@ -43,7 +43,9 @@ describe('RelatedTablesTab', () => { expect(onChangeConfig).toHaveBeenCalledTimes(1); const newConfig = onChangeConfig.mock.calls[0][0]; - expect(newConfig.relatedQueries.find((rq: any) => rq.id === 
'2').isActive).toBe(true); + expect( + newConfig.relatedQueries.find((rq: any) => rq.id === '2').isActive + ).toBe(true); const activeRow = rows[0]; const activeCheckbox = activeRow.querySelector('input[type="checkbox"]'); @@ -54,6 +56,8 @@ describe('RelatedTablesTab', () => { expect(onChangeConfig).toHaveBeenCalledTimes(2); const secondConfig = onChangeConfig.mock.calls[1][0]; - expect(secondConfig.relatedQueries.find((rq: any) => rq.id === '1').isActive).toBe(false); + expect( + secondConfig.relatedQueries.find((rq: any) => rq.id === '1').isActive + ).toBe(false); }); }); diff --git a/specifyweb/frontend/js_src/lib/components/ExpressSearchConfig/__tests__/ResultsOrderingTab.test.tsx b/specifyweb/frontend/js_src/lib/components/ExpressSearchConfig/__tests__/ResultsOrderingTab.test.tsx index 5997fc65f7c..4b15f595931 100644 --- a/specifyweb/frontend/js_src/lib/components/ExpressSearchConfig/__tests__/ResultsOrderingTab.test.tsx +++ b/specifyweb/frontend/js_src/lib/components/ExpressSearchConfig/__tests__/ResultsOrderingTab.test.tsx @@ -39,9 +39,7 @@ describe('ResultsOrderingTab', () => { displayFields: [], }, ], - relatedQueries: [ - { id: '8', isActive: true, displayOrder: 1 }, - ], + relatedQueries: [{ id: '8', isActive: true, displayOrder: 1 }], }; const onChangeConfig = jest.fn(); diff --git a/specifyweb/frontend/js_src/lib/components/FormCells/FormTable.tsx b/specifyweb/frontend/js_src/lib/components/FormCells/FormTable.tsx index 0eaae790141..baf13a48768 100644 --- a/specifyweb/frontend/js_src/lib/components/FormCells/FormTable.tsx +++ b/specifyweb/frontend/js_src/lib/components/FormCells/FormTable.tsx @@ -214,8 +214,8 @@ export function FormTable({ resource.cid, Boolean( resource.specifyTable.name === 'Preparation' && - collectionPreparationPref && - resource.isNew() + collectionPreparationPref && + resource.isNew() ), ]) ) diff --git a/specifyweb/frontend/js_src/lib/components/FormParse/__tests__/index.test.ts 
b/specifyweb/frontend/js_src/lib/components/FormParse/__tests__/index.test.ts index 7e40f34979f..f116f3d913d 100644 --- a/specifyweb/frontend/js_src/lib/components/FormParse/__tests__/index.test.ts +++ b/specifyweb/frontend/js_src/lib/components/FormParse/__tests__/index.test.ts @@ -471,6 +471,30 @@ describe('parseFormDefinition', () => { describe('getColumnDefinitions', () => { requireContext(); + test('prefers linux definition over generic definition', () => + expect( + getColumnDefinitions( + xml( + ` + Generic + Linux + ` + ) + ) + ).toBe('Linux')); + + test('uses generic definition if linux definition is not available', () => + expect( + getColumnDefinitions( + xml( + ` + Mac + Generic + ` + ) + ) + ).toBe('Generic')); + test('fall back to first definition available', () => expect( getColumnDefinitions( @@ -510,7 +534,7 @@ theories(getColumnDefinition, [ ), undefined, ], - out: 'B', + out: 'A', }, ]); diff --git a/specifyweb/frontend/js_src/lib/components/FormParse/index.ts b/specifyweb/frontend/js_src/lib/components/FormParse/index.ts index a5d10dfd9df..86e5325b50f 100644 --- a/specifyweb/frontend/js_src/lib/components/FormParse/index.ts +++ b/specifyweb/frontend/js_src/lib/components/FormParse/index.ts @@ -90,6 +90,8 @@ export const formTypes = ['form', 'formTable'] as const; export type FormType = (typeof formTypes)[number]; export type FormMode = 'edit' | 'search' | 'view'; +const defaultColumnDefinitionOs = 'lnx'; + let views: R = {}; export const getViewSetApiUrl = (viewName: string): string => @@ -540,10 +542,25 @@ function getColumnDefinitions(viewDefinition: SimpleXmlNode): string { const getColumnDefinition = ( viewDefinition: SimpleXmlNode, os: string | undefined -): string | undefined => - viewDefinition.children.columnDef?.find((child) => - typeof os === 'string' ? 
getParsedAttribute(child, 'os') === os : true - )?.text; +): string | undefined => { + const columnDefinitions = viewDefinition.children.columnDef; + if (columnDefinitions === undefined) return undefined; + + if (typeof os === 'string') + return columnDefinitions.find( + (child) => getParsedAttribute(child, 'os') === os + )?.text; + + return ( + columnDefinitions.find( + (child) => getParsedAttribute(child, 'os') === defaultColumnDefinitionOs + )?.text ?? + columnDefinitions.find( + (child) => getParsedAttribute(child, 'os') === undefined + )?.text ?? + columnDefinitions[0]?.text + ); +}; const parseRows = async ( rawRows: RA, diff --git a/specifyweb/frontend/js_src/lib/components/Header/ExpressSearchHooks.tsx b/specifyweb/frontend/js_src/lib/components/Header/ExpressSearchHooks.tsx index a59d68078c7..62177d62253 100644 --- a/specifyweb/frontend/js_src/lib/components/Header/ExpressSearchHooks.tsx +++ b/specifyweb/frontend/js_src/lib/components/Header/ExpressSearchHooks.tsx @@ -52,14 +52,13 @@ export function usePrimarySearch( } async function fetchRelatedSearches(): Promise> { - return contextUnlockedPromise.then( - async (entrypoint) => - entrypoint === 'main' - ? ajax>('/context/available_related_searches.json', { - headers: { Accept: 'application/json' }, - cache: 'no-store', - }).then(({ data }) => data) - : foreverFetch>() + return contextUnlockedPromise.then(async (entrypoint) => + entrypoint === 'main' + ? 
ajax>('/context/available_related_searches.json', { + headers: { Accept: 'application/json' }, + cache: 'no-store', + }).then(({ data }) => data) + : foreverFetch>() ); } diff --git a/specifyweb/frontend/js_src/lib/components/Header/ExpressSearchTask.tsx b/specifyweb/frontend/js_src/lib/components/Header/ExpressSearchTask.tsx index 60686c9bccf..b392a409b6d 100644 --- a/specifyweb/frontend/js_src/lib/components/Header/ExpressSearchTask.tsx +++ b/specifyweb/frontend/js_src/lib/components/Header/ExpressSearchTask.tsx @@ -125,11 +125,7 @@ function ExpressSearchInstructions({ {headerText.documentation()} )} - +
    @@ -147,10 +143,8 @@ export function ExpressSearchView(): JSX.Element { const [pendingQuery] = value; const [isConfigOpen, setIsConfigOpen] = React.useState(false); const [configRefreshTrigger, setConfigRefreshTrigger] = React.useState(0); - const [showInstructions = true, setShowExpressSearchInstructions] = useCachedState( - 'expressSearch', - 'showSearchTips' - ); + const [showInstructions = true, setShowExpressSearchInstructions] = + useCachedState('expressSearch', 'showSearchTips'); const canEditExpressSearchConfig = hasToolPermission('resources', 'read') && hasToolPermission('resources', 'create') && @@ -176,11 +170,15 @@ export function ExpressSearchView(): JSX.Element { setShowExpressSearchInstructions((value) => !value)} + onClick={(): void => + setShowExpressSearchInstructions((value) => !value) + } /> {showInstructions && ( - setShowExpressSearchInstructions(false)} /> + setShowExpressSearchInstructions(false)} + /> )}
    setQuery(pendingQuery)}>
    diff --git a/specifyweb/frontend/js_src/lib/components/Notifications/NotificationRenderers.tsx b/specifyweb/frontend/js_src/lib/components/Notifications/NotificationRenderers.tsx index 26f4696c407..790b3373284 100644 --- a/specifyweb/frontend/js_src/lib/components/Notifications/NotificationRenderers.tsx +++ b/specifyweb/frontend/js_src/lib/components/Notifications/NotificationRenderers.tsx @@ -395,9 +395,7 @@ export const notificationRenderers: IR< ); }, 'collection-creation-starting'() { - return ( -

    {setupToolText.collectionCreationStarted()}

    - ); + return

    {setupToolText.collectionCreationStarted()}

    ; }, default(notification) { console.error('Unknown notification type', { notification }); diff --git a/specifyweb/frontend/js_src/lib/components/Permissions/index.ts b/specifyweb/frontend/js_src/lib/components/Permissions/index.ts index d1e7e50bcd4..353822c069e 100644 --- a/specifyweb/frontend/js_src/lib/components/Permissions/index.ts +++ b/specifyweb/frontend/js_src/lib/components/Permissions/index.ts @@ -64,7 +64,12 @@ export const getDerivedPermissions = () => derivedPermissions; const sortPolicies = (policy: typeof operationPolicies) => JSON.stringify( Object.fromEntries( - Object.entries(policy).sort(sortFunction(([key]) => key)) + Object.entries(policy) + .sort(sortFunction(([key]) => key)) + .map(([key, actions]) => [ + key, + Array.from(actions).sort(sortFunction((action) => action)), + ]) ) ); diff --git a/specifyweb/frontend/js_src/lib/components/WbPlanView/navigator.ts b/specifyweb/frontend/js_src/lib/components/WbPlanView/navigator.ts index fca97e9c6be..fbc359e53b4 100644 --- a/specifyweb/frontend/js_src/lib/components/WbPlanView/navigator.ts +++ b/specifyweb/frontend/js_src/lib/components/WbPlanView/navigator.ts @@ -713,4 +713,4 @@ export function getMappingLineData({ : filtered.filter( ({ customSelectSubtype }) => customSelectSubtype !== 'tree' ); -} \ No newline at end of file +} diff --git a/specifyweb/frontend/js_src/lib/localization/common.ts b/specifyweb/frontend/js_src/lib/localization/common.ts index 6ad133cdc01..d75f8bb7f83 100644 --- a/specifyweb/frontend/js_src/lib/localization/common.ts +++ b/specifyweb/frontend/js_src/lib/localization/common.ts @@ -104,13 +104,16 @@ export const commonText = createDictionary({ 'hr-hr': 'Savjeti za pretraživanje', }, expressSearchInstructions: { - 'en-us': 'Separate multiple search terms with spaces, use % anywhere, * at the beginning or end, and wrap terms in quotes for exact multi-word matches.', + 'en-us': + 'Separate multiple search terms with spaces, use % anywhere, * at the beginning or end, and 
wrap terms in quotes for exact multi-word matches.', }, expressSearchDateFormats: { - 'en-us': 'Dates can be searched using either the YYYY-MM-DD or MM/DD/YYYY format.', + 'en-us': + 'Dates can be searched using either the YYYY-MM-DD or MM/DD/YYYY format.', }, expressSearchPhraseExample: { - 'en-us': 'To search a term with spaces, wrap the phrase in quotes, for example "Clinton Lake".', + 'en-us': + 'To search a term with spaces, wrap the phrase in quotes, for example "Clinton Lake".', }, apply: { 'en-us': 'Apply', diff --git a/specifyweb/frontend/js_src/lib/localization/utils/config.ts b/specifyweb/frontend/js_src/lib/localization/utils/config.ts index e358f61952f..b70b574e01a 100644 --- a/specifyweb/frontend/js_src/lib/localization/utils/config.ts +++ b/specifyweb/frontend/js_src/lib/localization/utils/config.ts @@ -24,7 +24,7 @@ export const languageCodeMapper = { 'de-ch': 'de_CH', 'pt-br': 'pt_BR', 'hr-hr': 'hr', - 'nb': 'nb_NO' + nb: 'nb_NO', } as const; export const languages = Object.keys(languageCodeMapper); diff --git a/specifyweb/frontend/js_src/lib/utils/schemaVisibility.ts b/specifyweb/frontend/js_src/lib/utils/schemaVisibility.ts index fe41c0b752e..9c483b4168b 100644 --- a/specifyweb/frontend/js_src/lib/utils/schemaVisibility.ts +++ b/specifyweb/frontend/js_src/lib/utils/schemaVisibility.ts @@ -5,4 +5,4 @@ export function isSchemaFieldVisible( defaultFieldName?: string ): boolean { return showHiddenFields || !isHidden || fieldName === defaultFieldName; -} \ No newline at end of file +} diff --git a/specifyweb/specify/management/commands/run_key_migration_functions.py b/specifyweb/specify/management/commands/run_key_migration_functions.py index 33d679b7957..f2d2734d968 100644 --- a/specifyweb/specify/management/commands/run_key_migration_functions.py +++ b/specifyweb/specify/management/commands/run_key_migration_functions.py @@ -59,38 +59,50 @@ def apply_schema_overrides_for_all_disciplines(_apps): ) 
    apply_schema_defaults_task.apply(args=[discipline.id]) + # PERF: The vast majority of these can be collapsed to a single call to + # update_table_schema_config_with_defaults funcs = [ # usc.update_all_table_schema_config_with_defaults, usc.create_geo_table_schema_config_with_defaults, # specify 0002 usc.create_cotype_splocalecontaineritem, # specify 0003 usc.create_strat_table_schema_config_with_defaults, # specify 0004 - getting skip warnings usc.create_agetype_picklist, # specify 0004 - usc.update_cog_type_fields, # specify 0007 + # BUG: This should really only be run in the context of the migration, + # and not on startup. See the BUG comment below, above usc.update_hidden_prop + # usc.update_cog_type_fields, # specify 0007 usc.create_cogtype_picklist, # specify 0007 - usc.update_cogtype_splocalecontaineritem, # specify 0007 - usc.update_systemcogtypes_picklist, # specify 0007 - usc.update_cogtype_type_splocalecontaineritem, # specify 0007 + # BUG: These also shouldn't be run with this suite. These are one way + # data migrations in the context of migrations meant to resolve + # earlier migrations. 
+ # The functions can be destructive as we can't really discern whether + # or not these functions should be applied + # usc.update_cogtype_splocalecontaineritem, # specify 0007 + # usc.update_systemcogtypes_picklist, # specify 0007 + # usc.update_cogtype_type_splocalecontaineritem, # specify 0007 usc.update_relative_age_fields, # specify 0008 usc.add_cojo_to_schema_config, # specify 0012 usc.update_cog_schema_config, # specify 0013 usc.update_age_schema_config, # specify 0015 - usc.schemaconfig_fixes, # specify 0017 - usc.add_cot_catnum_to_schema, # specify 0018 + # usc.schemaconfig_fixes, # specify 0017 + # usc.add_cot_catnum_to_schema, # specify 0018 usc.add_tectonicunit_to_pc_in_schema_config, # specify 0020 - usc.fix_hidden_geo_prop, # specify 0021 - usc.update_schema_config_field_desc, # specify 0023 - usc.update_hidden_prop, # specify 0023 + # usc.fix_hidden_geo_prop, # specify 0021 + # usc.update_schema_config_field_desc, # specify 0023 + # BUG: We can't reliably run this function at startup, as there is no + # easy way to differentiate Schema Config tables/fields that should or + # should not be updated for already existing Disciplines. 
+ # usc.update_hidden_prop, # specify 0023 usc.update_storage_unique_id_fields, # specify 0024 - usc.update_co_children_fields, # specify 0027 - usc.remove_collectionobject_parentco, # specify 0029 - usc.add_quantities_gift, # specify 0032 - usc.update_paleo_desc, # specify 0033 - usc.update_accession_date_fields, # specify 0034 + # usc.update_co_children_fields, # specify 0027 + # usc.remove_collectionobject_parentco, # specify 0029 + # usc.add_quantities_gift, # specify 0032 + # usc.update_paleo_desc, # specify 0033 + # usc.update_accession_date_fields, # specify 0034 usc.update_loan_and_gift_agent_fields, # specify 0039 - usc.update_loan_and_gift_agents, # specify 0039 - usc.componets_schema_config_migrations, # specify 0040 + usc.remove_componentparent_item, # specify 0040 + usc.create_table_schema_config_with_defaults, # specify 0040 usc.create_discipline_type_picklist, # specify 0042 - usc.update_discipline_type_splocalecontaineritem, # specify 0042 + # usc.update_discipline_type_splocalecontaineritem, # specify 0042 apply_schema_overrides_for_all_disciplines, usc.deduplicate_schema_config_orm, ] diff --git a/specifyweb/specify/migration_utils/default_cots.py b/specifyweb/specify/migration_utils/default_cots.py index 000b7572053..1259f18b55e 100644 --- a/specifyweb/specify/migration_utils/default_cots.py +++ b/specifyweb/specify/migration_utils/default_cots.py @@ -68,7 +68,7 @@ def create_cogtype_type_picklist(apps, using='default'): Picklistitem = apps.get_model('specify', 'Picklistitem') for collection in Collection.objects.using(using).all(): - cog_type_picklist, _ = Picklist.objects.using(using).get_or_create( + cog_type_picklist, picklist_created = Picklist.objects.using(using).get_or_create( name='SystemCOGTypes', # Default Collection Object Group Types type=0, collection=collection, @@ -77,12 +77,13 @@ def create_cogtype_type_picklist(apps, using='default'): "readonly": False, } ) - for cog_type in DEFAULT_COG_TYPES: - 
Picklistitem.objects.using(using).get_or_create( - title=cog_type, - value=cog_type, - picklist=cog_type_picklist - ) + if picklist_created: + for cog_type in DEFAULT_COG_TYPES: + Picklistitem.objects.using(using).get_or_create( + title=cog_type, + value=cog_type, + picklist=cog_type_picklist + ) COTYPE_PICKLIST_NAME = 'CollectionObjectType' FIELD_NAME = 'collectionObjectType' diff --git a/specifyweb/specify/migration_utils/sp7_schemaconfig.py b/specifyweb/specify/migration_utils/sp7_schemaconfig.py index 7e4fce0c947..38e5ba48a88 100644 --- a/specifyweb/specify/migration_utils/sp7_schemaconfig.py +++ b/specifyweb/specify/migration_utils/sp7_schemaconfig.py @@ -411,23 +411,6 @@ 'Gift': ['agent1', 'agent2', 'agent3', 'agent4', 'agent5'], } -MIGRATION_0038_UPDATE_FIELDS = { - 'Loan': [ - ('agent1','Agent 1','Agent 1'), - ('agent2','Agent 2','Agent 2'), - ('agent3','Agent 3','Agent 3'), - ('agent4','Agent 4','Agent 4'), - ('agent5','Agent 5','Agent 5'), - ], - 'Gift': [ - ('agent1','Agent 1','Agent 1'), - ('agent2','Agent 2','Agent 2'), - ('agent3','Agent 3','Agent 3'), - ('agent4','Agent 4','Agent 4'), - ('agent5','Agent 5','Agent 5'), - ] -} - MIGRATION_0040_TABLES = [ ('Component', None), ] diff --git a/specifyweb/specify/migration_utils/tectonic_ranks.py b/specifyweb/specify/migration_utils/tectonic_ranks.py index c2afe88c6fa..d2bc97482de 100644 --- a/specifyweb/specify/migration_utils/tectonic_ranks.py +++ b/specifyweb/specify/migration_utils/tectonic_ranks.py @@ -15,14 +15,28 @@ def create_default_tectonic_ranks(apps): if not tectonic_tree_def: tectonic_tree_def, _ = TectonicTreeDef.objects.get_or_create(name="Tectonic Unit", discipline=discipline) - root, _ = TectonicUnitTreeDefItem.objects.get_or_create( - name="Root", - title="Root", + root, root_created = TectonicUnitTreeDefItem.objects.get_or_create( rankid=0, parent=None, treedef=tectonic_tree_def, - isenforced=True + defaults={ + "name": "Root", + "title": "Root", + "isenforced": True + } ) + # The root 
rank already exists in some capacity in the Discipline + # We can assume the user has made modifications to the tree at this + # point, so shouldn't go further with checking/creating lower ranks + if not root_created: + # BUG?: handle setting the tectonicunittreedef on the Discipline + # here? We can probably practically assume it's already set if the + # root node exists. + continue + + # At this point, these get_or_create calls should always be the + # equivalent of create (as we know the root node didn't exist). + # But keeping the get_or_create here just because superstructure, _ = TectonicUnitTreeDefItem.objects.get_or_create( name="Superstructure", title="Superstructure", @@ -91,23 +105,25 @@ def create_root_tectonic_node(apps): for discipline in Discipline.objects.all(): - tectonic_tree_def = TectonicUnitTreeDef.objects.filter(name="Tectonic Unit", discipline=discipline).first() + tectonic_tree_def = TectonicUnitTreeDef.objects.filter(discipline=discipline).first() if not tectonic_tree_def: tectonic_tree_def, is_created = TectonicUnitTreeDef.objects.get_or_create( name="Tectonic Unit", discipline=discipline ) - tectonic_tree_def_item = TectonicUnitTreeDefItem.objects.filter(treedef=tectonic_tree_def, name="Root").first() + tectonic_tree_def_item = TectonicUnitTreeDefItem.objects.filter(treedef=tectonic_tree_def, rankid=0, parent=None).first() if not tectonic_tree_def_item: tectonic_tree_def_item, is_created = TectonicUnitTreeDefItem.objects.get_or_create( name="Root", title="Root", treedef=tectonic_tree_def, + rankid=0, + parent=None, isenforced=True ) - root = TectonicUnit.objects.filter(name="Root", definition=tectonic_tree_def).first() + root = TectonicUnit.objects.filter(definition=tectonic_tree_def, definitionitem=tectonic_tree_def_item, rankid=0, parent=None).first() if not root: root, is_created = TectonicUnit.objects.get_or_create( name="Root", @@ -123,7 +139,7 @@ def create_root_tectonic_node(apps): if is_created: logger.info(f"Created root tectonic 
unit for discipline {discipline.name}") - TectonicUnitTreeDefItem.objects.filter(rankid=0, isenforced__isnull=True).update(isenforced=True) + TectonicUnitTreeDefItem.objects.filter(parent=None,rankid=0, isenforced__isnull=True).update(isenforced=True) def revert_create_root_tectonic_node(apps, schema_editor=None): TectonicUnit = apps.get_model('specify', 'TectonicUnit') diff --git a/specifyweb/specify/migration_utils/update_schema_config.py b/specifyweb/specify/migration_utils/update_schema_config.py index 5d5e2b89f84..b91927b90a6 100644 --- a/specifyweb/specify/migration_utils/update_schema_config.py +++ b/specifyweb/specify/migration_utils/update_schema_config.py @@ -1,17 +1,19 @@ import re import json -from typing import NamedTuple, Tuple +from typing import NamedTuple, Tuple, TypedDict, NotRequired import logging from collections import defaultdict from functools import lru_cache from pathlib import Path -from django.db.models import Q, Count +from django.db.models import Q, Count, Window, F, Exists, OuterRef from django.conf import settings from django.apps import apps as global_apps +from django.core.exceptions import MultipleObjectsReturned from django.db import connection, transaction +from django.db.models.functions import RowNumber from specifyweb.specify.models_utils.load_datamodel import FieldDoesNotExistError, TableDoesNotExistError @@ -45,7 +47,6 @@ MIGRATION_0034_UPDATE_FIELDS, MIGRATION_0035_FIELDS, MIGRATION_0038_FIELDS, - MIGRATION_0038_UPDATE_FIELDS, MIGRATION_0040_TABLES, MIGRATION_0040_FIELDS, MIGRATION_0040_UPDATE_FIELDS, @@ -233,7 +234,6 @@ def bulk_create_splocaleitemstr_idempotent(Splocaleitemstr, rows: list[dict]) -> key = (r["language"], r[fk_field].pk) desired_by_key[key] = r - rows_to_update = [] ids_to_delete: set[int] = set() to_create = [] for key, desired_row in desired_by_key.items(): @@ -243,88 +243,29 @@ def bulk_create_splocaleitemstr_idempotent(Splocaleitemstr, rows: list[dict]) -> 
to_create.append(Splocaleitemstr(**desired_row)) continue - keeper = existing_for_key[0] - if keeper.text != desired_row["text"]: - keeper.text = desired_row["text"] - rows_to_update.append(keeper) - for duplicate in existing_for_key[1:]: ids_to_delete.add(duplicate.id) if ids_to_delete: Splocaleitemstr.objects.filter(id__in=ids_to_delete).delete() - if rows_to_update: - Splocaleitemstr.objects.bulk_update(rows_to_update, ["text"]) - if to_create: Splocaleitemstr.objects.bulk_create(to_create) total_created += len(to_create) return total_created -def _lock_schema_config_discipline(apps, discipline_id: int) -> None: - DisciplineModel = apps.get_model('specify', 'Discipline') - DisciplineModel.objects.select_for_update().get(id=discipline_id) - -def _get_or_create_schema_config_row(model, attrs: dict, defaults: dict | None = None): - row = model.objects.filter(**attrs).order_by("id").first() - if row is not None: - return row, False - return model.objects.create(**{**attrs, **(defaults or {})}), True - -def _locale_string_key(row) -> tuple[str, str, str]: - return ( - row.language, - row.country or "", - row.variant or "", - ) - -def _repoint_unique_locale_strings( - ItemStr, - fk_field: str, - source_ids: list[int], - target_id: int, -) -> None: - if len(source_ids) == 0: - return - - fk_id_field = f"{fk_field}_id" - existing_keys = { - _locale_string_key(row) - for row in ItemStr.objects.filter(**{fk_id_field: target_id}).only( - "language", - "country", - "variant", - ) - } - - ids_to_update: list[int] = [] - ids_to_delete: list[int] = [] - for row in ItemStr.objects.filter( - **{f"{fk_id_field}__in": source_ids} - ).only("id", "language", "country", "variant").order_by("id"): - key = _locale_string_key(row) - if key in existing_keys: - ids_to_delete.append(row.id) - else: - existing_keys.add(key) - ids_to_update.append(row.id) - - if ids_to_delete: - ItemStr.objects.filter(id__in=ids_to_delete).delete() - - if ids_to_update: - 
ItemStr.objects.filter(id__in=ids_to_update).update( - **{fk_id_field: target_id} - ) +class TableDefaults(TypedDict): + name: NotRequired[str] + desc: NotRequired[str] + items: "NotRequired[dict[str, FieldDefaults]]" def update_table_schema_config_with_defaults( table_name, discipline_id: int, - description: str = None, + description: str | None = None, apps = global_apps, - defaults: dict = None, + defaults: TableDefaults | None = None, pending_itemstr_rows: list[dict] | None = None, ): Splocalecontainer = apps.get_model('specify', 'Splocalecontainer') @@ -347,7 +288,7 @@ def update_table_schema_config_with_defaults( pending_itemstr_rows = [] try: - table_defaults = defaults if defaults is not None else dict() + table_defaults = defaults if defaults is not None else TableDefaults() table_name_str = table_defaults.get('name', camel_to_spaced_title_case(uncapitilize(table.name))) table_desc_str = table_defaults.get('desc', camel_to_spaced_title_case(uncapitilize(table.name))) @@ -362,26 +303,26 @@ def update_table_schema_config_with_defaults( container_attrs = { "name": table_config.name.lower(), "discipline_id": discipline_id, - "schematype": table_config.schema_type, + "schematype": table_config.schema_type } - with transaction.atomic(): - _lock_schema_config_discipline(apps, discipline_id) - sp_local_container, _ = _get_or_create_schema_config_row( - Splocalecontainer, - container_attrs, - { - "ishidden": False, - "issystem": table.system, - "version": 0, - }, - ) + fetched_sp_locale_container = Splocalecontainer.objects.filter(**container_attrs).order_by("id").first() + + if fetched_sp_locale_container is None: + sp_local_container = Splocalecontainer.objects.create(**{ + **container_attrs, + "ishidden": False, + "issystem": table.system, + "version": 0, + }) + else: + sp_local_container = fetched_sp_locale_container - if Splocalecontaineritem.objects.filter( - container=sp_local_container, - name=table_config.name.lower(), - ).exists(): - return + if 
Splocalecontaineritem.objects.filter( + container=sp_local_container, + name=table_config.name.lower(), + ).exists(): + return item_str_rows = [] for k, text in { @@ -399,9 +340,7 @@ def update_table_schema_config_with_defaults( pending_itemstr_rows.extend(item_str_rows) - for field in table.all_fields: - if field is table.idField: - continue + for field in table._all_fields(exclude_id_field=True): field_defaults = None if table_defaults.get('items'): field_defaults = table_defaults['items'].get(field.name.lower()) @@ -435,12 +374,19 @@ def revert_table_schema_config(table_name, apps=global_apps): items.delete() containers.delete() +class FieldDefaults(TypedDict): + name: NotRequired[str] + desc: NotRequired[str] + ishidden: NotRequired[bool] + isrequired: NotRequired[bool] + picklistname: NotRequired[str] + def update_table_field_schema_config_with_defaults( table_name, discipline_id: int, field_name: str, apps = global_apps, - defaults: dict = None, + defaults: FieldDefaults | None = None, pending_itemstr_rows: list[dict] | None = None, ): table = datamodel.get_table(table_name) @@ -464,6 +410,19 @@ def update_table_field_schema_config_with_defaults( Splocaleitemstr = apps.get_model('specify', 'Splocaleitemstr') Splocalecontaineritem = apps.get_model('specify', 'Splocalecontaineritem') + try: + sp_local_container, _ = Splocalecontainer.objects.get_or_create( + name=table.name.lower(), + discipline_id=discipline_id, + schematype=table_config.schema_type, + ) + except MultipleObjectsReturned: + sp_local_container = Splocalecontainer.objects.filter( + name=table.name.lower(), + discipline_id=discipline_id, + schematype=table_config.schema_type + ).first() + try: field = table.get_field_strict(field_name) except FieldDoesNotExistError: @@ -500,33 +459,26 @@ def update_table_field_schema_config_with_defaults( language="en" ) - container_attrs = { - "name": table.name.lower(), - "discipline_id": discipline_id, - "schematype": table_config.schema_type, + 
container_item_attrs = { + "name": field_config.name, + "container": sp_local_container } - with transaction.atomic(): - _lock_schema_config_discipline(apps, discipline_id) - sp_local_container, _ = _get_or_create_schema_config_row( - Splocalecontainer, - container_attrs, - ) - sp_locale_container_item, _ = _get_or_create_schema_config_row( - Splocalecontaineritem, - { - "name": field_config.name, - "container": sp_local_container, - }, - { - "type": field_config.java_type, - "ishidden": field_hidden, - "isrequired": field_required, - "issystem": table.system, - "version": 0, - "picklistname": picklist_name, - }, + fetched_sp_locale_container_item = Splocalecontaineritem.objects.filter(**container_item_attrs).order_by("id").first() + + if fetched_sp_locale_container_item is None: + sp_locale_container_item = Splocalecontaineritem.objects.create(**{ + **container_item_attrs, + "type": field_config.java_type, + "ishidden": field_hidden, + "isrequired": field_required, + "issystem": table.system, + "version": 0, + "picklistname": picklist_name + } ) + else: + sp_locale_container_item = fetched_sp_locale_container_item itm_str_rows = [] for k, text in { @@ -583,16 +535,18 @@ def update_table_field_schema_config_params( Splocalecontainer = apps.get_model('specify', 'Splocalecontainer') Splocalecontaineritem = apps.get_model('specify', 'Splocalecontaineritem') - with transaction.atomic(): - _lock_schema_config_discipline(apps, discipline_id) - sp_local_container, _ = _get_or_create_schema_config_row( - Splocalecontainer, - { - "name": table.name.lower(), - "discipline_id": discipline_id, - "schematype": table_config.schema_type, - }, + try: + sp_local_container, _ = Splocalecontainer.objects.get_or_create( + name=table.name.lower(), + discipline_id=discipline_id, + schematype=table_config.schema_type, ) + except MultipleObjectsReturned: + sp_local_container = Splocalecontainer.objects.filter( + name=table.name.lower(), + discipline_id=discipline_id, + 
schematype=table_config.schema_type + ).first() try: field = table.get_field_strict(field_name) @@ -664,18 +618,15 @@ def find_missing_schema_config_fields(discipline_id: int, apps=global_apps): if table_name_lower not in container_names: missing_tables.append(table_name) missing_fields[table_name] = sorted( - field.name for field in table.all_fields - if field.name and field is not table.idField + field.name for field in table._all_fields(exclude_id_field=True) if field.name ) continue existing_fields = existing_fields_by_table.get(table_name_lower, set()) missing_in_table = sorted( # sort for better reproducablity field.name - for field in table.all_fields - if field.name - and field is not table.idField - and field.name.lower() not in existing_fields + for field in table._all_fields(exclude_id_field=True) + if field.name and field.name.lower() not in existing_fields ) if missing_in_table: @@ -777,104 +728,77 @@ def deduplicate_splocalecontainers(apps): # container items and strings, but this should be minimally sufficient # without sacrificing complexity and speed # See #7988 - duplicate_groups = ( - Container.objects.filter(schematype=0) - .values("discipline_id", "name", "schematype") - .annotate(count=Count("id")) - .filter(count__gt=1) - ) - - for group in list(duplicate_groups): - containers = list( + duplicate_containers = Container.objects.filter(schematype=0).annotate( + earlier_exists=Exists( Container.objects.filter( - discipline_id=group["discipline_id"], - name=group["name"], - schematype=group["schematype"], - ).order_by("id") - ) - if len(containers) <= 1: - continue - - # Remove the items and strings shouldn't be strictly neccesary as they - # should both cascade if we call duplicate_containers.delete() - # But this is the safer option for any edge cases with historical - # models in migrations and if we ever decide to change the delete - # behavior later down the line - # Plus, I don't think the performance impact should be **that** - # 
significantly different... - keeper = containers[0] - duplicate_ids = [container.id for container in containers[1:]] - - _repoint_unique_locale_strings( - ItemStr, "containername", duplicate_ids, keeper.id - ) - _repoint_unique_locale_strings( - ItemStr, "containerdesc", duplicate_ids, keeper.id + discipline_id=OuterRef('discipline_id'), + schematype=0, + name=OuterRef('name'), + timestampcreated__lt=OuterRef('timestampcreated') + ) ) + ).filter(earlier_exists=True) - keeper_items_by_name = {} - for item in ContainerItem.objects.filter(container=keeper).order_by("id"): - keeper_items_by_name.setdefault(item.name, item) - - duplicate_items = ContainerItem.objects.filter( - container_id__in=duplicate_ids - ).order_by("id") - for item in duplicate_items: - keeper_item = keeper_items_by_name.get(item.name) - if keeper_item is None: - ContainerItem.objects.filter(id=item.id).update( - container_id=keeper.id - ) - item.container_id = keeper.id - keeper_items_by_name[item.name] = item - continue - - _repoint_unique_locale_strings( - ItemStr, "itemname", [item.id], keeper_item.id - ) - _repoint_unique_locale_strings( - ItemStr, "itemdesc", [item.id], keeper_item.id - ) - ContainerItem.objects.filter(id=item.id).delete() + # Remove the items and strings shouldn't be strictly neccesary as they + # should both cascade if we call duplicate_containers.delete() + # But this is the safer option for any edge cases with historical + # models in migrations and if we ever decide to change the delete + # behavior later down the line + # Plus, I don't think the performance impact should be **that** + # significantly different... 
+ duplicate_items = ContainerItem.objects.filter(container__in=duplicate_containers) + ItemStr.objects.filter(itemname__in=duplicate_items).delete() + ItemStr.objects.filter(itemdesc__in=duplicate_items).delete() + duplicate_items.delete() - Container.objects.filter(id__in=duplicate_ids).delete() + ItemStr.objects.filter(containername__in=duplicate_containers).delete() + ItemStr.objects.filter(containerdesc__in=duplicate_containers).delete() + duplicate_containers.delete() def deduplicate_containeritems_and_strings(apps): ContainerItem = apps.get_model('specify', 'SpLocaleContainerItem') ItemStr = apps.get_model('specify', 'SpLocaleItemStr') with transaction.atomic(): - duplicate_groups = ( - ContainerItem.objects.filter(container__schematype=0) - .values("container_id", "name") - .annotate(count=Count("id")) - .filter(count__gt=1) + # Identify duplicate container items using a Window function. + # Partition by container_id + item name only. + # Only schema type 0 containers (standard schema) are eligible for this cleanup. + # The schema type 1 refers to the WorkBench Schema from Specify 6, which has + # a different structure and should not be modified by this cleanup. + # + # Why this key: + # - Rows are only true duplicates when they refer to the same concrete + # container row and the same field name. + # - Earlier broad grouping by discipline/container-name/field-name could + # collapse valid rows from different containers that happened to share + # names, causing missing Schema Config fields after dedupe. + # - This narrower key preserves legitimate rows and only removes + # duplicates that are semantically equivalent. 
+ qs = ContainerItem.objects.filter( + container__schematype=0, + ).annotate( + rn=Window( + expression=RowNumber(), + partition_by=[ + F('container_id'), + F('name') + ], + order_by=F('id').asc() + ) ) - deleted_count = 0 - for group in list(duplicate_groups): - duplicate_items = list( - ContainerItem.objects.filter( - container_id=group["container_id"], - name=group["name"], - ).order_by("id") - ) - if len(duplicate_items) <= 1: - continue + # Extract the IDs of the duplicates, keep the first and delete the rest + ids_to_delete = [item.id for item in qs if item.rn > 1] - item_to_keep = duplicate_items[0] - ids_to_delete = [item.id for item in duplicate_items[1:]] - _repoint_unique_locale_strings( - ItemStr, "itemname", ids_to_delete, item_to_keep.id - ) - _repoint_unique_locale_strings( - ItemStr, "itemdesc", ids_to_delete, item_to_keep.id - ) + if ids_to_delete: + # Delete dependent strings using corrected field names + ItemStr.objects.filter(itemname_id__in=ids_to_delete).delete() + ItemStr.objects.filter(itemdesc_id__in=ids_to_delete).delete() + + # Delete the duplicate Container Items ContainerItem.objects.filter(id__in=ids_to_delete).delete() - deleted_count += len(ids_to_delete) - - if deleted_count > 0: - print(f"Successfully deleted {deleted_count} duplicate schema items.") + + print(f"Successfully deleted {len(ids_to_delete)} duplicate schema items.") else: print("No duplicates found.") @@ -917,6 +841,8 @@ def create_geo_table_schema_config_with_defaults(apps): COT_FIELD_NAME = 'collectionObjectType' COT_TEXT = 'Collection Object Type' +# FEAT: Replace this implementation with +# update_table_field_schema_config_with_defaults def create_cotype_splocalecontaineritem(apps): Splocalecontainer = apps.get_model('specify', 'Splocalecontainer') Splocalecontaineritem = apps.get_model('specify', 'Splocalecontaineritem') @@ -925,23 +851,40 @@ def create_cotype_splocalecontaineritem(apps): # Create a Splocalecontaineritem record for each CollectionObject 
Splocalecontainer # NOTE: Each discipline has its own CollectionObject Splocalecontainer for container in Splocalecontainer.objects.filter(name='collectionobject', schematype=0): - container_item, _ = Splocalecontaineritem.objects.get_or_create( - name=COT_FIELD_NAME, - picklistname=COT_PICKLIST_NAME, - type='ManyToOne', - container=container, - isrequired=True - ) - Splocaleitemstr.objects.get_or_create( - language='en', - text=COT_TEXT, - itemname=container_item - ) - Splocaleitemstr.objects.get_or_create( - language='en', - text=COT_TEXT, - itemdesc=container_item - ) + container_item_attrs = { + "name": COT_FIELD_NAME, + "container": container + } + container_item = Splocalecontaineritem.objects.filter(**container_item_attrs).order_by("id").first() + if container_item is None: + resolved_item = Splocalecontaineritem.objects.create( + **container_item_attrs, + picklistname=COT_PICKLIST_NAME, + type="ManyToOne", + isrequired=True + ) + else: + resolved_item = container_item + + field_label_attrs = { + "language": "en", + "itemname":resolved_item + } + + field_label = Splocaleitemstr.objects.filter(**field_label_attrs).order_by("id").first() + + if field_label is None: + Splocaleitemstr.objects.create(**field_label_attrs, text=COT_TEXT) + + field_desc_attrs = { + "language": "en", + "itemdesc":resolved_item + } + + field_desc = Splocaleitemstr.objects.filter(**field_desc_attrs).order_by("id").first() + + if field_desc is None: + Splocaleitemstr.objects.create(**field_desc_attrs, text=COT_TEXT) # ########################################## # Used in 0004_stratigraphy_age.py @@ -1199,7 +1142,10 @@ def revert_update_cog_schema_config(apps): def update_age_schema_config(apps): # Revert before adding to avoid duplicates - revert_update_age_schema_config(apps) + # BUG: This will delete people's potentially modified Schema Config items + # If we want to avoid duplicates, we should check the creation code and + # prevent duplicates being created there + # 
revert_update_age_schema_config(apps) Discipline = apps.get_model('specify', 'Discipline') for discipline in Discipline.objects.all(): @@ -2014,90 +1960,26 @@ def revert_version_required(apps): def update_loan_and_gift_agent_fields(apps): Discipline = apps.get_model('specify', 'Discipline') + field_defaults = { + "ishidden": True + } for discipline in Discipline.objects.all(): for table, fields in MIGRATION_0038_FIELDS.items(): for field_name in fields: - update_table_field_schema_config_with_defaults(table, discipline.id, field_name, apps) + update_table_field_schema_config_with_defaults(table, discipline.id, field_name, apps, defaults=field_defaults) def revert_loan_and_gift_agent_fields(apps): for table, fields in MIGRATION_0038_FIELDS.items(): for field_name in fields: revert_table_field_schema_config(table, field_name, apps) -def update_loan_and_gift_agents(apps): - """ - Update field descriptions and display names using MIGRATION_0038_UPDATE_FIELDS - (tuple: (fieldName, newLabel, newDesc)). 
- """ - Splocalecontainer = apps.get_model('specify', 'Splocalecontainer') - Splocalecontaineritem = apps.get_model('specify', 'Splocalecontaineritem') - Splocaleitemstr = apps.get_model('specify', 'Splocaleitemstr') - - def upsert_single_str(*, itemdesc_id=None, itemname_id=None, text=""): - if (itemdesc_id is None) == (itemname_id is None): - raise ValueError("Exactly one of itemdesc_id or itemname_id must be provided") - - qs = Splocaleitemstr.objects.filter( - itemdesc_id=itemdesc_id, - itemname_id=itemname_id, - ).order_by("id") - - obj = qs.first() - if obj is None: - return Splocaleitemstr.objects.create( - itemdesc_id=itemdesc_id, - itemname_id=itemname_id, - text=text, - ) - - qs.exclude(id=obj.id).delete() - - if obj.text != text: - obj.text = text - obj.save(update_fields=["text"]) - - return obj - - for table, fields in MIGRATION_0038_UPDATE_FIELDS.items(): - containers = Splocalecontainer.objects.filter(name=table.lower()) - - for container in containers: - for (field_name, new_name, new_desc) in fields: - items = Splocalecontaineritem.objects.filter( - container=container, - name=field_name.lower(), - ) - - for item in items: - # Hide the existing field - if not item.ishidden: - item.ishidden = True - item.save(update_fields=["ishidden"]) - - upsert_single_str(itemdesc_id=item.id, text=new_desc) - upsert_single_str(itemname_id=item.id, text=new_name) - -def revert_loan_and_gift_agents(apps): - """ - Revert the field name/description updates. 
- """ - Splocalecontainer = apps.get_model('specify', 'Splocalecontainer') - Splocalecontaineritem = apps.get_model('specify', 'Splocalecontaineritem') - - for table, fields in MIGRATION_0038_UPDATE_FIELDS.items(): - containers = Splocalecontainer.objects.filter(name=table.lower()) - for container in containers: - for (field_name, _, _) in fields: - items = Splocalecontaineritem.objects.filter( - container=container, - name=field_name.lower() - ) - # If needed, reset ishidden or revert text - # ########################################## # Used in 0040_components.py # ########################################## +def remove_componentparent_item(apps): + revert_table_field_schema_config("CollectionObject", "componentParent", apps) + def remove_0029_schema_config_fields(apps, schema_editor=None): Splocalecontaineritem = apps.get_model('specify', 'Splocalecontaineritem') Splocaleitemstr = apps.get_model('specify', 'Splocaleitemstr') @@ -2216,14 +2098,6 @@ def reverse_hide_component_fields(apps, schema_editor=None): name=field_name.lower() ) items.update(ishidden=True) - -def componets_schema_config_migrations(apps, schema_editor=None): - remove_0029_schema_config_fields(apps, schema_editor) - create_table_schema_config_with_defaults(apps, schema_editor) - update_schema_config_field_desc(apps, schema_editor) - update_hidden_prop(apps, schema_editor) - create_cotype_splocalecontaineritem(apps) - hide_component_fields(apps, schema_editor) # ########################################## # Used in 0042_discipline_type_picklist.py diff --git a/specifyweb/specify/migrations/0039_agent_fields_for_loan_and_gift.py b/specifyweb/specify/migrations/0039_agent_fields_for_loan_and_gift.py index b6109646b4b..d5f1e0d9435 100644 --- a/specifyweb/specify/migrations/0039_agent_fields_for_loan_and_gift.py +++ b/specifyweb/specify/migrations/0039_agent_fields_for_loan_and_gift.py @@ -8,10 +8,8 @@ def consolidated_0038_forward(apps, schema_editor): usc.update_loan_and_gift_agent_fields(apps) - 
usc.update_loan_and_gift_agents(apps) def consolidated_0038_backward(apps, schema_editor): - usc.revert_loan_and_gift_agents(apps) usc.revert_loan_and_gift_agent_fields(apps) class Migration(migrations.Migration): diff --git a/specifyweb/specify/models_utils/load_datamodel.py b/specifyweb/specify/models_utils/load_datamodel.py index dbe2c8f8f72..67e1bfbe136 100644 --- a/specifyweb/specify/models_utils/load_datamodel.py +++ b/specifyweb/specify/models_utils/load_datamodel.py @@ -149,19 +149,29 @@ def name(self) -> str: raise ValueError("classname is required to compute the name") return self.classname.split(".")[-1] + def _all_fields(self, exclude_fields=False, exclude_relationships=False, exclude_id_field=False, exclude_virtual_fields=True) -> Iterable[Union["Field", "Relationship"]]: + if not exclude_fields: + yield from self.fields or [] # Handle None by using an empty list + if not exclude_relationships: + yield from self.relationships or [] # Handle None by using an empty list + if not exclude_virtual_fields: + yield from self.virtual_fields or [] + if (not exclude_id_field) and self.idField is not None: + yield self.idField + @property def django_name(self) -> str: return self.name.capitalize() @property def all_fields(self) -> list[Union["Field", "Relationship"]]: - def af() -> Iterable[Union["Field","Relationship"]]: - yield from self.fields or [] # Handle None by using an empty list - yield from self.relationships or [] # Handle None by using an empty list - if self.idField is not None: - yield self.idField - - return list(af()) + """ + A list of all non-virtual fields (including the ID field) and + relationships for the table. 
+ If you need more granularity over which fields to return, use + _all_fields or a filter object + """ + return list(self._all_fields()) def is_virtual_field(self, fieldname: str) -> bool: diff --git a/specifyweb/specify/tests/test_delete_blockers.py b/specifyweb/specify/tests/test_delete_blockers.py index d6aeab5fe59..19c3044bbf2 100644 --- a/specifyweb/specify/tests/test_delete_blockers.py +++ b/specifyweb/specify/tests/test_delete_blockers.py @@ -1,9 +1,11 @@ from django.test import Client import json -from specifyweb.backend.permissions.models import UserPolicy +from specifyweb.specify import models +from django.db import router +from django.test import TestCase +from specifyweb.backend.delete_blockers.views import _collect_delete_blockers from specifyweb.backend.trees.tests.test_trees import GeographyTree -from specifyweb.backend.businessrules.exceptions import BusinessRuleException from specifyweb.specify import models from specifyweb.specify.api.crud import delete_resource @@ -33,8 +35,20 @@ def _assertSame(self, base, other): def _get_blockers(self, obj): response = self.c.get(_url(obj)) + self.assertEqual( + response.status_code, + 200, + f"ERROR: {response.content.decode()}", + ) return json.loads(response.content.decode()) + def _assertContains(self, blockers, expected): + normalized = [ + {**obj, 'ids': sorted(obj['ids'])} + for obj in blockers + ] + self.assertIn({**expected, 'ids': sorted(expected['ids'])}, normalized) + def test_simple_agent_delete_blockers(self): prep_list = [] @@ -68,107 +82,85 @@ def test_children_dont_block_deletion(self): for node in self._node_list: self._assertSame(self._get_blockers(node), []) - def _create_discipline_with_owned_trees(self, name='Disposable Discipline'): - placeholder_geo = models.Geographytreedef.objects.create(name=f'{name} placeholder geo') - placeholder_geo_time = models.Geologictimeperiodtreedef.objects.create( - name=f'{name} placeholder geotime' + def 
test_many_to_many_join_blockers_are_normalized(self): + export_schema = models.Spexportschema.objects.create( + discipline=self.discipline ) - - discipline = models.Discipline.objects.create( - name=name, - type='paleobotany', - division=self.division, - datatype=self.datatype, - geographytreedef=placeholder_geo, - geologictimeperiodtreedef=placeholder_geo_time, + export_mapping = models.Spexportschemamapping.objects.create( + collectionmemberid=self.collection.id ) + export_schema.mappings.add(export_mapping) - geography_tree = models.Geographytreedef.objects.create( - name=f'{name} geography', - discipline=discipline, - ) - geography_rank = models.Geographytreedefitem.objects.create( - name='Planet', - rankid=0, - treedef=geography_tree, - ) - models.Geography.objects.create( - name='Earth', - rankid=0, - definition=geography_tree, - definitionitem=geography_rank, - ) + delete_blockers = self._get_blockers(export_schema) - geotime_tree = models.Geologictimeperiodtreedef.objects.create( - name=f'{name} geotime', - discipline=discipline, - ) - geotime_rank = models.Geologictimeperiodtreedefitem.objects.create( - name='Root', - rankid=0, - treedef=geotime_tree, + expected = [ + dict( + table='SpExportSchemaMapping', + field='spExportSchemas', + ids=[export_mapping.id], + ) + ] + + self._assertSame(delete_blockers, expected) + +class TestDeleteBlockersCascade(TestCase): + + def _assertContains(self, blockers, expected): + normalized = [ + {**obj, 'ids': sorted(obj['ids'])} + for obj in blockers + ] + self.assertIn({**expected, 'ids': sorted(expected['ids'])}, normalized) + + def test_division_collects_normalized_cascaded_discipline_blockers(self): + institution = models.Institution.objects.create( + name='Test Institution', + isaccessionsglobal=True, + issecurityon=False, + isserverbased=False, + issharinglocalities=True, + issinglegeographytree=True, ) - models.Geologictimeperiod.objects.create( - name='Root', - rankid=0, - definition=geotime_tree, - 
definitionitem=geotime_rank, + division = models.Division.objects.create( + institution=institution, + name='Test Division', ) - - taxon_tree = models.Taxontreedef.objects.create( - name=f'{name} taxon', - discipline=discipline, + geologictimeperiodtreedef = models.Geologictimeperiodtreedef.objects.create( + name='Test gtptd' ) - taxon_rank = models.Taxontreedefitem.objects.create( - name='Life', - rankid=0, - treedef=taxon_tree, + geographytreedef = models.Geographytreedef.objects.create( + name='Test gtd' ) - models.Taxon.objects.create( - name='Life', - rankid=0, - definition=taxon_tree, - definitionitem=taxon_rank, + datatype = models.Datatype.objects.create(name='Test datatype') + discipline = models.Discipline.objects.create( + geologictimeperiodtreedef=geologictimeperiodtreedef, + geographytreedef=geographytreedef, + division=division, + datatype=datatype, + type='paleobotany', ) - - discipline.geographytreedef = geography_tree - discipline.geologictimeperiodtreedef = geotime_tree - discipline.taxontreedef = taxon_tree - discipline.save() - return discipline - - def test_discipline_blocked_when_has_collections(self): - blockers = self._get_blockers(self.discipline) - self._assertSame( - blockers, - [dict(table='Collection', field='discipline', ids=[self.collection.id])], + export_schema = models.Spexportschema.objects.create( + discipline=discipline ) - - def test_discipline_blocked_when_has_users(self): - discipline = self._create_discipline_with_owned_trees('User Blocked Discipline') - resource_dir = models.Spappresourcedir.objects.create( - discipline=discipline, - specifyuser=self.specifyuser, - ispersonal=False, + export_mapping = models.Spexportschemamapping.objects.create( + collectionmemberid=1 ) - - blockers = self._get_blockers(discipline) - self._assertSame( - blockers, - [dict(table='Spappresourcedir', field='specifyuser', ids=[resource_dir.id])], + export_schema.mappings.add(export_mapping) + + using = router.db_for_write(division.__class__, 
instance=division) + delete_blockers = _collect_delete_blockers(division, using) + + self._assertContains( + delete_blockers, + dict( + table='SpExportSchemaMapping', + field='spExportSchemas', + ids=[export_mapping.id], + ), ) - - with self.assertRaises(BusinessRuleException): - delete_resource( - self.collection, self.agent, 'discipline', discipline.id, discipline.version + self.assertFalse( + any( + blocker['table'] == 'Spexportschema_exportmapping' + for blocker in delete_blockers ) - - def test_discipline_without_users_or_collections_can_be_deleted(self): - discipline = self._create_discipline_with_owned_trees('Deletable Discipline') - blockers = self._get_blockers(discipline) - self._assertSame(blockers, []) - - delete_resource( - self.collection, self.agent, 'discipline', discipline.id, discipline.version ) - self.assertFalse(models.Discipline.objects.filter(id=discipline.id).exists())