Upsert endpoint and CSV import upsert (#5970)
This PR introduces an `upsert` parameter (alongside the existing `data` parameter) for the `createOne` and `createMany` mutations. When `upsert` is set to `true`, the mutation looks for a record with the same id if an id was passed. If no id was passed, it leverages the existing duplicate-check mechanism to find a duplicate. If a matching record is found, the mutation performs an update instead of a create.

Unfortunately, I had to remove some nice tests that existed on the args factory. Those tests were mostly exercising the duplication-rule generation logic, but through a GraphQL angle. Since I moved the duplication-rule logic to a dedicated service, keeping the tests while mocking the service wouldn't really test anything useful. The right path would be to write new tests for that service that compare the JSON output rather than the GraphQL output, but I chose not to do that here, as it would amount to rewriting the tests from scratch and I have other competing priorities.
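As a rough sketch of the resulting API surface (the field selection and person fields below are illustrative assumptions, not taken from this diff), a client could now send:

  import { gql } from '@apollo/client';

  // Hedged sketch: createPeople now accepts an optional $upsert flag alongside $data.
  const CREATE_PEOPLE_WITH_UPSERT = gql`
    mutation CreatePeople($data: [PersonCreateInput!]!, $upsert: Boolean) {
      createPeople(data: $data, upsert: $upsert) {
        id
      }
    }
  `;

  // With upsert: true, an item carrying an existing id is updated in place;
  // an item without an id goes through the duplicate check before falling back to a create.
  const variables = {
    data: [
      { id: '6205681e-7c11-40b4-9e32-f523dbe54590', jobTitle: 'CTO' }, // id matches -> update
      { name: { firstName: 'Jane', lastName: 'Doe' } }, // no id -> duplicate check, else create
    ],
    upsert: true,
  };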
		| @@ -1,5 +1,5 @@ | ||||
| module.exports = { | ||||
|   schema: process.env.REACT_APP_SERVER_BASE_URL + '/metadata', | ||||
|   schema: (process.env.REACT_APP_SERVER_BASE_URL ?? 'http://localhost:3000') + '/metadata', | ||||
|   documents: [ | ||||
|     './src/modules/databases/graphql/**/*.ts', | ||||
|     './src/modules/object-metadata/graphql/*.ts', | ||||
|   | ||||
| @@ -1,5 +1,5 @@ | ||||
| module.exports = { | ||||
|   schema: process.env.REACT_APP_SERVER_BASE_URL + '/graphql', | ||||
|   schema: (process.env.REACT_APP_SERVER_BASE_URL ?? 'http://localhost:3000') + '/graphql', | ||||
|   documents: [ | ||||
|     '!./src/modules/databases/**', | ||||
|     '!./src/modules/object-metadata/**', | ||||
|   | ||||
| @@ -291,7 +291,9 @@ export type Mutation = { | ||||
|   deleteCurrentWorkspace: Workspace; | ||||
|   deleteOneObject: Object; | ||||
|   deleteUser: User; | ||||
|   disablePostgresProxy: PostgresCredentials; | ||||
|   emailPasswordResetLink: EmailPasswordResetLink; | ||||
|   enablePostgresProxy: PostgresCredentials; | ||||
|   exchangeAuthorizationCode: ExchangeAuthCode; | ||||
|   generateApiKeyToken: ApiKeyToken; | ||||
|   generateJWT: AuthTokens; | ||||
| @@ -483,6 +485,14 @@ export type PageInfo = { | ||||
|   startCursor?: Maybe<Scalars['ConnectionCursor']>; | ||||
| }; | ||||
|  | ||||
| export type PostgresCredentials = { | ||||
|   __typename?: 'PostgresCredentials'; | ||||
|   id: Scalars['UUID']; | ||||
|   password: Scalars['String']; | ||||
|   user: Scalars['String']; | ||||
|   workspaceId: Scalars['String']; | ||||
| }; | ||||
|  | ||||
| export type ProductPriceEntity = { | ||||
|   __typename?: 'ProductPriceEntity'; | ||||
|   created: Scalars['Float']; | ||||
| @@ -506,6 +516,7 @@ export type Query = { | ||||
|   currentUser: User; | ||||
|   currentWorkspace: Workspace; | ||||
|   findWorkspaceFromInviteHash: Workspace; | ||||
|   getPostgresCredentials?: Maybe<PostgresCredentials>; | ||||
|   getProductPrices: ProductPricesEntity; | ||||
|   getTimelineCalendarEventsFromCompanyId: TimelineCalendarEventsWithTotal; | ||||
|   getTimelineCalendarEventsFromPersonId: TimelineCalendarEventsWithTotal; | ||||
| @@ -1061,8 +1072,6 @@ export type GetTimelineThreadsFromPersonIdQueryVariables = Exact<{ | ||||
|  | ||||
| export type GetTimelineThreadsFromPersonIdQuery = { __typename?: 'Query', getTimelineThreadsFromPersonId: { __typename?: 'TimelineThreadsWithTotal', totalNumberOfThreads: number, timelineThreads: Array<{ __typename?: 'TimelineThread', id: any, read: boolean, visibility: MessageChannelVisibility, lastMessageReceivedAt: string, lastMessageBody: string, subject: string, numberOfMessagesInThread: number, participantCount: number, firstParticipant: { __typename?: 'TimelineThreadParticipant', personId?: any | null, workspaceMemberId?: any | null, firstName: string, lastName: string, displayName: string, avatarUrl: string, handle: string }, lastTwoParticipants: Array<{ __typename?: 'TimelineThreadParticipant', personId?: any | null, workspaceMemberId?: any | null, firstName: string, lastName: string, displayName: string, avatarUrl: string, handle: string }> }> } }; | ||||
|  | ||||
| export type TimelineThreadFragment = { __typename?: 'TimelineThread', id: any, subject: string, lastMessageReceivedAt: string }; | ||||
|  | ||||
| export type TrackMutationVariables = Exact<{ | ||||
|   type: Scalars['String']; | ||||
|   data: Scalars['JSON']; | ||||
| @@ -1364,13 +1373,6 @@ export const TimelineThreadsWithTotalFragmentFragmentDoc = gql` | ||||
|   } | ||||
| } | ||||
|     ${TimelineThreadFragmentFragmentDoc}`; | ||||
| export const TimelineThreadFragmentDoc = gql` | ||||
|     fragment timelineThread on TimelineThread { | ||||
|   id | ||||
|   subject | ||||
|   lastMessageReceivedAt | ||||
| } | ||||
|     `; | ||||
| export const AuthTokenFragmentFragmentDoc = gql` | ||||
|     fragment AuthTokenFragment on AuthToken { | ||||
|   token | ||||
|   | ||||
| @@ -0,0 +1,5 @@ | ||||
| import { RecordGqlConnection } from '@/object-record/graphql/types/RecordGqlConnection'; | ||||
|  | ||||
| export type RecordGqlOperationFindDuplicatesResult = { | ||||
|   [objectNamePlural: string]: RecordGqlConnection[]; | ||||
| }; | ||||
| @@ -3,8 +3,8 @@ import { gql } from '@apollo/client'; | ||||
| import { Person } from '@/people/types/Person'; | ||||
|  | ||||
| export const query = gql` | ||||
|   mutation CreatePeople($data: [PersonCreateInput!]!) { | ||||
|     createPeople(data: $data) { | ||||
|   mutation CreatePeople($data: [PersonCreateInput!]!, $upsert: Boolean) { | ||||
|     createPeople(data: $data, upsert: $upsert) { | ||||
|       __typename | ||||
|       xLink { | ||||
|         label | ||||
|   | ||||
| @@ -4,8 +4,8 @@ import { getPeopleMock } from '~/testing/mock-data/people'; | ||||
| const peopleMock = getPeopleMock(); | ||||
|  | ||||
| export const query = gql` | ||||
|   query FindDuplicatePerson($id: ID!) { | ||||
|     personDuplicates(id: $id) { | ||||
|   query FindDuplicatePerson($ids: [ID!]!) { | ||||
|     personDuplicates(ids: $ids) { | ||||
|       edges { | ||||
|         node { | ||||
|           __typename | ||||
| @@ -38,32 +38,32 @@ export const query = gql` | ||||
|         startCursor | ||||
|         endCursor | ||||
|       } | ||||
|       totalCount | ||||
|     } | ||||
|   } | ||||
| `; | ||||
|  | ||||
| export const variables = { | ||||
|   id: '6205681e-7c11-40b4-9e32-f523dbe54590', | ||||
|   ids: ['6205681e-7c11-40b4-9e32-f523dbe54590'], | ||||
| }; | ||||
|  | ||||
| export const responseData = { | ||||
|   personDuplicates: { | ||||
|     edges: [ | ||||
|       { | ||||
|         node: {  ...peopleMock[0], updatedAt: '' }, | ||||
|         cursor: 'cursor1', | ||||
|   personDuplicates: [ | ||||
|     { | ||||
|       edges: [ | ||||
|         { | ||||
|           node: { ...peopleMock[0], updatedAt: '' }, | ||||
|           cursor: 'cursor1', | ||||
|         }, | ||||
|         { | ||||
|           node: { ...peopleMock[1], updatedAt: '' }, | ||||
|           cursor: 'cursor2', | ||||
|         }, | ||||
|       ], | ||||
|       pageInfo: { | ||||
|         hasNextPage: false, | ||||
|         startCursor: 'cursor1', | ||||
|         endCursor: 'cursor2', | ||||
|       }, | ||||
|       { | ||||
|         node: { ...peopleMock[1], updatedAt: '' }, | ||||
|         cursor: 'cursor2', | ||||
|       }, | ||||
|     ], | ||||
|     pageInfo: { | ||||
|       hasNextPage: false, | ||||
|       startCursor: 'cursor1', | ||||
|       endCursor: 'cursor2', | ||||
|     }, | ||||
|     totalCount: 2, | ||||
|   }, | ||||
|   ], | ||||
| }; | ||||
|   | ||||
| @@ -5,8 +5,8 @@ import { RecoilRoot } from 'recoil'; | ||||
| import { useCreateManyRecordsMutation } from '@/object-record/hooks/useCreateManyRecordsMutation'; | ||||
|  | ||||
| const expectedQueryTemplate = ` | ||||
|   mutation CreatePeople($data: [PersonCreateInput!]!) { | ||||
|     createPeople(data: $data) { | ||||
|   mutation CreatePeople($data: [PersonCreateInput!]!, $upsert: Boolean) { | ||||
|     createPeople(data: $data, upsert: $upsert) { | ||||
|       __typename | ||||
|       xLink { | ||||
|         label | ||||
|   | ||||
| @@ -42,7 +42,7 @@ describe('useFindDuplicateRecords', () => { | ||||
|     const { result } = renderHook( | ||||
|       () => | ||||
|         useFindDuplicateRecords({ | ||||
|           objectRecordId, | ||||
|           objectRecordIds: [objectRecordId], | ||||
|           objectNameSingular, | ||||
|         }), | ||||
|       { | ||||
| @@ -54,7 +54,7 @@ describe('useFindDuplicateRecords', () => { | ||||
|  | ||||
|     await waitFor(() => { | ||||
|       expect(result.current.loading).toBe(false); | ||||
|       expect(result.current.records).toBeDefined(); | ||||
|       expect(result.current.results).toBeDefined(); | ||||
|     }); | ||||
|  | ||||
|     expect(mocks[0].result).toHaveBeenCalled(); | ||||
|   | ||||
| @@ -5,8 +5,8 @@ import { RecoilRoot } from 'recoil'; | ||||
| import { useFindDuplicateRecordsQuery } from '@/object-record/hooks/useFindDuplicatesRecordsQuery'; | ||||
|  | ||||
| const expectedQueryTemplate = ` | ||||
|   query FindDuplicatePerson($id: ID!) { | ||||
|     personDuplicates(id: $id) { | ||||
|   query FindDuplicatePerson($ids: [ID!]!) { | ||||
|     personDuplicates(ids: $ids) { | ||||
|       edges { | ||||
|         node { | ||||
|           __typename | ||||
| @@ -39,7 +39,6 @@ const expectedQueryTemplate = ` | ||||
|         startCursor | ||||
|         endCursor | ||||
|       } | ||||
|       totalCount | ||||
|      } | ||||
|     } | ||||
| `.replace(/\s/g, ''); | ||||
|   | ||||
| @@ -49,10 +49,11 @@ export const useCreateManyRecords = < | ||||
|  | ||||
|   const createManyRecords = async ( | ||||
|     recordsToCreate: Partial<CreatedObjectRecord>[], | ||||
|     upsert?: boolean, | ||||
|   ) => { | ||||
|     const sanitizedCreateManyRecordsInput = recordsToCreate.map( | ||||
|       (recordToCreate) => { | ||||
|         const idForCreation = recordToCreate?.id ?? v4(); | ||||
|         const idForCreation = recordToCreate?.id ?? (upsert ? undefined : v4()); | ||||
|  | ||||
|         return { | ||||
|           ...sanitizeRecordInput({ | ||||
| @@ -67,8 +68,12 @@ export const useCreateManyRecords = < | ||||
|     const recordsCreatedInCache = []; | ||||
|  | ||||
|     for (const recordToCreate of sanitizedCreateManyRecordsInput) { | ||||
|       if (recordToCreate.id === null) { | ||||
|         continue; | ||||
|       } | ||||
|  | ||||
|       const recordCreatedInCache = createOneRecordInCache({ | ||||
|         ...recordToCreate, | ||||
|         ...(recordToCreate as { id: string }), | ||||
|         __typename: getObjectTypename(objectMetadataItem.nameSingular), | ||||
|       }); | ||||
|  | ||||
| @@ -94,6 +99,7 @@ export const useCreateManyRecords = < | ||||
|       mutation: createManyRecordsMutation, | ||||
|       variables: { | ||||
|         data: sanitizedCreateManyRecordsInput, | ||||
|         upsert: upsert, | ||||
|       }, | ||||
|       update: (cache, { data }) => { | ||||
|         const records = data?.[mutationResponseField]; | ||||
|   | ||||
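For context, a hedged caller-side sketch of the updated useCreateManyRecords hook above (the hook options and person fields are assumptions for illustration, not part of this diff). With upsert enabled, the hook leaves a missing id undefined instead of generating one, so the backend can run its duplicate check:

  const { createManyRecords } = useCreateManyRecords({
    objectNameSingular: 'person', // assumed option shape for illustration
  });

  await createManyRecords(
    [
      { id: '6205681e-7c11-40b4-9e32-f523dbe54590', city: 'Paris' }, // existing id -> update
      { email: 'jane@example.com' }, // no id -> duplicate check, else create
    ],
    true, // upsert
  );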
| @@ -34,12 +34,16 @@ export const useCreateManyRecordsMutation = ({ | ||||
|   const createManyRecordsMutation = gql` | ||||
|     mutation Create${capitalize( | ||||
|       objectMetadataItem.namePlural, | ||||
|     )}($data: [${capitalize(objectMetadataItem.nameSingular)}CreateInput!]!)  { | ||||
|       ${mutationResponseField}(data: $data) ${mapObjectMetadataToGraphQLQuery({ | ||||
|         objectMetadataItems, | ||||
|         objectMetadataItem, | ||||
|         recordGqlFields, | ||||
|       })} | ||||
|     )}($data: [${capitalize( | ||||
|       objectMetadataItem.nameSingular, | ||||
|     )}CreateInput!]!, $upsert: Boolean)  { | ||||
|       ${mutationResponseField}(data: $data, upsert: $upsert) ${mapObjectMetadataToGraphQLQuery( | ||||
|         { | ||||
|           objectMetadataItems, | ||||
|           objectMetadataItem, | ||||
|           recordGqlFields, | ||||
|         }, | ||||
|       )} | ||||
|   }`; | ||||
|  | ||||
|   return { | ||||
|   | ||||
| @@ -5,7 +5,7 @@ import { useObjectMetadataItem } from '@/object-metadata/hooks/useObjectMetadata | ||||
| import { ObjectMetadataItemIdentifier } from '@/object-metadata/types/ObjectMetadataItemIdentifier'; | ||||
| import { getRecordsFromRecordConnection } from '@/object-record/cache/utils/getRecordsFromRecordConnection'; | ||||
| import { RecordGqlConnection } from '@/object-record/graphql/types/RecordGqlConnection'; | ||||
| import { RecordGqlOperationFindManyResult } from '@/object-record/graphql/types/RecordGqlOperationFindManyResult'; | ||||
| import { RecordGqlOperationFindDuplicatesResult } from '@/object-record/graphql/types/RecordGqlOperationFindDuplicatesResults'; | ||||
| import { useFindDuplicateRecordsQuery } from '@/object-record/hooks/useFindDuplicatesRecordsQuery'; | ||||
| import { ObjectRecord } from '@/object-record/types/ObjectRecord'; | ||||
| import { getFindDuplicateRecordsQueryResponseField } from '@/object-record/utils/getFindDuplicateRecordsQueryResponseField'; | ||||
| @@ -14,12 +14,12 @@ import { useSnackBar } from '@/ui/feedback/snack-bar-manager/hooks/useSnackBar'; | ||||
| import { logError } from '~/utils/logError'; | ||||
|  | ||||
| export const useFindDuplicateRecords = <T extends ObjectRecord = ObjectRecord>({ | ||||
|   objectRecordId = '', | ||||
|   objectRecordIds = [], | ||||
|   objectNameSingular, | ||||
|   onCompleted, | ||||
| }: ObjectMetadataItemIdentifier & { | ||||
|   objectRecordId: string | undefined; | ||||
|   onCompleted?: (data: RecordGqlConnection) => void; | ||||
|   objectRecordIds: string[] | undefined; | ||||
|   onCompleted?: (data: RecordGqlConnection[]) => void; | ||||
|   skip?: boolean; | ||||
| }) => { | ||||
|   const findDuplicateQueryStateIdentifier = objectNameSingular; | ||||
| @@ -38,46 +38,48 @@ export const useFindDuplicateRecords = <T extends ObjectRecord = ObjectRecord>({ | ||||
|     objectMetadataItem.nameSingular, | ||||
|   ); | ||||
|  | ||||
|   const { data, loading, error } = useQuery<RecordGqlOperationFindManyResult>( | ||||
|     findDuplicateRecordsQuery, | ||||
|     { | ||||
|       variables: { | ||||
|         id: objectRecordId, | ||||
|   const { data, loading, error } = | ||||
|     useQuery<RecordGqlOperationFindDuplicatesResult>( | ||||
|       findDuplicateRecordsQuery, | ||||
|       { | ||||
|         variables: { | ||||
|           ids: objectRecordIds, | ||||
|         }, | ||||
|         onCompleted: (data) => { | ||||
|           onCompleted?.(data[queryResponseField]); | ||||
|         }, | ||||
|         onError: (error) => { | ||||
|           logError( | ||||
|             `useFindDuplicateRecords for "${objectMetadataItem.nameSingular}" error : ` + | ||||
|               error, | ||||
|           ); | ||||
|           enqueueSnackBar( | ||||
|             `Error during useFindDuplicateRecords for "${objectMetadataItem.nameSingular}", ${error.message}`, | ||||
|             { | ||||
|               variant: SnackBarVariant.Error, | ||||
|             }, | ||||
|           ); | ||||
|         }, | ||||
|       }, | ||||
|       onCompleted: (data) => { | ||||
|         onCompleted?.(data[queryResponseField]); | ||||
|       }, | ||||
|       onError: (error) => { | ||||
|         logError( | ||||
|           `useFindDuplicateRecords for "${objectMetadataItem.nameSingular}" error : ` + | ||||
|             error, | ||||
|         ); | ||||
|         enqueueSnackBar( | ||||
|           `Error during useFindDuplicateRecords for "${objectMetadataItem.nameSingular}", ${error.message}`, | ||||
|           { | ||||
|             variant: SnackBarVariant.Error, | ||||
|           }, | ||||
|         ); | ||||
|       }, | ||||
|     }, | ||||
|   ); | ||||
|     ); | ||||
|  | ||||
|   const objectRecordConnection = data?.[queryResponseField]; | ||||
|   const objectResults = data?.[queryResponseField]; | ||||
|  | ||||
|   const records = useMemo( | ||||
|   const results = useMemo( | ||||
|     () => | ||||
|       objectRecordConnection | ||||
|         ? (getRecordsFromRecordConnection({ | ||||
|             recordConnection: objectRecordConnection, | ||||
|           }) as T[]) | ||||
|         : [], | ||||
|     [objectRecordConnection], | ||||
|       objectResults?.map((result: RecordGqlConnection) => { | ||||
|         return result | ||||
|           ? (getRecordsFromRecordConnection({ | ||||
|               recordConnection: result, | ||||
|             }) as T[]) | ||||
|           : []; | ||||
|       }), | ||||
|     [objectResults], | ||||
|   ); | ||||
|  | ||||
|   return { | ||||
|     objectMetadataItem, | ||||
|     records, | ||||
|     totalCount: objectRecordConnection?.totalCount, | ||||
|     results, | ||||
|     loading, | ||||
|     error, | ||||
|     queryStateIdentifier: findDuplicateQueryStateIdentifier, | ||||
|   | ||||
| @@ -22,10 +22,10 @@ export const useFindDuplicateRecordsQuery = ({ | ||||
|   const findDuplicateRecordsQuery = gql` | ||||
|     query FindDuplicate${capitalize( | ||||
|       objectMetadataItem.nameSingular, | ||||
|     )}($id: ID!) { | ||||
|     )}($ids: [ID!]!) { | ||||
|       ${getFindDuplicateRecordsQueryResponseField( | ||||
|         objectMetadataItem.nameSingular, | ||||
|       )}(id: $id) { | ||||
|       )}(ids: $ids) { | ||||
|         edges { | ||||
|           node ${mapObjectMetadataToGraphQLQuery({ | ||||
|             objectMetadataItems, | ||||
| @@ -38,7 +38,6 @@ export const useFindDuplicateRecordsQuery = ({ | ||||
|           startCursor | ||||
|           endCursor | ||||
|         } | ||||
|         ${isAggregationEnabled(objectMetadataItem) ? 'totalCount' : ''} | ||||
|       } | ||||
|     } | ||||
|   `; | ||||
|   | ||||
| @@ -12,18 +12,19 @@ export const RecordDetailDuplicatesSection = ({ | ||||
|   objectRecordId: string; | ||||
|   objectNameSingular: string; | ||||
| }) => { | ||||
|   const { records: duplicateRecords } = useFindDuplicateRecords({ | ||||
|     objectRecordId, | ||||
|   const { results: queryResults } = useFindDuplicateRecords({ | ||||
|     objectRecordIds: [objectRecordId], | ||||
|     objectNameSingular, | ||||
|   }); | ||||
|  | ||||
|   if (!duplicateRecords.length) return null; | ||||
|   if (!queryResults || !queryResults[0] || queryResults[0].length === 0) | ||||
|     return null; | ||||
|  | ||||
|   return ( | ||||
|     <RecordDetailSection> | ||||
|       <RecordDetailSectionHeader title="Duplicates" /> | ||||
|       <RecordDetailRecordsList> | ||||
|         {duplicateRecords.slice(0, 5).map((duplicateRecord) => ( | ||||
|         {queryResults[0].slice(0, 5).map((duplicateRecord) => ( | ||||
|           <RecordDetailRecordsListItem key={duplicateRecord.id}> | ||||
|             <RecordChip | ||||
|               record={duplicateRecord} | ||||
|   | ||||
| @@ -20,8 +20,11 @@ const companyMocks = [ | ||||
|   { | ||||
|     request: { | ||||
|       query: gql` | ||||
|         mutation CreateCompanies($data: [CompanyCreateInput!]!) { | ||||
|           createCompanies(data: $data) { | ||||
|         mutation CreateCompanies( | ||||
|           $data: [CompanyCreateInput!]! | ||||
|           $upsert: Boolean | ||||
|         ) { | ||||
|           createCompanies(data: $data, upsert: $upsert) { | ||||
|             __typename | ||||
|             xLink { | ||||
|               label | ||||
| @@ -58,6 +61,7 @@ const companyMocks = [ | ||||
|             id: companyId, | ||||
|           }, | ||||
|         ], | ||||
|         upsert: true, | ||||
|       }, | ||||
|     }, | ||||
|     result: jest.fn(() => ({ | ||||
|   | ||||
| @@ -26,7 +26,7 @@ export const useSpreadsheetRecordImport = (objectNameSingular: string) => { | ||||
|     .filter( | ||||
|       (x) => | ||||
|         x.isActive && | ||||
|         !x.isSystem && | ||||
|         (!x.isSystem || x.name === 'id') && | ||||
|         x.name !== 'createdAt' && | ||||
|         (x.type !== FieldMetadataType.Relation || x.toRelationMetadata), | ||||
|     ) | ||||
| @@ -110,11 +110,15 @@ export const useSpreadsheetRecordImport = (objectNameSingular: string) => { | ||||
|  | ||||
|             switch (field.type) { | ||||
|               case FieldMetadataType.Boolean: | ||||
|                 fieldMapping[field.name] = value === 'true' || value === true; | ||||
|                 if (value !== undefined) { | ||||
|                   fieldMapping[field.name] = value === 'true' || value === true; | ||||
|                 } | ||||
|                 break; | ||||
|               case FieldMetadataType.Number: | ||||
|               case FieldMetadataType.Numeric: | ||||
|                 fieldMapping[field.name] = Number(value); | ||||
|                 if (value !== undefined) { | ||||
|                   fieldMapping[field.name] = Number(value); | ||||
|                 } | ||||
|                 break; | ||||
|               case FieldMetadataType.Currency: | ||||
|                 if (value !== undefined) { | ||||
| @@ -154,14 +158,16 @@ export const useSpreadsheetRecordImport = (objectNameSingular: string) => { | ||||
|                 } | ||||
|                 break; | ||||
|               default: | ||||
|                 fieldMapping[field.name] = value; | ||||
|                 if (value !== undefined) { | ||||
|                   fieldMapping[field.name] = value; | ||||
|                 } | ||||
|                 break; | ||||
|             } | ||||
|           } | ||||
|           return fieldMapping; | ||||
|         }); | ||||
|         try { | ||||
|           await createManyRecords(createInputs); | ||||
|           await createManyRecords(createInputs, true); | ||||
|         } catch (error: any) { | ||||
|           enqueueSnackBar(error?.message || 'Something went wrong', { | ||||
|             variant: SnackBarVariant.Error, | ||||
|   | ||||
| @@ -24,6 +24,7 @@ describe('formatToHumanReadableTime', () => { | ||||
|   it('should format the date to a human-readable time', () => { | ||||
|     const date = new Date('2022-01-01T12:30:00'); | ||||
|     const result = formatToHumanReadableTime(date); | ||||
|     expect(result).toBe('12:30 PM'); | ||||
|     // it seems when running locally on MacOS the space is not the same | ||||
|     expect(['12:30 PM', '12:30 PM']).toContain(result); | ||||
|   }); | ||||
| }); | ||||
|   | ||||
| @@ -1,206 +0,0 @@ | ||||
| import { Test, TestingModule } from '@nestjs/testing'; | ||||
|  | ||||
| import { RecordFilter } from 'src/engine/api/graphql/workspace-query-builder/interfaces/record.interface'; | ||||
| import { FindDuplicatesResolverArgs } from 'src/engine/api/graphql/workspace-resolver-builder/interfaces/workspace-resolvers-builder.interface'; | ||||
|  | ||||
| import { ArgsAliasFactory } from 'src/engine/api/graphql/workspace-query-builder/factories/args-alias.factory'; | ||||
| import { FieldsStringFactory } from 'src/engine/api/graphql/workspace-query-builder/factories/fields-string.factory'; | ||||
| import { FindDuplicatesQueryFactory } from 'src/engine/api/graphql/workspace-query-builder/factories/find-duplicates-query.factory'; | ||||
| import { workspaceQueryBuilderOptionsMock } from 'src/engine/api/graphql/workspace-query-builder/__mocks__/workspace-query-builder-options.mock'; | ||||
|  | ||||
| describe('FindDuplicatesQueryFactory', () => { | ||||
|   let service: FindDuplicatesQueryFactory; | ||||
|   const argAliasCreate = jest.fn(); | ||||
|  | ||||
|   beforeEach(async () => { | ||||
|     jest.resetAllMocks(); | ||||
|  | ||||
|     const module: TestingModule = await Test.createTestingModule({ | ||||
|       providers: [ | ||||
|         FindDuplicatesQueryFactory, | ||||
|         { | ||||
|           provide: FieldsStringFactory, | ||||
|           useValue: { | ||||
|             create: jest.fn().mockResolvedValue('fieldsString'), | ||||
|             // Mock implementation of FieldsStringFactory methods if needed | ||||
|           }, | ||||
|         }, | ||||
|         { | ||||
|           provide: ArgsAliasFactory, | ||||
|           useValue: { | ||||
|             create: argAliasCreate, | ||||
|             // Mock implementation of ArgsAliasFactory methods if needed | ||||
|           }, | ||||
|         }, | ||||
|       ], | ||||
|     }).compile(); | ||||
|  | ||||
|     service = module.get<FindDuplicatesQueryFactory>( | ||||
|       FindDuplicatesQueryFactory, | ||||
|     ); | ||||
|   }); | ||||
|  | ||||
|   it('should be defined', () => { | ||||
|     expect(service).toBeDefined(); | ||||
|   }); | ||||
|  | ||||
|   describe('create', () => { | ||||
|     it('should return (first: 0) as a filter when args are missing', async () => { | ||||
|       const args: FindDuplicatesResolverArgs<RecordFilter> = {}; | ||||
|  | ||||
|       const query = await service.create( | ||||
|         args, | ||||
|         workspaceQueryBuilderOptionsMock, | ||||
|       ); | ||||
|  | ||||
|       expect(query.trim()).toEqual(`query { | ||||
|         objectNameCollection(first: 0) { | ||||
|           fieldsString | ||||
|         } | ||||
|       }`); | ||||
|     }); | ||||
|  | ||||
|     it('should use firstName and lastName as a filter when both args are present', async () => { | ||||
|       argAliasCreate.mockReturnValue({ | ||||
|         nameFirstName: 'John', | ||||
|         nameLastName: 'Doe', | ||||
|       }); | ||||
|  | ||||
|       const args: FindDuplicatesResolverArgs<RecordFilter> = { | ||||
|         data: { | ||||
|           name: { | ||||
|             firstName: 'John', | ||||
|             lastName: 'Doe', | ||||
|           }, | ||||
|         } as unknown as RecordFilter, | ||||
|       }; | ||||
|  | ||||
|       const query = await service.create(args, { | ||||
|         ...workspaceQueryBuilderOptionsMock, | ||||
|         objectMetadataItem: { | ||||
|           ...workspaceQueryBuilderOptionsMock.objectMetadataItem, | ||||
|           nameSingular: 'person', | ||||
|         }, | ||||
|       }); | ||||
|  | ||||
|       expect(query.trim()).toEqual(`query { | ||||
|         personCollection(filter: {or:[{nameFirstName:{eq:"John"},nameLastName:{eq:"Doe"}}]}) { | ||||
|           fieldsString | ||||
|         } | ||||
|       }`); | ||||
|     }); | ||||
|  | ||||
|     it('should ignore an argument if the string length is less than 3', async () => { | ||||
|       argAliasCreate.mockReturnValue({ | ||||
|         linkedinLinkUrl: 'ab', | ||||
|         email: 'test@test.com', | ||||
|       }); | ||||
|  | ||||
|       const args: FindDuplicatesResolverArgs<RecordFilter> = { | ||||
|         data: { | ||||
|           linkedinLinkUrl: 'ab', | ||||
|           email: 'test@test.com', | ||||
|         } as unknown as RecordFilter, | ||||
|       }; | ||||
|  | ||||
|       const query = await service.create(args, { | ||||
|         ...workspaceQueryBuilderOptionsMock, | ||||
|         objectMetadataItem: { | ||||
|           ...workspaceQueryBuilderOptionsMock.objectMetadataItem, | ||||
|           nameSingular: 'person', | ||||
|         }, | ||||
|       }); | ||||
|  | ||||
|       expect(query.trim()).toEqual(`query { | ||||
|         personCollection(filter: {or:[{email:{eq:"test@test.com"}}]}) { | ||||
|           fieldsString | ||||
|         } | ||||
|       }`); | ||||
|     }); | ||||
|  | ||||
|     it('should return (first: 0) as a filter when only firstName is present', async () => { | ||||
|       argAliasCreate.mockReturnValue({ | ||||
|         nameFirstName: 'John', | ||||
|       }); | ||||
|  | ||||
|       const args: FindDuplicatesResolverArgs<RecordFilter> = { | ||||
|         data: { | ||||
|           name: { | ||||
|             firstName: 'John', | ||||
|           }, | ||||
|         } as unknown as RecordFilter, | ||||
|       }; | ||||
|  | ||||
|       const query = await service.create(args, { | ||||
|         ...workspaceQueryBuilderOptionsMock, | ||||
|         objectMetadataItem: { | ||||
|           ...workspaceQueryBuilderOptionsMock.objectMetadataItem, | ||||
|           nameSingular: 'person', | ||||
|         }, | ||||
|       }); | ||||
|  | ||||
|       expect(query.trim()).toEqual(`query { | ||||
|         personCollection(first: 0) { | ||||
|           fieldsString | ||||
|         } | ||||
|       }`); | ||||
|     }); | ||||
|  | ||||
|     it('should use "currentRecord" as query args when its present', async () => { | ||||
|       argAliasCreate.mockReturnValue({ | ||||
|         nameFirstName: 'John', | ||||
|       }); | ||||
|  | ||||
|       const args: FindDuplicatesResolverArgs<RecordFilter> = { | ||||
|         id: 'uuid', | ||||
|       }; | ||||
|  | ||||
|       const query = await service.create( | ||||
|         args, | ||||
|         { | ||||
|           ...workspaceQueryBuilderOptionsMock, | ||||
|           objectMetadataItem: { | ||||
|             ...workspaceQueryBuilderOptionsMock.objectMetadataItem, | ||||
|             nameSingular: 'person', | ||||
|           }, | ||||
|         }, | ||||
|         { | ||||
|           nameFirstName: 'Peter', | ||||
|           nameLastName: 'Parker', | ||||
|         }, | ||||
|       ); | ||||
|  | ||||
|       expect(query.trim()).toEqual(`query { | ||||
|         personCollection(filter: {id:{neq:"uuid"},or:[{nameFirstName:{eq:"Peter"},nameLastName:{eq:"Parker"}}]}) { | ||||
|           fieldsString | ||||
|         } | ||||
|       }`); | ||||
|     }); | ||||
|   }); | ||||
|  | ||||
|   describe('buildQueryForExistingRecord', () => { | ||||
|     it(`should include all the fields that exist for person inside "duplicateCriteriaCollection" constant`, async () => { | ||||
|       const query = service.buildQueryForExistingRecord('uuid', { | ||||
|         ...workspaceQueryBuilderOptionsMock, | ||||
|         objectMetadataItem: { | ||||
|           ...workspaceQueryBuilderOptionsMock.objectMetadataItem, | ||||
|           nameSingular: 'person', | ||||
|         }, | ||||
|       }); | ||||
|  | ||||
|       expect(query.trim()).toEqual(`query { | ||||
|         personCollection(filter: { id: { eq: "uuid" }}){ | ||||
|           edges { | ||||
|             node { | ||||
|               __typename | ||||
|               nameFirstName | ||||
| nameLastName | ||||
| linkedinLinkUrl | ||||
| email | ||||
|             } | ||||
|           } | ||||
|         } | ||||
|       }`); | ||||
|     }); | ||||
|   }); | ||||
| }); | ||||
| @@ -22,7 +22,7 @@ export class CreateManyQueryFactory { | ||||
|   ) {} | ||||
|  | ||||
|   async create<Record extends IRecord = IRecord>( | ||||
|     args: CreateManyResolverArgs<Record>, | ||||
|     args: CreateManyResolverArgs<Partial<Record>>, | ||||
|     options: WorkspaceQueryBuilderOptions, | ||||
|   ) { | ||||
|     const fieldsString = await this.fieldsStringFactory.create( | ||||
|   | ||||
| @@ -6,6 +6,7 @@ import isEmpty from 'lodash.isempty'; | ||||
|  | ||||
| import { FieldMetadataInterface } from 'src/engine/metadata-modules/field-metadata/interfaces/field-metadata.interface'; | ||||
| import { ObjectMetadataInterface } from 'src/engine/metadata-modules/field-metadata/interfaces/object-metadata.interface'; | ||||
| import { Record } from 'src/engine/api/graphql/workspace-query-builder/interfaces/record.interface'; | ||||
|  | ||||
| import { isRelationFieldMetadataType } from 'src/engine/utils/is-relation-field-metadata-type.util'; | ||||
|  | ||||
| @@ -26,7 +27,7 @@ export class FieldsStringFactory { | ||||
|     fieldMetadataCollection: FieldMetadataInterface[], | ||||
|     objectMetadataCollection: ObjectMetadataInterface[], | ||||
|   ): Promise<string> { | ||||
|     const selectedFields: Record<string, any> = graphqlFields(info); | ||||
|     const selectedFields: Partial<Record> = graphqlFields(info); | ||||
|  | ||||
|     return this.createFieldsStringRecursive( | ||||
|       info, | ||||
| @@ -38,7 +39,7 @@ export class FieldsStringFactory { | ||||
|  | ||||
|   async createFieldsStringRecursive( | ||||
|     info: GraphQLResolveInfo, | ||||
|     selectedFields: Record<string, any>, | ||||
|     selectedFields: Partial<Record>, | ||||
|     fieldMetadataCollection: FieldMetadataInterface[], | ||||
|     objectMetadataCollection: ObjectMetadataInterface[], | ||||
|     accumulator = '', | ||||
|   | ||||
| @@ -3,15 +3,13 @@ import { Injectable, Logger } from '@nestjs/common'; | ||||
| import isEmpty from 'lodash.isempty'; | ||||
|  | ||||
| import { WorkspaceQueryBuilderOptions } from 'src/engine/api/graphql/workspace-query-builder/interfaces/workspace-query-builder-options.interface'; | ||||
| import { RecordFilter } from 'src/engine/api/graphql/workspace-query-builder/interfaces/record.interface'; | ||||
| import { Record } from 'src/engine/api/graphql/workspace-query-builder/interfaces/record.interface'; | ||||
| import { FindDuplicatesResolverArgs } from 'src/engine/api/graphql/workspace-resolver-builder/interfaces/workspace-resolvers-builder.interface'; | ||||
| import { ObjectMetadataInterface } from 'src/engine/metadata-modules/field-metadata/interfaces/object-metadata.interface'; | ||||
|  | ||||
| import { computeObjectTargetTable } from 'src/engine/utils/compute-object-target-table.util'; | ||||
| import { stringifyWithoutKeyQuote } from 'src/engine/api/graphql/workspace-query-builder/utils/stringify-without-key-quote.util'; | ||||
| import { ArgsAliasFactory } from 'src/engine/api/graphql/workspace-query-builder/factories/args-alias.factory'; | ||||
| import { DUPLICATE_CRITERIA_COLLECTION } from 'src/engine/api/graphql/workspace-resolver-builder/constants/duplicate-criteria.constants'; | ||||
| import { settings } from 'src/engine/constants/settings'; | ||||
| import { DuplicateService } from 'src/engine/core-modules/duplicate/duplicate.service'; | ||||
|  | ||||
| import { FieldsStringFactory } from './fields-string.factory'; | ||||
|  | ||||
| @@ -22,12 +20,13 @@ export class FindDuplicatesQueryFactory { | ||||
|   constructor( | ||||
|     private readonly fieldsStringFactory: FieldsStringFactory, | ||||
|     private readonly argsAliasFactory: ArgsAliasFactory, | ||||
|     private readonly duplicateService: DuplicateService, | ||||
|   ) {} | ||||
|  | ||||
|   async create<Filter extends RecordFilter = RecordFilter>( | ||||
|     args: FindDuplicatesResolverArgs<Filter>, | ||||
|   async create( | ||||
|     args: FindDuplicatesResolverArgs, | ||||
|     options: WorkspaceQueryBuilderOptions, | ||||
|     currentRecord?: Record<string, unknown>, | ||||
|     existingRecords?: Record[], | ||||
|   ) { | ||||
|     const fieldsString = await this.fieldsStringFactory.create( | ||||
|       options.info, | ||||
| @@ -35,121 +34,66 @@ export class FindDuplicatesQueryFactory { | ||||
|       options.objectMetadataCollection, | ||||
|     ); | ||||
|  | ||||
|     const argsData = this.getFindDuplicateBy<Filter>( | ||||
|       args, | ||||
|       options, | ||||
|       currentRecord, | ||||
|     ); | ||||
|     if (existingRecords) { | ||||
|       const query = existingRecords.reduce((acc, record, index) => { | ||||
|         return ( | ||||
|           acc + this.buildQuery(fieldsString, options, undefined, record, index) | ||||
|         ); | ||||
|       }, ''); | ||||
|  | ||||
|     const duplicateCondition = this.buildDuplicateCondition( | ||||
|       options.objectMetadataItem, | ||||
|       argsData, | ||||
|       args.id, | ||||
|     ); | ||||
|       return `query { | ||||
|         ${query} | ||||
|       }`; | ||||
|     } | ||||
|  | ||||
|     const query = args.data?.reduce((acc, dataItem, index) => { | ||||
|       const argsData = this.argsAliasFactory.create( | ||||
|         dataItem ?? {}, | ||||
|         options.fieldMetadataCollection, | ||||
|       ); | ||||
|  | ||||
|       return ( | ||||
|         acc + | ||||
|         this.buildQuery( | ||||
|           fieldsString, | ||||
|           options, | ||||
|           argsData as Record, | ||||
|           undefined, | ||||
|           index, | ||||
|         ) | ||||
|       ); | ||||
|     }, ''); | ||||
|  | ||||
|     return `query { | ||||
|       ${query} | ||||
|     }`; | ||||
|   } | ||||
|  | ||||
|   buildQuery( | ||||
|     fieldsString: string, | ||||
|     options: WorkspaceQueryBuilderOptions, | ||||
|     data?: Record, | ||||
|     existingRecord?: Record, | ||||
|     index?: number, | ||||
|   ) { | ||||
|     const duplicateCondition = | ||||
|       this.duplicateService.buildDuplicateConditionForGraphQL( | ||||
|         options.objectMetadataItem, | ||||
|         data ?? existingRecord, | ||||
|         existingRecord?.id, | ||||
|       ); | ||||
|  | ||||
|     const filters = stringifyWithoutKeyQuote(duplicateCondition); | ||||
|  | ||||
|     return ` | ||||
|       query { | ||||
|         ${computeObjectTargetTable(options.objectMetadataItem)}Collection${ | ||||
|           isEmpty(duplicateCondition?.or) | ||||
|             ? '(first: 0)' | ||||
|             : `(filter: ${filters})` | ||||
|         } { | ||||
|           ${fieldsString} | ||||
|         } | ||||
|       } | ||||
|     `; | ||||
|   } | ||||
|  | ||||
|   getFindDuplicateBy<Filter extends RecordFilter = RecordFilter>( | ||||
|     args: FindDuplicatesResolverArgs<Filter>, | ||||
|     options: WorkspaceQueryBuilderOptions, | ||||
|     currentRecord?: Record<string, unknown>, | ||||
|   ) { | ||||
|     if (currentRecord) { | ||||
|       return currentRecord; | ||||
|     return `${computeObjectTargetTable( | ||||
|       options.objectMetadataItem, | ||||
|     )}Collection${index}: ${computeObjectTargetTable( | ||||
|       options.objectMetadataItem, | ||||
|     )}Collection${ | ||||
|       isEmpty(duplicateCondition?.or) ? '(first: 0)' : `(filter: ${filters})` | ||||
|     } { | ||||
|         ${fieldsString} | ||||
|     } | ||||
|  | ||||
|     return this.argsAliasFactory.create( | ||||
|       args.data ?? {}, | ||||
|       options.fieldMetadataCollection, | ||||
|     ); | ||||
|   } | ||||
|  | ||||
|   buildQueryForExistingRecord( | ||||
|     id: string | number, | ||||
|     options: WorkspaceQueryBuilderOptions, | ||||
|   ) { | ||||
|     const idQueryField = typeof id === 'string' ? `"${id}"` : id; | ||||
|  | ||||
|     return ` | ||||
|       query { | ||||
|         ${computeObjectTargetTable( | ||||
|           options.objectMetadataItem, | ||||
|         )}Collection(filter: { id: { eq: ${idQueryField} }}){ | ||||
|           edges { | ||||
|             node { | ||||
|               __typename | ||||
|               ${this.getApplicableDuplicateCriteriaCollection( | ||||
|                 options.objectMetadataItem, | ||||
|               ) | ||||
|                 .flatMap((dc) => dc.columnNames) | ||||
|                 .join('\n')} | ||||
|             } | ||||
|           } | ||||
|         } | ||||
|       } | ||||
|     `; | ||||
|   } | ||||
|  | ||||
|   private buildDuplicateCondition( | ||||
|     objectMetadataItem: ObjectMetadataInterface, | ||||
|     argsData?: Record<string, unknown>, | ||||
|     filteringByExistingRecordId?: string, | ||||
|   ) { | ||||
|     if (!argsData) { | ||||
|       return; | ||||
|     } | ||||
|  | ||||
|     const criteriaCollection = | ||||
|       this.getApplicableDuplicateCriteriaCollection(objectMetadataItem); | ||||
|  | ||||
|     const criteriaWithMatchingArgs = criteriaCollection.filter((criteria) => | ||||
|       criteria.columnNames.every((columnName) => { | ||||
|         const value = argsData[columnName] as string | undefined; | ||||
|  | ||||
|         return ( | ||||
|           !!value && value.length >= settings.minLengthOfStringForDuplicateCheck | ||||
|         ); | ||||
|       }), | ||||
|     ); | ||||
|  | ||||
|     const filterCriteria = criteriaWithMatchingArgs.map((criteria) => | ||||
|       Object.fromEntries( | ||||
|         criteria.columnNames.map((columnName) => [ | ||||
|           columnName, | ||||
|           { eq: argsData[columnName] }, | ||||
|         ]), | ||||
|       ), | ||||
|     ); | ||||
|  | ||||
|     return { | ||||
|       // when filtering by an existing record, we need to filter that explicit record out | ||||
|       ...(filteringByExistingRecordId && { | ||||
|         id: { neq: filteringByExistingRecordId }, | ||||
|       }), | ||||
|       // keep condition as "or" to get results by more duplicate criteria | ||||
|       or: filterCriteria, | ||||
|     }; | ||||
|   } | ||||
|  | ||||
|   private getApplicableDuplicateCriteriaCollection( | ||||
|     objectMetadataItem: ObjectMetadataInterface, | ||||
|   ) { | ||||
|     return DUPLICATE_CRITERIA_COLLECTION.filter( | ||||
|       (duplicateCriteria) => | ||||
|         duplicateCriteria.objectName === objectMetadataItem.nameSingular, | ||||
|     ); | ||||
|   `; | ||||
|   } | ||||
| } | ||||
|   | ||||
| @@ -28,7 +28,7 @@ export class UpdateManyQueryFactory { | ||||
|     Record extends IRecord = IRecord, | ||||
|     Filter extends RecordFilter = RecordFilter, | ||||
|   >( | ||||
|     args: UpdateManyResolverArgs<Record, Filter>, | ||||
|     args: UpdateManyResolverArgs<Partial<Record>, Filter>, | ||||
|     options: UpdateManyQueryFactoryOptions, | ||||
|   ) { | ||||
|     const fieldsString = await this.fieldsStringFactory.create( | ||||
|   | ||||
| @@ -20,7 +20,7 @@ export class UpdateOneQueryFactory { | ||||
|   ) {} | ||||
|  | ||||
|   async create<Record extends IRecord = IRecord>( | ||||
|     args: UpdateOneResolverArgs<Record>, | ||||
|     args: UpdateOneResolverArgs<Partial<Record>>, | ||||
|     options: WorkspaceQueryBuilderOptions, | ||||
|   ) { | ||||
|     const fieldsString = await this.fieldsStringFactory.create( | ||||
| @@ -35,6 +35,7 @@ export class UpdateOneQueryFactory { | ||||
|  | ||||
|     const argsData = { | ||||
|       ...computedArgs.data, | ||||
|       id: undefined, // do not allow updating an existing object's id | ||||
|       updatedAt: new Date().toISOString(), | ||||
|     }; | ||||
|  | ||||
|   | ||||
| @@ -64,37 +64,27 @@ export class WorkspaceQueryBuilderFactory { | ||||
|     return this.findOneQueryFactory.create<Filter>(args, options); | ||||
|   } | ||||
|  | ||||
|   findDuplicates<Filter extends RecordFilter = RecordFilter>( | ||||
|     args: FindDuplicatesResolverArgs<Filter>, | ||||
|   findDuplicates( | ||||
|     args: FindDuplicatesResolverArgs, | ||||
|     options: WorkspaceQueryBuilderOptions, | ||||
|     existingRecord?: Record<string, unknown>, | ||||
|     existingRecords?: IRecord[], | ||||
|   ): Promise<string> { | ||||
|     return this.findDuplicatesQueryFactory.create<Filter>( | ||||
|     return this.findDuplicatesQueryFactory.create( | ||||
|       args, | ||||
|       options, | ||||
|       existingRecord, | ||||
|     ); | ||||
|   } | ||||
|  | ||||
|   findDuplicatesExistingRecord( | ||||
|     id: string | number, | ||||
|     options: WorkspaceQueryBuilderOptions, | ||||
|   ): string { | ||||
|     return this.findDuplicatesQueryFactory.buildQueryForExistingRecord( | ||||
|       id, | ||||
|       options, | ||||
|       existingRecords, | ||||
|     ); | ||||
|   } | ||||
|  | ||||
|   createMany<Record extends IRecord = IRecord>( | ||||
|     args: CreateManyResolverArgs<Record>, | ||||
|     args: CreateManyResolverArgs<Partial<Record>>, | ||||
|     options: WorkspaceQueryBuilderOptions, | ||||
|   ): Promise<string> { | ||||
|     return this.createManyQueryFactory.create<Record>(args, options); | ||||
|   } | ||||
|  | ||||
|   updateOne<Record extends IRecord = IRecord>( | ||||
|     initialArgs: UpdateOneResolverArgs<Record>, | ||||
|     initialArgs: UpdateOneResolverArgs<Partial<Record>>, | ||||
|     options: WorkspaceQueryBuilderOptions, | ||||
|   ): Promise<string> { | ||||
|     return this.updateOneQueryFactory.create<Record>(initialArgs, options); | ||||
| @@ -111,7 +101,7 @@ export class WorkspaceQueryBuilderFactory { | ||||
|     Record extends IRecord = IRecord, | ||||
|     Filter extends RecordFilter = RecordFilter, | ||||
|   >( | ||||
|     args: UpdateManyResolverArgs<Record, Filter>, | ||||
|     args: UpdateManyResolverArgs<Partial<Record>, Filter>, | ||||
|     options: UpdateManyQueryFactoryOptions, | ||||
|   ): Promise<string> { | ||||
|     return this.updateManyQueryFactory.create(args, options); | ||||
|   | ||||
| @@ -3,13 +3,14 @@ import { Module } from '@nestjs/common'; | ||||
| import { ObjectMetadataModule } from 'src/engine/metadata-modules/object-metadata/object-metadata.module'; | ||||
| import { FieldsStringFactory } from 'src/engine/api/graphql/workspace-query-builder/factories/fields-string.factory'; | ||||
| import { RecordPositionQueryFactory } from 'src/engine/api/graphql/workspace-query-builder/factories/record-position-query.factory'; | ||||
| import { DuplicateModule } from 'src/engine/core-modules/duplicate/duplicate.module'; | ||||
|  | ||||
| import { WorkspaceQueryBuilderFactory } from './workspace-query-builder.factory'; | ||||
|  | ||||
| import { workspaceQueryBuilderFactories } from './factories/factories'; | ||||
|  | ||||
| @Module({ | ||||
|   imports: [ObjectMetadataModule], | ||||
|   imports: [ObjectMetadataModule, DuplicateModule], | ||||
|   providers: [...workspaceQueryBuilderFactories, WorkspaceQueryBuilderFactory], | ||||
|   exports: [ | ||||
|     WorkspaceQueryBuilderFactory, | ||||
|   | ||||
| @@ -152,8 +152,8 @@ describe('QueryRunnerArgsFactory', () => { | ||||
|       } as WorkspaceQueryRunnerOptions; | ||||
|  | ||||
|       const args = { | ||||
|         id: '123', | ||||
|         data: { testNumber: '1', otherField: 'test' }, | ||||
|         ids: ['123'], | ||||
|         data: [{ testNumber: '1', otherField: 'test' }], | ||||
|       }; | ||||
|  | ||||
|       const result = await factory.create( | ||||
| @@ -163,8 +163,8 @@ describe('QueryRunnerArgsFactory', () => { | ||||
|       ); | ||||
|  | ||||
|       expect(result).toEqual({ | ||||
|         id: 123, | ||||
|         data: { testNumber: 1, otherField: 'test' }, | ||||
|         ids: [123], | ||||
|         data: [{ testNumber: 1, position: 2, otherField: 'test' }], | ||||
|       }); | ||||
|     }); | ||||
|   }); | ||||
|   | ||||
| @@ -10,7 +10,10 @@ import { | ||||
|   ResolverArgs, | ||||
|   ResolverArgsType, | ||||
| } from 'src/engine/api/graphql/workspace-resolver-builder/interfaces/workspace-resolvers-builder.interface'; | ||||
| import { RecordFilter } from 'src/engine/api/graphql/workspace-query-builder/interfaces/record.interface'; | ||||
| import { | ||||
|   Record, | ||||
|   RecordFilter, | ||||
| } from 'src/engine/api/graphql/workspace-query-builder/interfaces/record.interface'; | ||||
|  | ||||
| import { FieldMetadataType } from 'src/engine/metadata-modules/field-metadata/field-metadata.entity'; | ||||
| import { hasPositionField } from 'src/engine/metadata-modules/object-metadata/utils/has-position-field.util'; | ||||
| @@ -47,12 +50,12 @@ export class QueryRunnerArgsFactory { | ||||
|         return { | ||||
|           ...args, | ||||
|           data: await Promise.all( | ||||
|             (args as CreateManyResolverArgs).data.map((arg, index) => | ||||
|             (args as CreateManyResolverArgs).data?.map((arg, index) => | ||||
|               this.overrideDataByFieldMetadata(arg, options, fieldMetadataMap, { | ||||
|                 argIndex: index, | ||||
|                 shouldBackfillPosition, | ||||
|               }), | ||||
|             ), | ||||
|             ) ?? [], | ||||
|           ), | ||||
|         } satisfies CreateManyResolverArgs; | ||||
|       case ResolverArgsType.FindOne: | ||||
| @@ -75,25 +78,27 @@ export class QueryRunnerArgsFactory { | ||||
|       case ResolverArgsType.FindDuplicates: | ||||
|         return { | ||||
|           ...args, | ||||
|           id: await this.overrideValueByFieldMetadata( | ||||
|             'id', | ||||
|             (args as FindDuplicatesResolverArgs).id, | ||||
|             fieldMetadataMap, | ||||
|           ids: (await Promise.all( | ||||
|             (args as FindDuplicatesResolverArgs).ids?.map((id) => | ||||
|               this.overrideValueByFieldMetadata('id', id, fieldMetadataMap), | ||||
|             ) ?? [], | ||||
|           )) as string[], | ||||
|           data: await Promise.all( | ||||
|             (args as FindDuplicatesResolverArgs).data?.map((arg, index) => | ||||
|               this.overrideDataByFieldMetadata(arg, options, fieldMetadataMap, { | ||||
|                 argIndex: index, | ||||
|                 shouldBackfillPosition, | ||||
|               }), | ||||
|             ) ?? [], | ||||
|           ), | ||||
|           data: await this.overrideDataByFieldMetadata( | ||||
|             (args as FindDuplicatesResolverArgs).data, | ||||
|             options, | ||||
|             fieldMetadataMap, | ||||
|             { shouldBackfillPosition: false }, | ||||
|           ), | ||||
|         }; | ||||
|         } satisfies FindDuplicatesResolverArgs; | ||||
|       default: | ||||
|         return args; | ||||
|     } | ||||
|   } | ||||
|  | ||||
|   private async overrideDataByFieldMetadata( | ||||
|     data: Record<string, any> | undefined, | ||||
|     data: Partial<Record> | undefined, | ||||
|     options: WorkspaceQueryRunnerOptions, | ||||
|     fieldMetadataMap: Map<string, FieldMetadataInterface>, | ||||
|     argPositionBackfillInput: ArgPositionBackfillInput, | ||||
|   | ||||
| @@ -10,6 +10,7 @@ import { ObjectMetadataRepositoryModule } from 'src/engine/object-metadata-repos | ||||
| import { TelemetryListener } from 'src/engine/api/graphql/workspace-query-runner/listeners/telemetry.listener'; | ||||
| import { AnalyticsModule } from 'src/engine/core-modules/analytics/analytics.module'; | ||||
| import { RecordPositionBackfillCommand } from 'src/engine/api/graphql/workspace-query-runner/commands/0-20-record-position-backfill.command'; | ||||
| import { DuplicateModule } from 'src/engine/core-modules/duplicate/duplicate.module'; | ||||
|  | ||||
| import { WorkspaceQueryRunnerService } from './workspace-query-runner.service'; | ||||
|  | ||||
| @@ -23,6 +24,7 @@ import { EntityEventsToDbListener } from './listeners/entity-events-to-db.listen | ||||
|     WorkspaceQueryHookModule, | ||||
|     ObjectMetadataRepositoryModule.forFeature([WorkspaceMemberWorkspaceEntity]), | ||||
|     AnalyticsModule, | ||||
|     DuplicateModule, | ||||
|   ], | ||||
|   providers: [ | ||||
|     WorkspaceQueryRunnerService, | ||||
|   | ||||
| @@ -52,6 +52,7 @@ import { STANDARD_OBJECT_IDS } from 'src/engine/workspace-manager/workspace-sync | ||||
| import { assertIsValidUuid } from 'src/engine/api/graphql/workspace-query-runner/utils/assert-is-valid-uuid.util'; | ||||
| import { isQueryTimeoutError } from 'src/engine/utils/query-timeout.util'; | ||||
| import { InjectMessageQueue } from 'src/engine/integrations/message-queue/decorators/message-queue.decorator'; | ||||
| import { DuplicateService } from 'src/engine/core-modules/duplicate/duplicate.service'; | ||||
|  | ||||
| import { WorkspaceQueryRunnerOptions } from './interfaces/query-runner-option.interface'; | ||||
| import { | ||||
| @@ -77,6 +78,7 @@ export class WorkspaceQueryRunnerService { | ||||
|     private readonly eventEmitter: EventEmitter2, | ||||
|     private readonly workspaceQueryHookService: WorkspaceQueryHookService, | ||||
|     private readonly environmentService: EnvironmentService, | ||||
|     private readonly duplicateService: DuplicateService, | ||||
|   ) {} | ||||
|  | ||||
|   async findMany< | ||||
| @@ -167,16 +169,16 @@ export class WorkspaceQueryRunnerService { | ||||
|   } | ||||
|  | ||||
|   async findDuplicates<TRecord extends IRecord = IRecord>( | ||||
|     args: FindDuplicatesResolverArgs<TRecord>, | ||||
|     args: FindDuplicatesResolverArgs<Partial<TRecord>>, | ||||
|     options: WorkspaceQueryRunnerOptions, | ||||
|   ): Promise<IConnection<TRecord> | undefined> { | ||||
|     if (!args.data && !args.id) { | ||||
|     if (!args.data && !args.ids) { | ||||
|       throw new BadRequestException( | ||||
|         'You have to provide either "data" or "id" argument', | ||||
|       ); | ||||
|     } | ||||
|  | ||||
|     if (!args.id && isEmpty(args.data)) { | ||||
|     if (!args.ids && isEmpty(args.data)) { | ||||
|       throw new BadRequestException( | ||||
|         'The "data" condition can not be empty when ID input not provided', | ||||
|       ); | ||||
| @@ -190,37 +192,24 @@ export class WorkspaceQueryRunnerService { | ||||
|       ResolverArgsType.FindDuplicates, | ||||
|     )) as FindDuplicatesResolverArgs<TRecord>; | ||||
|  | ||||
|     let existingRecord: Record<string, unknown> | undefined; | ||||
|     let existingRecords: IRecord[] | undefined = undefined; | ||||
|  | ||||
|     if (computedArgs.id) { | ||||
|       const existingRecordQuery = | ||||
|         this.workspaceQueryBuilderFactory.findDuplicatesExistingRecord( | ||||
|           computedArgs.id, | ||||
|           options, | ||||
|         ); | ||||
|  | ||||
|       const existingRecordResult = await this.execute( | ||||
|         existingRecordQuery, | ||||
|     if (computedArgs.ids && computedArgs.ids.length > 0) { | ||||
|       existingRecords = await this.duplicateService.findExistingRecords( | ||||
|         computedArgs.ids, | ||||
|         objectMetadataItem, | ||||
|         workspaceId, | ||||
|       ); | ||||
|  | ||||
|       const parsedResult = await this.parseResult<Record<string, unknown>>( | ||||
|         existingRecordResult, | ||||
|         objectMetadataItem, | ||||
|         '', | ||||
|       ); | ||||
|  | ||||
|       existingRecord = parsedResult?.edges?.[0]?.node; | ||||
|  | ||||
|       if (!existingRecord) { | ||||
|         throw new NotFoundError(`Object with id ${args.id} not found`); | ||||
|       if (!existingRecords || existingRecords.length === 0) { | ||||
|         throw new NotFoundError(`Object with id ${args.ids} not found`); | ||||
|       } | ||||
|     } | ||||
|  | ||||
|     const query = await this.workspaceQueryBuilderFactory.findDuplicates( | ||||
|       computedArgs, | ||||
|       options, | ||||
|       existingRecord, | ||||
|       existingRecords, | ||||
|     ); | ||||
|  | ||||
|     await this.workspaceQueryHookService.executePreQueryHooks( | ||||
| @@ -237,17 +226,22 @@ export class WorkspaceQueryRunnerService { | ||||
|       result, | ||||
|       objectMetadataItem, | ||||
|       '', | ||||
|       true, | ||||
|     ); | ||||
|   } | ||||
|  | ||||
|   async createMany<Record extends IRecord = IRecord>( | ||||
|     args: CreateManyResolverArgs<Record>, | ||||
|     args: CreateManyResolverArgs<Partial<Record>>, | ||||
|     options: WorkspaceQueryRunnerOptions, | ||||
|   ): Promise<Record[] | undefined> { | ||||
|     const { workspaceId, userId, objectMetadataItem } = options; | ||||
|  | ||||
|     assertMutationNotOnRemoteObject(objectMetadataItem); | ||||
|  | ||||
|     if (args.upsert) { | ||||
|       return await this.upsertMany(args, options); | ||||
|     } | ||||
|  | ||||
|     args.data.forEach((record) => { | ||||
|       if (record?.id) { | ||||
|         assertIsValidUuid(record.id); | ||||
| @@ -305,17 +299,73 @@ export class WorkspaceQueryRunnerService { | ||||
|     return parsedResults; | ||||
|   } | ||||
|  | ||||
|   async upsertMany<Record extends IRecord = IRecord>( | ||||
|     args: CreateManyResolverArgs<Partial<Record>>, | ||||
|     options: WorkspaceQueryRunnerOptions, | ||||
|   ): Promise<Record[] | undefined> { | ||||
|     const ids = args.data | ||||
|       .map((item) => item.id) | ||||
|       .filter((id) => id !== undefined); | ||||
|  | ||||
|     const existingRecords = | ||||
|       ids.length > 0 | ||||
|         ? await this.duplicateService.findExistingRecords( | ||||
|             ids as string[], | ||||
|             options.objectMetadataItem, | ||||
|             options.workspaceId, | ||||
|           ) | ||||
|         : []; | ||||
|  | ||||
|     const existingRecordsMap = new Map( | ||||
|       existingRecords.map((record) => [record.id, record]), | ||||
|     ); | ||||
|  | ||||
|     const results: Record[] = []; | ||||
|     const recordsToCreate: Partial<Record>[] = []; | ||||
|  | ||||
|     for (const payload of args.data) { | ||||
|       if (payload.id && existingRecordsMap.has(payload.id)) { | ||||
|         const result = await this.updateOne( | ||||
|           { id: payload.id, data: payload }, | ||||
|           options, | ||||
|         ); | ||||
|  | ||||
|         if (result) { | ||||
|           results.push(result); | ||||
|         } | ||||
|       } else { | ||||
|         recordsToCreate.push(payload); | ||||
|       } | ||||
|     } | ||||
|  | ||||
|     if (recordsToCreate.length > 0) { | ||||
|       const createResults = await this.createMany( | ||||
|         { data: recordsToCreate } as CreateManyResolverArgs<Partial<Record>>, | ||||
|         options, | ||||
|       ); | ||||
|  | ||||
|       if (createResults) { | ||||
|         results.push(...createResults); | ||||
|       } | ||||
|     } | ||||
|  | ||||
|     return results; | ||||
|   } | ||||
|  | ||||
|   async createOne<Record extends IRecord = IRecord>( | ||||
|     args: CreateOneResolverArgs<Record>, | ||||
|     args: CreateOneResolverArgs<Partial<Record>>, | ||||
|     options: WorkspaceQueryRunnerOptions, | ||||
|   ): Promise<Record | undefined> { | ||||
|     const results = await this.createMany({ data: [args.data] }, options); | ||||
|     const results = await this.createMany( | ||||
|       { data: [args.data], upsert: args.upsert }, | ||||
|       options, | ||||
|     ); | ||||
|  | ||||
|     return results?.[0]; | ||||
|   } | ||||
|  | ||||
|   async updateOne<Record extends IRecord = IRecord>( | ||||
|     args: UpdateOneResolverArgs<Record>, | ||||
|     args: UpdateOneResolverArgs<Partial<Record>>, | ||||
|     options: WorkspaceQueryRunnerOptions, | ||||
|   ): Promise<Record | undefined> { | ||||
|     const { workspaceId, userId, objectMetadataItem } = options; | ||||
| @@ -373,7 +423,7 @@ export class WorkspaceQueryRunnerService { | ||||
|   } | ||||
|  | ||||
|   async updateMany<Record extends IRecord = IRecord>( | ||||
|     args: UpdateManyResolverArgs<Record>, | ||||
|     args: UpdateManyResolverArgs<Partial<Record>>, | ||||
|     options: WorkspaceQueryRunnerOptions, | ||||
|   ): Promise<Record[] | undefined> { | ||||
|     const { userId, workspaceId, objectMetadataItem } = options; | ||||
| @@ -609,11 +659,21 @@ export class WorkspaceQueryRunnerService { | ||||
|     graphqlResult: PGGraphQLResult | undefined, | ||||
|     objectMetadataItem: ObjectMetadataInterface, | ||||
|     command: string, | ||||
|     isMultiQuery = false, | ||||
|   ): Promise<Result> { | ||||
|     const entityKey = `${command}${computeObjectTargetTable( | ||||
|       objectMetadataItem, | ||||
|     )}Collection`; | ||||
|     const result = graphqlResult?.[0]?.resolve?.data?.[entityKey]; | ||||
|     const result = !isMultiQuery | ||||
|       ? graphqlResult?.[0]?.resolve?.data?.[entityKey] | ||||
|       : Object.keys(graphqlResult?.[0]?.resolve?.data).reduce( | ||||
|           (acc: IRecord[], dataItem, index) => { | ||||
|             acc.push(graphqlResult?.[0]?.resolve?.data[`${entityKey}${index}`]); | ||||
|  | ||||
|             return acc; | ||||
|           }, | ||||
|           [], | ||||
|         ); | ||||
|     const errors = graphqlResult?.[0]?.resolve?.errors; | ||||
|  | ||||
|     if ( | ||||
|   | ||||
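
For orientation, a minimal sketch of how a client could call `createMany` with the new `upsert` flag. This is not part of the commit: the endpoint URL, the `createPeople` field and `PersonCreateInput` type names, the auth header, and the selected fields are assumptions for illustration only.

// Sketch only: the GraphQL field, type and env-variable names below are assumptions.
async function upsertPeople(records: Array<{ id?: string; city?: string }>) {
  const response = await fetch('http://localhost:3000/graphql', {
    method: 'POST',
    headers: {
      'Content-Type': 'application/json',
      Authorization: `Bearer ${process.env.ACCESS_TOKEN}`,
    },
    body: JSON.stringify({
      query: `
        mutation CreatePeople($data: [PersonCreateInput!]!, $upsert: Boolean) {
          createPeople(data: $data, upsert: $upsert) {
            id
          }
        }
      `,
      variables: {
        // Records carrying the id of an existing row are routed to updateOne;
        // the rest fall through to the regular createMany path.
        data: records,
        upsert: true,
      },
    }),
  });

  return response.json();
}
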
| @@ -39,26 +39,36 @@ export interface FindOneResolverArgs<Filter = any> { | ||||
|   filter?: Filter; | ||||
| } | ||||
|  | ||||
| export interface FindDuplicatesResolverArgs<Data extends Record = Record> { | ||||
|   id?: string; | ||||
|   data?: Data; | ||||
| export interface FindDuplicatesResolverArgs< | ||||
|   Data extends Partial<Record> = Partial<Record>, | ||||
| > { | ||||
|   ids?: string[]; | ||||
|   data?: Data[]; | ||||
| } | ||||
|  | ||||
| export interface CreateOneResolverArgs<Data extends Record = Record> { | ||||
| export interface CreateOneResolverArgs< | ||||
|   Data extends Partial<Record> = Partial<Record>, | ||||
| > { | ||||
|   data: Data; | ||||
|   upsert?: boolean; | ||||
| } | ||||
|  | ||||
| export interface CreateManyResolverArgs<Data extends Record = Record> { | ||||
| export interface CreateManyResolverArgs< | ||||
|   Data extends Partial<Record> = Partial<Record>, | ||||
| > { | ||||
|   data: Data[]; | ||||
|   upsert?: boolean; | ||||
| } | ||||
|  | ||||
| export interface UpdateOneResolverArgs<Data extends Record = Record> { | ||||
| export interface UpdateOneResolverArgs< | ||||
|   Data extends Partial<Record> = Partial<Record>, | ||||
| > { | ||||
|   id: string; | ||||
|   data: Data; | ||||
| } | ||||
|  | ||||
| export interface UpdateManyResolverArgs< | ||||
|   Data extends Record = Record, | ||||
|   Data extends Partial<Record> = Partial<Record>, | ||||
|   Filter = any, | ||||
| > { | ||||
|   filter: Filter; | ||||
|   | ||||
| @@ -102,9 +102,12 @@ export class RootTypeFactory { | ||||
|         } | ||||
|  | ||||
|         const outputType = this.typeMapperService.mapToGqlType(objectType, { | ||||
|           isArray: ['updateMany', 'deleteMany', 'createMany'].includes( | ||||
|             methodName, | ||||
|           ), | ||||
|           isArray: [ | ||||
|             'updateMany', | ||||
|             'deleteMany', | ||||
|             'createMany', | ||||
|             'findDuplicates', | ||||
|           ].includes(methodName), | ||||
|         }); | ||||
|  | ||||
|         fieldConfigMap[name] = { | ||||
|   | ||||
| @@ -1,4 +1,4 @@ | ||||
| import { GraphQLID, GraphQLInt, GraphQLString } from 'graphql'; | ||||
| import { GraphQLBoolean, GraphQLID, GraphQLInt, GraphQLString } from 'graphql'; | ||||
|  | ||||
| import { WorkspaceResolverBuilderMethodNames } from 'src/engine/api/graphql/workspace-resolver-builder/interfaces/workspace-resolvers-builder.interface'; | ||||
|  | ||||
| @@ -29,9 +29,19 @@ describe('getResolverArgs', () => { | ||||
|         isNullable: false, | ||||
|         isArray: true, | ||||
|       }, | ||||
|       upsert: { | ||||
|         isArray: false, | ||||
|         isNullable: true, | ||||
|         type: GraphQLBoolean, | ||||
|       }, | ||||
|     }, | ||||
|     createOne: { | ||||
|       data: { kind: InputTypeDefinitionKind.Create, isNullable: false }, | ||||
|       upsert: { | ||||
|         isArray: false, | ||||
|         isNullable: true, | ||||
|         type: GraphQLBoolean, | ||||
|       }, | ||||
|     }, | ||||
|     updateOne: { | ||||
|       id: { type: GraphQLID, isNullable: false }, | ||||
|   | ||||
| @@ -1,4 +1,4 @@ | ||||
| import { GraphQLString, GraphQLInt, GraphQLID } from 'graphql'; | ||||
| import { GraphQLString, GraphQLInt, GraphQLID, GraphQLBoolean } from 'graphql'; | ||||
|  | ||||
| import { WorkspaceResolverBuilderMethodNames } from 'src/engine/api/graphql/workspace-resolver-builder/interfaces/workspace-resolvers-builder.interface'; | ||||
| import { ArgMetadata } from 'src/engine/api/graphql/workspace-schema-builder/interfaces/param-metadata.interface'; | ||||
| @@ -56,6 +56,11 @@ export const getResolverArgs = ( | ||||
|           isNullable: false, | ||||
|           isArray: true, | ||||
|         }, | ||||
|         upsert: { | ||||
|           type: GraphQLBoolean, | ||||
|           isNullable: true, | ||||
|           isArray: false, | ||||
|         }, | ||||
|       }; | ||||
|     case 'createOne': | ||||
|       return { | ||||
| @@ -63,6 +68,11 @@ export const getResolverArgs = ( | ||||
|           kind: InputTypeDefinitionKind.Create, | ||||
|           isNullable: false, | ||||
|         }, | ||||
|         upsert: { | ||||
|           type: GraphQLBoolean, | ||||
|           isNullable: true, | ||||
|           isArray: false, | ||||
|         }, | ||||
|       }; | ||||
|     case 'updateOne': | ||||
|       return { | ||||
| @@ -77,13 +87,15 @@ export const getResolverArgs = ( | ||||
|       }; | ||||
|     case 'findDuplicates': | ||||
|       return { | ||||
|         id: { | ||||
|         ids: { | ||||
|           type: GraphQLID, | ||||
|           isNullable: true, | ||||
|           isArray: true, | ||||
|         }, | ||||
|         data: { | ||||
|           kind: InputTypeDefinitionKind.Create, | ||||
|           isNullable: true, | ||||
|           isArray: true, | ||||
|         }, | ||||
|       }; | ||||
|     case 'deleteOne': | ||||
|   | ||||
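
As a hedged illustration of the reshaped `findDuplicates` arguments, which now take arrays, the two ways of invoking the duplicate check might look as follows; the person-specific field names are placeholders, not the real schema.

// Shape mirrors the FindDuplicatesResolverArgs interface changed above;
// the person fields below are placeholders.
type PersonDraft = { email?: string; name?: { firstName: string; lastName: string } };
type FindDuplicatesArgs = { ids?: string[]; data?: PersonDraft[] };

// Re-check existing records by id...
const byIds: FindDuplicatesArgs = {
  ids: ['20202020-aaaa-4bbb-8ccc-1dddddddddd1'],
};

// ...or pass draft records and let the duplicate criteria drive the match.
const byDraftData: FindDuplicatesArgs = {
  data: [{ email: 'jane.doe@example.com' }],
};
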
| @@ -1,6 +1,7 @@ | ||||
| import { BadRequestException } from '@nestjs/common'; | ||||
|  | ||||
| import { ObjectMetadataInterface } from 'src/engine/metadata-modules/field-metadata/interfaces/object-metadata.interface'; | ||||
| import { Record } from 'src/engine/api/graphql/workspace-query-builder/interfaces/record.interface'; | ||||
|  | ||||
| import { compositeTypeDefintions } from 'src/engine/metadata-modules/field-metadata/composite-types'; | ||||
| import { isCompositeFieldMetadataType } from 'src/engine/metadata-modules/field-metadata/utils/is-composite-field-metadata-type.util'; | ||||
| @@ -8,7 +9,7 @@ import { computeObjectTargetTable } from 'src/engine/utils/compute-object-target | ||||
|  | ||||
| export const checkArrayFields = ( | ||||
|   objectMetadata: ObjectMetadataInterface, | ||||
|   fields: Array<Record<string, any>>, | ||||
|   fields: Array<Partial<Record>>, | ||||
| ): void => { | ||||
|   const fieldMetadataNames = objectMetadata.fields | ||||
|     .map((field) => { | ||||
|   | ||||
| @@ -0,0 +1,11 @@ | ||||
| import { Module } from '@nestjs/common'; | ||||
|  | ||||
| import { DuplicateService } from 'src/engine/core-modules/duplicate/duplicate.service'; | ||||
| import { WorkspaceDataSourceModule } from 'src/engine/workspace-datasource/workspace-datasource.module'; | ||||
|  | ||||
| @Module({ | ||||
|   imports: [WorkspaceDataSourceModule], | ||||
|   exports: [DuplicateService], | ||||
|   providers: [DuplicateService], | ||||
| }) | ||||
| export class DuplicateModule {} | ||||
| @@ -0,0 +1,173 @@ | ||||
| import { Injectable } from '@nestjs/common'; | ||||
|  | ||||
| import { ObjectMetadataInterface } from 'src/engine/metadata-modules/field-metadata/interfaces/object-metadata.interface'; | ||||
| import { | ||||
|   Record as IRecord, | ||||
|   Record, | ||||
| } from 'src/engine/api/graphql/workspace-query-builder/interfaces/record.interface'; | ||||
|  | ||||
| import { settings } from 'src/engine/constants/settings'; | ||||
| import { computeObjectTargetTable } from 'src/engine/utils/compute-object-target-table.util'; | ||||
| import { WorkspaceDataSourceService } from 'src/engine/workspace-datasource/workspace-datasource.service'; | ||||
| import { DUPLICATE_CRITERIA_COLLECTION } from 'src/engine/core-modules/duplicate/constants/duplicate-criteria.constants'; | ||||
|  | ||||
| @Injectable() | ||||
| export class DuplicateService { | ||||
|   constructor( | ||||
|     private readonly workspaceDataSourceService: WorkspaceDataSourceService, | ||||
|   ) {} | ||||
|  | ||||
|   async findExistingRecords( | ||||
|     recordIds: (string | number)[], | ||||
|     objectMetadata: ObjectMetadataInterface, | ||||
|     workspaceId: string, | ||||
|   ) { | ||||
|     const dataSourceSchema = | ||||
|       this.workspaceDataSourceService.getSchemaName(workspaceId); | ||||
|  | ||||
|     const results = await this.workspaceDataSourceService.executeRawQuery( | ||||
|       ` | ||||
|           SELECT  | ||||
|              * | ||||
|           FROM | ||||
|               ${dataSourceSchema}."${computeObjectTargetTable( | ||||
|                 objectMetadata, | ||||
|               )}" p | ||||
|           WHERE | ||||
|               p."id" IN (${recordIds | ||||
|                 .map((_, index) => `$${index + 1}`) | ||||
|                 .join(', ')}) | ||||
|           `, | ||||
|       recordIds, | ||||
|       workspaceId, | ||||
|     ); | ||||
|  | ||||
|     return results as IRecord[]; | ||||
|   } | ||||
|  | ||||
|   buildDuplicateConditionForGraphQL( | ||||
|     objectMetadata: ObjectMetadataInterface, | ||||
|     argsData?: Partial<Record>, | ||||
|     filteringByExistingRecordId?: string, | ||||
|   ) { | ||||
|     if (!argsData) { | ||||
|       return; | ||||
|     } | ||||
|  | ||||
|     const criteriaCollection = | ||||
|       this.getApplicableDuplicateCriteriaCollection(objectMetadata); | ||||
|  | ||||
|     const criteriaWithMatchingArgs = criteriaCollection.filter((criteria) => | ||||
|       criteria.columnNames.every((columnName) => { | ||||
|         const value = argsData[columnName] as string | undefined; | ||||
|  | ||||
|         return ( | ||||
|           !!value && value.length >= settings.minLengthOfStringForDuplicateCheck | ||||
|         ); | ||||
|       }), | ||||
|     ); | ||||
|  | ||||
|     const filterCriteria = criteriaWithMatchingArgs.map((criteria) => | ||||
|       Object.fromEntries( | ||||
|         criteria.columnNames.map((columnName) => [ | ||||
|           columnName, | ||||
|           { eq: argsData[columnName] }, | ||||
|         ]), | ||||
|       ), | ||||
|     ); | ||||
|  | ||||
|     return { | ||||
|       // when filtering by an existing record, we need to filter that explicit record out | ||||
|       ...(filteringByExistingRecordId && { | ||||
|         id: { neq: filteringByExistingRecordId }, | ||||
|       }), | ||||
|       // keep the condition as "or" so a record matching any duplicate criterion is returned | ||||
|       or: filterCriteria, | ||||
|     }; | ||||
|   } | ||||
|  | ||||
|   private getApplicableDuplicateCriteriaCollection( | ||||
|     objectMetadataItem: ObjectMetadataInterface, | ||||
|   ) { | ||||
|     return DUPLICATE_CRITERIA_COLLECTION.filter( | ||||
|       (duplicateCriteria) => | ||||
|         duplicateCriteria.objectName === objectMetadataItem.nameSingular, | ||||
|     ); | ||||
|   } | ||||
|  | ||||
|   /** | ||||
|    * TODO: Remove this code by September 1st, 2024 if it isn't used | ||||
|    * It was built to be used by the upsertMany function, but ended up unused. | ||||
|    * It is a re-implementation of the findDuplicates logic, done at the | ||||
|    * SQL layer instead of the GraphQL layer. | ||||
|    *  | ||||
|   async findDuplicate( | ||||
|     data: Partial<Record>, | ||||
|     objectMetadata: ObjectMetadataInterface, | ||||
|     workspaceId: string, | ||||
|   ) { | ||||
|     const dataSourceSchema = | ||||
|       this.workspaceDataSourceService.getSchemaName(workspaceId); | ||||
|  | ||||
|     const { duplicateWhereClause, duplicateWhereParameters } = | ||||
|       this.buildDuplicateConditionForUpsert(objectMetadata, data); | ||||
|  | ||||
|     const results = await this.workspaceDataSourceService.executeRawQuery( | ||||
|       ` | ||||
|           SELECT  | ||||
|              * | ||||
|           FROM | ||||
|               ${dataSourceSchema}."${computeObjectTargetTable( | ||||
|                 objectMetadata, | ||||
|               )}" p | ||||
|           WHERE | ||||
|               ${duplicateWhereClause} | ||||
|           `, | ||||
|       duplicateWhereParameters, | ||||
|       workspaceId, | ||||
|     ); | ||||
|  | ||||
|     return results.length > 0 ? results[0] : null; | ||||
|   } | ||||
|  | ||||
|   private buildDuplicateConditionForUpsert( | ||||
|     objectMetadata: ObjectMetadataInterface, | ||||
|     data: Partial<Record>, | ||||
|   ) { | ||||
|     const criteriaCollection = this.getApplicableDuplicateCriteriaCollection( | ||||
|       objectMetadata, | ||||
|     ).filter( | ||||
|       (duplicateCriteria) => duplicateCriteria.useAsUniqueKeyForUpsert === true, | ||||
|     ); | ||||
|  | ||||
|     const whereClauses: string[] = []; | ||||
|     const whereParameters: any[] = []; | ||||
|     let parameterIndex = 1; | ||||
|  | ||||
|     criteriaCollection.forEach((c) => { | ||||
|       const clauseParts: string[] = []; | ||||
|  | ||||
|       c.columnNames.forEach((column) => { | ||||
|         const dataKey = Object.keys(data).find( | ||||
|           (key) => key.toLowerCase() === column.toLowerCase(), | ||||
|         ); | ||||
|  | ||||
|         if (dataKey) { | ||||
|           clauseParts.push(`p."${column}" = $${parameterIndex}`); | ||||
|           whereParameters.push(data[dataKey]); | ||||
|           parameterIndex++; | ||||
|         } | ||||
|       }); | ||||
|       if (clauseParts.length > 0) { | ||||
|         whereClauses.push(`(${clauseParts.join(' AND ')})`); | ||||
|       } | ||||
|     }); | ||||
|  | ||||
|     const duplicateWhereClause = whereClauses.join(' OR '); | ||||
|     const duplicateWhereParameters = whereParameters; | ||||
|  | ||||
|     return { duplicateWhereClause, duplicateWhereParameters }; | ||||
|   } | ||||
|   * | ||||
|   */ | ||||
| } | ||||
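
To make the filter produced by `buildDuplicateConditionForGraphQL` concrete, a hedged sketch follows; it assumes a single applicable duplicate criterion on a hypothetical `email` column and a call that passes the id of the record being checked.

// Assuming buildDuplicateConditionForGraphQL(personMetadata, { email: 'jane@example.com' }, existingId)
// with one applicable single-column criterion on a hypothetical `email` column whose value
// passes settings.minLengthOfStringForDuplicateCheck, the returned filter is roughly:
const existingId = '20202020-aaaa-4bbb-8ccc-1dddddddddd1';

const expectedFilter = {
  // the record being checked is excluded from its own duplicate results
  id: { neq: existingId },
  // one "or" entry per criterion whose columns are all present and long enough
  or: [{ email: { eq: 'jane@example.com' } }],
};
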
| @@ -22,8 +22,16 @@ const mockObjectMetadata: ObjectMetadataInterface = { | ||||
|  | ||||
| describe('objectRecordChangedValues', () => { | ||||
|   it('detects changes in scalar values correctly', () => { | ||||
|     const oldRecord = { id: 1, name: 'Original Name', updatedAt: new Date() }; | ||||
|     const newRecord = { id: 1, name: 'Updated Name', updatedAt: new Date() }; | ||||
|     const oldRecord = { | ||||
|       id: '74316f58-29b0-4a6a-b8fa-d2b506d5516m', | ||||
|       name: 'Original Name', | ||||
|       updatedAt: new Date().toString(), | ||||
|     }; | ||||
|     const newRecord = { | ||||
|       id: '74316f58-29b0-4a6a-b8fa-d2b506d5516m', | ||||
|       name: 'Updated Name', | ||||
|       updatedAt: new Date().toString(), | ||||
|     }; | ||||
|  | ||||
|     const result = objectRecordChangedValues( | ||||
|       oldRecord, | ||||
| @@ -38,8 +46,14 @@ describe('objectRecordChangedValues', () => { | ||||
| }); | ||||
|  | ||||
| it('ignores changes to the updatedAt field', () => { | ||||
|   const oldRecord = { id: 1, updatedAt: new Date('2020-01-01') }; | ||||
|   const newRecord = { id: 1, updatedAt: new Date('2024-01-01') }; | ||||
|   const oldRecord = { | ||||
|     id: '74316f58-29b0-4a6a-b8fa-d2b506d5516d', | ||||
|     updatedAt: new Date('2020-01-01').toDateString(), | ||||
|   }; | ||||
|   const newRecord = { | ||||
|     id: '74316f58-29b0-4a6a-b8fa-d2b506d5516d', | ||||
|     updatedAt: new Date('2024-01-01').toDateString(), | ||||
|   }; | ||||
|  | ||||
|   const result = objectRecordChangedValues( | ||||
|     oldRecord, | ||||
| @@ -51,8 +65,16 @@ it('ignores changes to the updatedAt field', () => { | ||||
| }); | ||||
|  | ||||
| it('returns an empty object when there are no changes', () => { | ||||
|   const oldRecord = { id: 1, name: 'Name', value: 100 }; | ||||
|   const newRecord = { id: 1, name: 'Name', value: 100 }; | ||||
|   const oldRecord = { | ||||
|     id: '74316f58-29b0-4a6a-b8fa-d2b506d5516k', | ||||
|     name: 'Name', | ||||
|     value: 100, | ||||
|   }; | ||||
|   const newRecord = { | ||||
|     id: '74316f58-29b0-4a6a-b8fa-d2b506d5516k', | ||||
|     name: 'Name', | ||||
|     value: 100, | ||||
|   }; | ||||
|  | ||||
|   const result = objectRecordChangedValues( | ||||
|     oldRecord, | ||||
| @@ -65,17 +87,17 @@ it('returns an empty object when there are no changes', () => { | ||||
|  | ||||
| it('correctly handles a mix of changed, unchanged, and special case values', () => { | ||||
|   const oldRecord = { | ||||
|     id: 1, | ||||
|     id: '74316f58-29b0-4a6a-b8fa-d2b506d5516l', | ||||
|     name: 'Original', | ||||
|     status: 'active', | ||||
|     updatedAt: new Date(2020, 1, 1), | ||||
|     updatedAt: new Date(2020, 1, 1).toDateString(), | ||||
|     config: { theme: 'dark' }, | ||||
|   }; | ||||
|   const newRecord = { | ||||
|     id: 1, | ||||
|     id: '74316f58-29b0-4a6a-b8fa-d2b506d5516l', | ||||
|     name: 'Updated', | ||||
|     status: 'active', | ||||
|     updatedAt: new Date(2021, 1, 1), | ||||
|     updatedAt: new Date(2021, 1, 1).toDateString(), | ||||
|     config: { theme: 'light' }, | ||||
|   }; | ||||
|   const expectedChanges = { | ||||
|   | ||||
| @@ -1,8 +1,14 @@ | ||||
| import deepEqual from 'deep-equal'; | ||||
|  | ||||
| export const objectRecordChangedProperties = ( | ||||
|   oldRecord: Record<string, any>, | ||||
|   newRecord: Record<string, any>, | ||||
| import { Record } from 'src/engine/api/graphql/workspace-query-builder/interfaces/record.interface'; | ||||
|  | ||||
| import { BaseWorkspaceEntity } from 'src/engine/twenty-orm/base.workspace-entity'; | ||||
|  | ||||
| export const objectRecordChangedProperties = < | ||||
|   PRecord extends Partial<Record | BaseWorkspaceEntity> = Partial<Record>, | ||||
| >( | ||||
|   oldRecord: PRecord, | ||||
|   newRecord: PRecord, | ||||
| ) => { | ||||
|   const changedProperties = Object.keys(newRecord).filter( | ||||
|     (key) => !deepEqual(oldRecord[key], newRecord[key]), | ||||
|   | ||||
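
A short usage sketch of the now-generic `objectRecordChangedProperties`; the import path is an assumption based on the file above.

// Path is an assumption; point it at the util file shown in the diff above.
import { objectRecordChangedProperties } from 'src/engine/integrations/event-emitter/utils/object-record-changed-properties';

const before = {
  id: '20202020-aaaa-4bbb-8ccc-1dddddddddd1',
  name: { firstName: 'Ada', lastName: 'Lovelace' },
  city: 'London',
};
const after = {
  id: '20202020-aaaa-4bbb-8ccc-1dddddddddd1',
  name: { firstName: 'Ada', lastName: 'Lovelace' },
  city: 'Paris',
};

// Returns the keys of `after` whose values are not deep-equal to `before`'s,
// so nested objects with identical contents do not count as changes.
const changed = objectRecordChangedProperties(before, after); // ['city']
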
| @@ -1,12 +1,13 @@ | ||||
| import deepEqual from 'deep-equal'; | ||||
|  | ||||
| import { ObjectMetadataInterface } from 'src/engine/metadata-modules/field-metadata/interfaces/object-metadata.interface'; | ||||
| import { Record as IRecord } from 'src/engine/api/graphql/workspace-query-builder/interfaces/record.interface'; | ||||
|  | ||||
| import { FieldMetadataType } from 'src/engine/metadata-modules/field-metadata/field-metadata.entity'; | ||||
|  | ||||
| export const objectRecordChangedValues = ( | ||||
|   oldRecord: Record<string, any>, | ||||
|   newRecord: Record<string, any>, | ||||
|   oldRecord: Partial<IRecord>, | ||||
|   newRecord: Partial<IRecord>, | ||||
|   objectMetadata: ObjectMetadataInterface, | ||||
| ) => { | ||||
|   const changedValues = Object.keys(newRecord).reduce( | ||||
|   | ||||
| @@ -39,7 +39,7 @@ export class SyncDriver implements MessageQueueDriver { | ||||
|     }); | ||||
|   } | ||||
|  | ||||
|   async removeCron(queueName: MessageQueue, jobName: string) { | ||||
|   async removeCron(queueName: MessageQueue) { | ||||
|     this.logger.log(`Removing '${queueName}' cron job with SyncDriver`); | ||||
|   } | ||||
|  | ||||
|   | ||||
| @@ -47,7 +47,7 @@ export class ParticipantWorkspaceMemberListener { | ||||
|     payload: ObjectRecordUpdateEvent<WorkspaceMemberWorkspaceEntity>, | ||||
|   ) { | ||||
|     if ( | ||||
|       objectRecordUpdateEventChangedProperties( | ||||
|       objectRecordUpdateEventChangedProperties<WorkspaceMemberWorkspaceEntity>( | ||||
|         payload.properties.before, | ||||
|         payload.properties.after, | ||||
|       ).includes('userEmail') | ||||
|   | ||||
| @@ -2,6 +2,8 @@ import { Injectable } from '@nestjs/common'; | ||||
|  | ||||
| import { EntityManager } from 'typeorm'; | ||||
|  | ||||
| import { Record } from 'src/engine/api/graphql/workspace-query-builder/interfaces/record.interface'; | ||||
|  | ||||
| import { WorkspaceDataSourceService } from 'src/engine/workspace-datasource/workspace-datasource.service'; | ||||
| import { objectRecordDiffMerge } from 'src/engine/integrations/event-emitter/utils/object-record-diff-merge'; | ||||
|  | ||||
| @@ -13,7 +15,7 @@ export class TimelineActivityRepository { | ||||
|  | ||||
|   async upsertOne( | ||||
|     name: string, | ||||
|     properties: Record<string, any>, | ||||
|     properties: Partial<Record>, | ||||
|     objectName: string, | ||||
|     recordId: string, | ||||
|     workspaceId: string, | ||||
| @@ -103,7 +105,7 @@ export class TimelineActivityRepository { | ||||
|   private async updateTimelineActivity( | ||||
|     dataSourceSchema: string, | ||||
|     id: string, | ||||
|     properties: Record<string, any>, | ||||
|     properties: Partial<Record>, | ||||
|     workspaceMemberId: string | undefined, | ||||
|     workspaceId: string, | ||||
|   ) { | ||||
| @@ -119,7 +121,7 @@ export class TimelineActivityRepository { | ||||
|   private async insertTimelineActivity( | ||||
|     dataSourceSchema: string, | ||||
|     name: string, | ||||
|     properties: Record<string, any>, | ||||
|     properties: Partial<Record>, | ||||
|     objectName: string, | ||||
|     recordId: string, | ||||
|     workspaceMemberId: string | undefined, | ||||
| @@ -149,7 +151,7 @@ export class TimelineActivityRepository { | ||||
|     objectName: string, | ||||
|     activities: { | ||||
|       name: string; | ||||
|       properties: Record<string, any> | null; | ||||
|       properties: Partial<Record> | null; | ||||
|       workspaceMemberId: string | undefined; | ||||
|       recordId: string | null; | ||||
|       linkedRecordCachedName: string; | ||||
|   | ||||