Upsert endpoint and CSV import upsert (#5970)

This PR introduces an `upsert` parameter (alongside the existing `data`
param) for `createOne` and `createMany` mutations.

When upsert is set to `true`, the function will look for records with
the same id if an id was passed. If no id was passed, it will leverage
the existing duplicate check mechanism to find a duplicate. If a record
is found, then the function will perform an update instead of a create.

Unfortunately I had to remove some nice tests that existed on the args
factory. Those tests were mostly testing the duplication rule
generation logic but through a GraphQL angle. Since I moved the
duplication rule logic to a dedicated service, if I kept the tests but
mocked the service we wouldn't really be testing anything useful. The
right path would be to create new tests for this service that compare
the JSON output and not the GraphQL output but I chose not to work on
this as it's equivalent to rewriting the tests from scratch and I have
other competing priorities.
This commit is contained in:
Félix Malfait
2024-06-26 11:39:16 +02:00
committed by GitHub
parent 1736aee7ff
commit cf67ed09d0
43 changed files with 609 additions and 531 deletions

View File

@@ -1,5 +1,5 @@
module.exports = {
schema: process.env.REACT_APP_SERVER_BASE_URL + '/metadata',
schema: (process.env.REACT_APP_SERVER_BASE_URL ?? 'http://localhost:3000') + '/metadata',
documents: [
'./src/modules/databases/graphql/**/*.ts',
'./src/modules/object-metadata/graphql/*.ts',

View File

@@ -1,5 +1,5 @@
module.exports = {
schema: process.env.REACT_APP_SERVER_BASE_URL + '/graphql',
schema: (process.env.REACT_APP_SERVER_BASE_URL ?? 'http://localhost:3000') + '/graphql',
documents: [
'!./src/modules/databases/**',
'!./src/modules/object-metadata/**',

View File

@@ -291,7 +291,9 @@ export type Mutation = {
deleteCurrentWorkspace: Workspace;
deleteOneObject: Object;
deleteUser: User;
disablePostgresProxy: PostgresCredentials;
emailPasswordResetLink: EmailPasswordResetLink;
enablePostgresProxy: PostgresCredentials;
exchangeAuthorizationCode: ExchangeAuthCode;
generateApiKeyToken: ApiKeyToken;
generateJWT: AuthTokens;
@@ -483,6 +485,14 @@ export type PageInfo = {
startCursor?: Maybe<Scalars['ConnectionCursor']>;
};
export type PostgresCredentials = {
__typename?: 'PostgresCredentials';
id: Scalars['UUID'];
password: Scalars['String'];
user: Scalars['String'];
workspaceId: Scalars['String'];
};
export type ProductPriceEntity = {
__typename?: 'ProductPriceEntity';
created: Scalars['Float'];
@@ -506,6 +516,7 @@ export type Query = {
currentUser: User;
currentWorkspace: Workspace;
findWorkspaceFromInviteHash: Workspace;
getPostgresCredentials?: Maybe<PostgresCredentials>;
getProductPrices: ProductPricesEntity;
getTimelineCalendarEventsFromCompanyId: TimelineCalendarEventsWithTotal;
getTimelineCalendarEventsFromPersonId: TimelineCalendarEventsWithTotal;
@@ -1061,8 +1072,6 @@ export type GetTimelineThreadsFromPersonIdQueryVariables = Exact<{
export type GetTimelineThreadsFromPersonIdQuery = { __typename?: 'Query', getTimelineThreadsFromPersonId: { __typename?: 'TimelineThreadsWithTotal', totalNumberOfThreads: number, timelineThreads: Array<{ __typename?: 'TimelineThread', id: any, read: boolean, visibility: MessageChannelVisibility, lastMessageReceivedAt: string, lastMessageBody: string, subject: string, numberOfMessagesInThread: number, participantCount: number, firstParticipant: { __typename?: 'TimelineThreadParticipant', personId?: any | null, workspaceMemberId?: any | null, firstName: string, lastName: string, displayName: string, avatarUrl: string, handle: string }, lastTwoParticipants: Array<{ __typename?: 'TimelineThreadParticipant', personId?: any | null, workspaceMemberId?: any | null, firstName: string, lastName: string, displayName: string, avatarUrl: string, handle: string }> }> } };
export type TimelineThreadFragment = { __typename?: 'TimelineThread', id: any, subject: string, lastMessageReceivedAt: string };
export type TrackMutationVariables = Exact<{
type: Scalars['String'];
data: Scalars['JSON'];
@@ -1364,13 +1373,6 @@ export const TimelineThreadsWithTotalFragmentFragmentDoc = gql`
}
}
${TimelineThreadFragmentFragmentDoc}`;
export const TimelineThreadFragmentDoc = gql`
fragment timelineThread on TimelineThread {
id
subject
lastMessageReceivedAt
}
`;
export const AuthTokenFragmentFragmentDoc = gql`
fragment AuthTokenFragment on AuthToken {
token

View File

@@ -0,0 +1,5 @@
import { RecordGqlConnection } from '@/object-record/graphql/types/RecordGqlConnection';
/**
 * Shape of a GraphQL find-duplicates query response.
 *
 * Keyed by the object's plural response field (e.g. `personDuplicates`);
 * the value is one {@link RecordGqlConnection} per input record id, so a
 * batched duplicate lookup returns an array of connections in the same
 * order as the requested ids — presumably; verify against the resolver.
 */
export type RecordGqlOperationFindDuplicatesResult = {
  [objectNamePlural: string]: RecordGqlConnection[];
};

View File

@@ -3,8 +3,8 @@ import { gql } from '@apollo/client';
import { Person } from '@/people/types/Person';
export const query = gql`
mutation CreatePeople($data: [PersonCreateInput!]!) {
createPeople(data: $data) {
mutation CreatePeople($data: [PersonCreateInput!]!, $upsert: Boolean) {
createPeople(data: $data, upsert: $upsert) {
__typename
xLink {
label

View File

@@ -4,8 +4,8 @@ import { getPeopleMock } from '~/testing/mock-data/people';
const peopleMock = getPeopleMock();
export const query = gql`
query FindDuplicatePerson($id: ID!) {
personDuplicates(id: $id) {
query FindDuplicatePerson($ids: [ID!]!) {
personDuplicates(ids: $ids) {
edges {
node {
__typename
@@ -38,32 +38,32 @@ export const query = gql`
startCursor
endCursor
}
totalCount
}
}
`;
export const variables = {
id: '6205681e-7c11-40b4-9e32-f523dbe54590',
ids: ['6205681e-7c11-40b4-9e32-f523dbe54590'],
};
export const responseData = {
personDuplicates: {
edges: [
{
node: { ...peopleMock[0], updatedAt: '' },
cursor: 'cursor1',
personDuplicates: [
{
edges: [
{
node: { ...peopleMock[0], updatedAt: '' },
cursor: 'cursor1',
},
{
node: { ...peopleMock[1], updatedAt: '' },
cursor: 'cursor2',
},
],
pageInfo: {
hasNextPage: false,
startCursor: 'cursor1',
endCursor: 'cursor2',
},
{
node: { ...peopleMock[1], updatedAt: '' },
cursor: 'cursor2',
},
],
pageInfo: {
hasNextPage: false,
startCursor: 'cursor1',
endCursor: 'cursor2',
},
totalCount: 2,
},
],
};

View File

@@ -5,8 +5,8 @@ import { RecoilRoot } from 'recoil';
import { useCreateManyRecordsMutation } from '@/object-record/hooks/useCreateManyRecordsMutation';
const expectedQueryTemplate = `
mutation CreatePeople($data: [PersonCreateInput!]!) {
createPeople(data: $data) {
mutation CreatePeople($data: [PersonCreateInput!]!, $upsert: Boolean) {
createPeople(data: $data, upsert: $upsert) {
__typename
xLink {
label

View File

@@ -42,7 +42,7 @@ describe('useFindDuplicateRecords', () => {
const { result } = renderHook(
() =>
useFindDuplicateRecords({
objectRecordId,
objectRecordIds: [objectRecordId],
objectNameSingular,
}),
{
@@ -54,7 +54,7 @@ describe('useFindDuplicateRecords', () => {
await waitFor(() => {
expect(result.current.loading).toBe(false);
expect(result.current.records).toBeDefined();
expect(result.current.results).toBeDefined();
});
expect(mocks[0].result).toHaveBeenCalled();

View File

@@ -5,8 +5,8 @@ import { RecoilRoot } from 'recoil';
import { useFindDuplicateRecordsQuery } from '@/object-record/hooks/useFindDuplicatesRecordsQuery';
const expectedQueryTemplate = `
query FindDuplicatePerson($id: ID!) {
personDuplicates(id: $id) {
query FindDuplicatePerson($ids: [ID!]!) {
personDuplicates(ids: $ids) {
edges {
node {
__typename
@@ -39,7 +39,6 @@ const expectedQueryTemplate = `
startCursor
endCursor
}
totalCount
}
}
`.replace(/\s/g, '');

View File

@@ -49,10 +49,11 @@ export const useCreateManyRecords = <
const createManyRecords = async (
recordsToCreate: Partial<CreatedObjectRecord>[],
upsert?: boolean,
) => {
const sanitizedCreateManyRecordsInput = recordsToCreate.map(
(recordToCreate) => {
const idForCreation = recordToCreate?.id ?? v4();
const idForCreation = recordToCreate?.id ?? (upsert ? undefined : v4());
return {
...sanitizeRecordInput({
@@ -67,8 +68,12 @@ export const useCreateManyRecords = <
const recordsCreatedInCache = [];
for (const recordToCreate of sanitizedCreateManyRecordsInput) {
if (recordToCreate.id === null) {
continue;
}
const recordCreatedInCache = createOneRecordInCache({
...recordToCreate,
...(recordToCreate as { id: string }),
__typename: getObjectTypename(objectMetadataItem.nameSingular),
});
@@ -94,6 +99,7 @@ export const useCreateManyRecords = <
mutation: createManyRecordsMutation,
variables: {
data: sanitizedCreateManyRecordsInput,
upsert: upsert,
},
update: (cache, { data }) => {
const records = data?.[mutationResponseField];

View File

@@ -34,12 +34,16 @@ export const useCreateManyRecordsMutation = ({
const createManyRecordsMutation = gql`
mutation Create${capitalize(
objectMetadataItem.namePlural,
)}($data: [${capitalize(objectMetadataItem.nameSingular)}CreateInput!]!) {
${mutationResponseField}(data: $data) ${mapObjectMetadataToGraphQLQuery({
objectMetadataItems,
objectMetadataItem,
recordGqlFields,
})}
)}($data: [${capitalize(
objectMetadataItem.nameSingular,
)}CreateInput!]!, $upsert: Boolean) {
${mutationResponseField}(data: $data, upsert: $upsert) ${mapObjectMetadataToGraphQLQuery(
{
objectMetadataItems,
objectMetadataItem,
recordGqlFields,
},
)}
}`;
return {

View File

@@ -5,7 +5,7 @@ import { useObjectMetadataItem } from '@/object-metadata/hooks/useObjectMetadata
import { ObjectMetadataItemIdentifier } from '@/object-metadata/types/ObjectMetadataItemIdentifier';
import { getRecordsFromRecordConnection } from '@/object-record/cache/utils/getRecordsFromRecordConnection';
import { RecordGqlConnection } from '@/object-record/graphql/types/RecordGqlConnection';
import { RecordGqlOperationFindManyResult } from '@/object-record/graphql/types/RecordGqlOperationFindManyResult';
import { RecordGqlOperationFindDuplicatesResult } from '@/object-record/graphql/types/RecordGqlOperationFindDuplicatesResults';
import { useFindDuplicateRecordsQuery } from '@/object-record/hooks/useFindDuplicatesRecordsQuery';
import { ObjectRecord } from '@/object-record/types/ObjectRecord';
import { getFindDuplicateRecordsQueryResponseField } from '@/object-record/utils/getFindDuplicateRecordsQueryResponseField';
@@ -14,12 +14,12 @@ import { useSnackBar } from '@/ui/feedback/snack-bar-manager/hooks/useSnackBar';
import { logError } from '~/utils/logError';
export const useFindDuplicateRecords = <T extends ObjectRecord = ObjectRecord>({
objectRecordId = '',
objectRecordIds = [],
objectNameSingular,
onCompleted,
}: ObjectMetadataItemIdentifier & {
objectRecordId: string | undefined;
onCompleted?: (data: RecordGqlConnection) => void;
objectRecordIds: string[] | undefined;
onCompleted?: (data: RecordGqlConnection[]) => void;
skip?: boolean;
}) => {
const findDuplicateQueryStateIdentifier = objectNameSingular;
@@ -38,46 +38,48 @@ export const useFindDuplicateRecords = <T extends ObjectRecord = ObjectRecord>({
objectMetadataItem.nameSingular,
);
const { data, loading, error } = useQuery<RecordGqlOperationFindManyResult>(
findDuplicateRecordsQuery,
{
variables: {
id: objectRecordId,
const { data, loading, error } =
useQuery<RecordGqlOperationFindDuplicatesResult>(
findDuplicateRecordsQuery,
{
variables: {
ids: objectRecordIds,
},
onCompleted: (data) => {
onCompleted?.(data[queryResponseField]);
},
onError: (error) => {
logError(
`useFindDuplicateRecords for "${objectMetadataItem.nameSingular}" error : ` +
error,
);
enqueueSnackBar(
`Error during useFindDuplicateRecords for "${objectMetadataItem.nameSingular}", ${error.message}`,
{
variant: SnackBarVariant.Error,
},
);
},
},
onCompleted: (data) => {
onCompleted?.(data[queryResponseField]);
},
onError: (error) => {
logError(
`useFindDuplicateRecords for "${objectMetadataItem.nameSingular}" error : ` +
error,
);
enqueueSnackBar(
`Error during useFindDuplicateRecords for "${objectMetadataItem.nameSingular}", ${error.message}`,
{
variant: SnackBarVariant.Error,
},
);
},
},
);
);
const objectRecordConnection = data?.[queryResponseField];
const objectResults = data?.[queryResponseField];
const records = useMemo(
const results = useMemo(
() =>
objectRecordConnection
? (getRecordsFromRecordConnection({
recordConnection: objectRecordConnection,
}) as T[])
: [],
[objectRecordConnection],
objectResults?.map((result: RecordGqlConnection) => {
return result
? (getRecordsFromRecordConnection({
recordConnection: result,
}) as T[])
: [];
}),
[objectResults],
);
return {
objectMetadataItem,
records,
totalCount: objectRecordConnection?.totalCount,
results,
loading,
error,
queryStateIdentifier: findDuplicateQueryStateIdentifier,

View File

@@ -22,10 +22,10 @@ export const useFindDuplicateRecordsQuery = ({
const findDuplicateRecordsQuery = gql`
query FindDuplicate${capitalize(
objectMetadataItem.nameSingular,
)}($id: ID!) {
)}($ids: [ID!]!) {
${getFindDuplicateRecordsQueryResponseField(
objectMetadataItem.nameSingular,
)}(id: $id) {
)}(ids: $ids) {
edges {
node ${mapObjectMetadataToGraphQLQuery({
objectMetadataItems,
@@ -38,7 +38,6 @@ export const useFindDuplicateRecordsQuery = ({
startCursor
endCursor
}
${isAggregationEnabled(objectMetadataItem) ? 'totalCount' : ''}
}
}
`;

View File

@@ -12,18 +12,19 @@ export const RecordDetailDuplicatesSection = ({
objectRecordId: string;
objectNameSingular: string;
}) => {
const { records: duplicateRecords } = useFindDuplicateRecords({
objectRecordId,
const { results: queryResults } = useFindDuplicateRecords({
objectRecordIds: [objectRecordId],
objectNameSingular,
});
if (!duplicateRecords.length) return null;
if (!queryResults || !queryResults[0] || queryResults[0].length === 0)
return null;
return (
<RecordDetailSection>
<RecordDetailSectionHeader title="Duplicates" />
<RecordDetailRecordsList>
{duplicateRecords.slice(0, 5).map((duplicateRecord) => (
{queryResults[0].slice(0, 5).map((duplicateRecord) => (
<RecordDetailRecordsListItem key={duplicateRecord.id}>
<RecordChip
record={duplicateRecord}

View File

@@ -20,8 +20,11 @@ const companyMocks = [
{
request: {
query: gql`
mutation CreateCompanies($data: [CompanyCreateInput!]!) {
createCompanies(data: $data) {
mutation CreateCompanies(
$data: [CompanyCreateInput!]!
$upsert: Boolean
) {
createCompanies(data: $data, upsert: $upsert) {
__typename
xLink {
label
@@ -58,6 +61,7 @@ const companyMocks = [
id: companyId,
},
],
upsert: true,
},
},
result: jest.fn(() => ({

View File

@@ -26,7 +26,7 @@ export const useSpreadsheetRecordImport = (objectNameSingular: string) => {
.filter(
(x) =>
x.isActive &&
!x.isSystem &&
(!x.isSystem || x.name === 'id') &&
x.name !== 'createdAt' &&
(x.type !== FieldMetadataType.Relation || x.toRelationMetadata),
)
@@ -110,11 +110,15 @@ export const useSpreadsheetRecordImport = (objectNameSingular: string) => {
switch (field.type) {
case FieldMetadataType.Boolean:
fieldMapping[field.name] = value === 'true' || value === true;
if (value !== undefined) {
fieldMapping[field.name] = value === 'true' || value === true;
}
break;
case FieldMetadataType.Number:
case FieldMetadataType.Numeric:
fieldMapping[field.name] = Number(value);
if (value !== undefined) {
fieldMapping[field.name] = Number(value);
}
break;
case FieldMetadataType.Currency:
if (value !== undefined) {
@@ -154,14 +158,16 @@ export const useSpreadsheetRecordImport = (objectNameSingular: string) => {
}
break;
default:
fieldMapping[field.name] = value;
if (value !== undefined) {
fieldMapping[field.name] = value;
}
break;
}
}
return fieldMapping;
});
try {
await createManyRecords(createInputs);
await createManyRecords(createInputs, true);
} catch (error: any) {
enqueueSnackBar(error?.message || 'Something went wrong', {
variant: SnackBarVariant.Error,

View File

@@ -24,6 +24,7 @@ describe('formatToHumanReadableTime', () => {
it('should format the date to a human-readable time', () => {
const date = new Date('2022-01-01T12:30:00');
const result = formatToHumanReadableTime(date);
expect(result).toBe('12:30 PM');
// it seems when running locally on MacOS the space is not the same
expect(['12:30 PM', '12:30PM']).toContain(result);
});
});