10 changes: 10 additions & 0 deletions .changeset/red-boxes-float.md
@@ -0,0 +1,10 @@
---
'@rushdb/javascript-sdk': patch
'rushdb-core': patch
'rushdb-docs': patch
'@rushdb/mcp-server': patch
'rushdb-dashboard': patch
'rushdb-website': patch
---

Fix deduplication issue for nested upserts: relations created during JSON import are now remapped to persisted record IDs, so nested records matched by upsert are reused and linked instead of duplicated.
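
A minimal sketch of the scenario this patch covers, mirroring the new SDK e2e test (an initialized RushDB client `db` and the `tenantId` value are assumptions for illustration):

```ts
// Import the same nested payload twice. The nested `department` entry should be
// matched on the second upsert and linked to the new parent company rather than
// being created again.
const tenantId = 'example-tenant' // placeholder value for illustration
const company = {
  name: 'Abshire - Farrell',
  rating: 1.9,
  tenantId,
  department: [{ name: 'Sports', tenantId }]
}

await db.records.importJson({ label: 'Company', data: company, options: { suggestTypes: true } })
await db.records.importJson({
  label: 'Company',
  data: { ...company, rating: 2 },
  options: { suggestTypes: true, mergeBy: [] }
})
// Expected after the fix: two Company records, a single department record,
// and both companies linked to that one department.
```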
@@ -0,0 +1,86 @@
import path from 'path'
import dotenv from 'dotenv'

dotenv.config({ path: path.resolve(__dirname, '../.env') })

import RushDB from '../src/index.node'

jest.setTimeout(60_000)

describe('records.importJson upsert nested linking (e2e)', () => {
  const apiKey = process.env.RUSHDB_API_KEY
  const apiUrl = process.env.RUSHDB_API_URL || 'http://localhost:3000'

  if (!apiKey) {
    it('skips because RUSHDB_API_KEY is not set', () => {
      expect(true).toBe(true)
    })
    return
  }

  const db = new RushDB(apiKey, { url: apiUrl })

  it('reuses child record and links it to new parent when parent upsert creates a new record', async () => {
    const tenantId = `nested-upsert-${Date.now().toString(36)}-${Math.random().toString(36).slice(2, 8)}`

    const payloadA = {
      name: 'Abshire - Farrell',
      address: '928 Conroy Village Suite 785',
      foundedAt: '1949-10-06T22:07:28.709Z',
      rating: 1.9,
      tenantId,
      department: [
        {
          name: 'Sports',
          description: 'The sleek and filthy Gloves comes with sky blue LED lighting for smart functionality',
          tenantId
        }
      ]
    }

    const payloadB = {
      ...payloadA,
      rating: 2 // slight change should force new top-level record when mergeBy is all keys
    }

    // First import creates parent and child
    await db.records.importJson({ label: 'Company', data: payloadA, options: { suggestTypes: true } })

    // Second import should create new parent record but reuse existing child and link it
    await db.records.importJson({
      label: 'Company',
      data: payloadB,
      options: { suggestTypes: true, mergeBy: [] }
    })

    // Verify there are two Company records and one Department record linked to both via default relation
    const companies = await db.records.find({ labels: ['Company'], where: { tenantId } })
    expect(companies.total).toBe(2)

    const departments = await db.records.find({ labels: ['department'], where: { tenantId } })
    // The label comes from the original key 'department' in the payload; depending on the
    // service's capitalization option it may remain lowercase
    expect(departments.total).toBe(1)

    // Fetch relations and ensure both companies are connected to the same department
    const relationsResp = await db.relationships.find({ where: { tenantId } })
    const rels = relationsResp.data.filter(
      (r) =>
        r.type &&
        (r.type.includes('RUSHDB_DEFAULT_RELATION') || r.type.includes('__RUSHDB__RELATION__DEFAULT__'))
    )

    const departmentId = departments.data[0].id()
    const companyIds = companies.data.map((c) => c.id())

    // For each company, there must be at least one relation to the department (either direction)
    const relatedPairs = new Set(rels.map((r) => `${r.sourceId}->${r.targetId}`))
    for (const cid of companyIds) {
      const has = relatedPairs.has(`${cid}->${departmentId}`) || relatedPairs.has(`${departmentId}->${cid}`)
      expect(has).toBe(true)
    }

    // Cleanup
    await db.records.delete({ where: { tenantId } })
  })
})
8 changes: 6 additions & 2 deletions platform/core/src/core/entity/entity-query.service.ts
@@ -159,8 +159,10 @@ export class EntityQueryService

if (withResults) {
queryBuilder.append(
`RETURN collect(DISTINCT record {${PROPERTY_WILDCARD_PROJECTION}, ${label()}}) as data`
`RETURN collect({draftId: r.id, persistedId: record.${RUSHDB_KEY_ID}}) as idmap, collect(DISTINCT record {${PROPERTY_WILDCARD_PROJECTION}, ${label()}}) as data`
)
} else {
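// No projected results requested: still return the draft-to-persisted id map so the importer can remap relations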
queryBuilder.append(`RETURN collect({draftId: r.id, persistedId: record.${RUSHDB_KEY_ID}}) as idmap`)
}

return queryBuilder.getQuery()
@@ -220,8 +222,10 @@

if (withResults) {
queryBuilder.append(
`RETURN collect(DISTINCT record {${PROPERTY_WILDCARD_PROJECTION}, ${label()}}) as data`
`RETURN collect({draftId: r.id, persistedId: record.${RUSHDB_KEY_ID}}) as idmap, collect(DISTINCT record {${PROPERTY_WILDCARD_PROJECTION}, ${label()}}) as data`
)
} else {
queryBuilder.append(`RETURN collect({draftId: r.id, persistedId: record.${RUSHDB_KEY_ID}}) as idmap`)
}

return queryBuilder.getQuery()
28 changes: 25 additions & 3 deletions platform/core/src/core/entity/import-export/import.service.ts
@@ -307,6 +307,8 @@ export class ImportService

// @TODO: Accumulate result only if records <= 1000. Otherwise - ignore options.returnResult
let result = []
// Map draft record ids (generated during serialization) to actual persisted record ids after upsert/create
const draftToPersistedId = new Map<string, string>()
for (let i = 0; i < records.length; i += CHUNK_SIZE) {
const recordsChunk = records.slice(i, i + CHUNK_SIZE)

@@ -317,13 +319,33 @@
projectId
})

// Extract id map and results (if requested)
const idmap = data.records?.[0]?.get('idmap') ?? []
if (Array.isArray(idmap)) {
for (const item of idmap) {
if (item && item.draftId && item.persistedId) {
draftToPersistedId.set(item.draftId, item.persistedId)
}
}
}

if (options.returnResult) {
result = result.concat(data.records?.[0]?.get('data'))
const chunkData = data.records?.[0]?.get('data')
if (Array.isArray(chunkData)) {
result = result.concat(chunkData)
}
}
}

for (let i = 0; i < relations.length; i += CHUNK_SIZE) {
const relationsChunk = relations.slice(i, i + CHUNK_SIZE)
// Remap relations to persisted IDs in case upsert matched existing records
const remappedRelations = relations.map((rel) => ({
source: draftToPersistedId.get(rel.source) ?? rel.source,
target: draftToPersistedId.get(rel.target) ?? rel.target,
type: rel.type
}))

for (let i = 0; i < remappedRelations.length; i += CHUNK_SIZE) {
const relationsChunk = remappedRelations.slice(i, i + CHUNK_SIZE)
await this.processRelationshipsChunk({
relationsChunk,
projectId,