Mirror of https://github.com/we-promise/sure.git, synced 2026-04-19 12:04:08 +00:00.
Commit: Add transaction dedup support for CSV imports (#304)
* Support dedup for transactions also in CSV imports
* Fix to exclude duplicates during CSV importing
* Guard against nil account
This commit is contained in:
@@ -30,6 +30,7 @@ class Account::ProviderImportAdapter
|
||||
# If this is a new entry, check for potential duplicates from manual/CSV imports
|
||||
# This handles the case where a user manually created or CSV imported a transaction
|
||||
# before linking their account to a provider
|
||||
# Note: We don't pass name here to allow matching even when provider formats names differently
|
||||
if entry.new_record?
|
||||
duplicate = find_duplicate_transaction(date: date, amount: amount, currency: currency)
|
||||
if duplicate
|
||||
@@ -266,33 +267,41 @@ class Account::ProviderImportAdapter
|
||||
false
|
||||
end
|
||||
|
||||
private
|
||||
# Finds a potential duplicate transaction from manual entry or CSV import.
# Matches on date, amount, currency, and optionally name.
# Only matches transactions without external_id (manual/CSV imported).
#
# NOTE(review): the diff residue contained BOTH the old 3-argument version and
# this new version of the method; keeping both would let the old definition
# shadow this one and break callers passing name:/exclude_entry_ids:. Only the
# merged post-commit version is kept here.
#
# @param date [Date, String] Transaction date
# @param amount [BigDecimal, Numeric] Transaction amount
# @param currency [String] Currency code
# @param name [String, nil] Optional transaction name for more accurate matching
# @param exclude_entry_ids [Set, Array, nil] Entry IDs to exclude from the search (e.g., already claimed entries)
# @return [Entry, nil] The duplicate entry or nil if not found
def find_duplicate_transaction(date:, amount:, currency:, name: nil, exclude_entry_ids: nil)
  # Convert date to Date object if it's a string
  date = Date.parse(date.to_s) unless date.is_a?(Date)

  # Look for entries on the same account with:
  # 1. Same date
  # 2. Same amount (exact match)
  # 3. Same currency
  # 4. No external_id (manual/CSV imported transactions)
  # 5. Entry type is Transaction (not Trade or Valuation)
  # 6. Optionally same name (if name parameter is provided)
  # 7. Not in the excluded IDs list (if provided)
  query = account.entries
    .where(entryable_type: "Transaction")
    .where(date: date)
    .where(amount: amount)
    .where(currency: currency)
    .where(external_id: nil)

  # Add name filter if provided
  query = query.where(name: name) if name.present?

  # Exclude already claimed entries if provided
  query = query.where.not(id: exclude_entry_ids) if exclude_entry_ids.present?

  # Oldest matching entry wins, so repeated imports claim entries deterministically
  query.order(created_at: :asc).first
end
|
||||
end
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
class Import < ApplicationRecord
|
||||
MaxRowCountExceededError = Class.new(StandardError)
|
||||
MappingError = Class.new(StandardError)
|
||||
|
||||
TYPES = %w[TransactionImport TradeImport AccountImport MintImport].freeze
|
||||
SIGNAGE_CONVENTIONS = %w[inflows_positive inflows_negative]
|
||||
|
||||
@@ -3,32 +3,76 @@ class TransactionImport < Import
|
||||
transaction do
  mappings.each(&:create_mappable!)

  # NOTE(review): diff residue interleaved the removed implementation
  # (`transactions = rows.map`, an orphaned `Transaction.new(`, and a
  # `Transaction.import!(transactions, ...)` referencing the removed local)
  # with the new dedup-aware implementation. Only the post-commit version
  # is kept here.
  new_transactions = []
  updated_entries = []
  claimed_entry_ids = Set.new # Track entries we've already claimed in this import

  rows.each_with_index do |row, index|
    mapped_account = if account
      account
    else
      mappings.accounts.mappable_for(row.account)
    end

    # Guard against nil account - this happens when an account name in CSV is not mapped
    if mapped_account.nil?
      row_number = index + 1
      account_name = row.account.presence || "(blank)"
      error_message = "Row #{row_number}: Account '#{account_name}' is not mapped to an existing account. " \
        "Please map this account in the import configuration."
      errors.add(:base, error_message)
      raise Import::MappingError, error_message
    end

    category = mappings.categories.mappable_for(row.category)
    tags = row.tags_list.map { |tag| mappings.tags.mappable_for(tag) }.compact

    # Check for duplicate transactions using the adapter's deduplication logic.
    # Pass claimed_entry_ids to exclude entries we've already matched in this import;
    # this ensures identical rows within the CSV are all imported as separate transactions.
    adapter = Account::ProviderImportAdapter.new(mapped_account)
    duplicate_entry = adapter.find_duplicate_transaction(
      date: row.date_iso,
      amount: row.signed_amount,
      currency: row.currency,
      name: row.name,
      exclude_entry_ids: claimed_entry_ids
    )

    if duplicate_entry
      # Update existing transaction instead of creating a new one
      duplicate_entry.transaction.category = category if category.present?
      duplicate_entry.transaction.tags = tags if tags.any?
      duplicate_entry.notes = row.notes if row.notes.present?
      duplicate_entry.import = self
      updated_entries << duplicate_entry
      claimed_entry_ids.add(duplicate_entry.id)
    else
      # Create new transaction (no duplicate found)
      new_transactions << Transaction.new(
        category: category,
        tags: tags,
        entry: Entry.new(
          account: mapped_account,
          date: row.date_iso,
          amount: row.signed_amount,
          name: row.name,
          currency: row.currency,
          notes: row.notes,
          import: self
        )
      )
    end
  end

  # Save updated entries first
  updated_entries.each do |entry|
    entry.transaction.save!
    entry.save!
  end

  # Bulk import new transactions
  Transaction.import!(new_transactions, recursive: true) if new_transactions.any?
end
|
||||
end
|
||||
|
||||
|
||||
Reference in New Issue
Block a user