Merge pull request #45263 from frappe/version-15-hotfix

chore: release v15
This commit is contained in:
rohitwaghchaure
2025-01-15 17:57:26 +05:30
committed by GitHub
52 changed files with 783 additions and 1770 deletions

View File

@@ -40,9 +40,13 @@
"show_payment_schedule_in_print",
"currency_exchange_section",
"allow_stale",
"column_break_yuug",
"stale_days",
"section_break_jpd0",
"auto_reconcile_payments",
"stale_days",
"auto_reconciliation_job_trigger",
"reconciliation_queue_size",
"column_break_resa",
"invoicing_settings_tab",
"accounts_transactions_settings_section",
"over_billing_allowance",
@@ -489,6 +493,28 @@
"fieldname": "create_pr_in_draft_status",
"fieldtype": "Check",
"label": "Create in Draft Status"
},
{
"fieldname": "column_break_yuug",
"fieldtype": "Column Break"
},
{
"fieldname": "column_break_resa",
"fieldtype": "Column Break"
},
{
"default": "15",
"description": "Interval should be between 1 to 59 MInutes",
"fieldname": "auto_reconciliation_job_trigger",
"fieldtype": "Int",
"label": "Auto Reconciliation Job Trigger"
},
{
"default": "5",
"description": "Documents Processed on each trigger. Queue Size should be between 5 and 100",
"fieldname": "reconciliation_queue_size",
"fieldtype": "Int",
"label": "Reconciliation Queue Size"
}
],
"icon": "icon-cog",
@@ -496,7 +522,7 @@
"index_web_pages_for_search": 1,
"issingle": 1,
"links": [],
"modified": "2024-07-26 06:48:52.714630",
"modified": "2025-01-13 17:38:39.661320",
"modified_by": "Administrator",
"module": "Accounts",
"name": "Accounts Settings",

View File

@@ -10,6 +10,7 @@ from frappe.custom.doctype.property_setter.property_setter import make_property_
from frappe.model.document import Document
from frappe.utils import cint
from erpnext.accounts.utils import sync_auto_reconcile_config
from erpnext.stock.utils import check_pending_reposting
@@ -27,6 +28,7 @@ class AccountsSettings(Document):
allow_multi_currency_invoices_against_single_party_account: DF.Check
allow_stale: DF.Check
auto_reconcile_payments: DF.Check
auto_reconciliation_job_trigger: DF.Int
automatically_fetch_payment_terms: DF.Check
automatically_process_deferred_accounting_entry: DF.Check
book_asset_depreciation_entry_automatically: DF.Check
@@ -51,6 +53,7 @@ class AccountsSettings(Document):
over_billing_allowance: DF.Currency
post_change_gl_entries: DF.Check
receivable_payable_remarks_length: DF.Int
reconciliation_queue_size: DF.Int
role_allowed_to_over_bill: DF.Link | None
round_row_wise_tax: DF.Check
show_balance_in_coa: DF.Check
@@ -90,6 +93,8 @@ class AccountsSettings(Document):
if clear_cache:
frappe.clear_cache()
self.validate_and_sync_auto_reconcile_config()
def validate_stale_days(self):
if not self.allow_stale and cint(self.stale_days) <= 0:
frappe.msgprint(
@@ -114,3 +119,17 @@ class AccountsSettings(Document):
def validate_pending_reposts(self):
if self.acc_frozen_upto:
check_pending_reposting(self.acc_frozen_upto)
def validate_and_sync_auto_reconcile_config(self):
if self.has_value_changed("auto_reconciliation_job_trigger"):
if (
cint(self.auto_reconciliation_job_trigger) > 0
and cint(self.auto_reconciliation_job_trigger) < 60
):
sync_auto_reconcile_config(self.auto_reconciliation_job_trigger)
else:
frappe.throw(_("Cron Interval should be between 1 and 59 Min"))
if self.has_value_changed("reconciliation_queue_size"):
if cint(self.reconciliation_queue_size) < 5 or cint(self.reconciliation_queue_size) > 100:
frappe.throw(_("Queue Size should be between 5 and 100"))

View File

@@ -21,6 +21,7 @@
"party_name",
"book_advance_payments_in_separate_party_account",
"reconcile_on_advance_payment_date",
"advance_reconciliation_takes_effect_on",
"column_break_11",
"bank_account",
"party_bank_account",
@@ -782,6 +783,16 @@
"options": "No\nYes",
"print_hide": 1,
"search_index": 1
},
{
"default": "Oldest Of Invoice Or Advance",
"fetch_from": "company.reconciliation_takes_effect_on",
"fieldname": "advance_reconciliation_takes_effect_on",
"fieldtype": "Select",
"hidden": 1,
"label": "Advance Reconciliation Takes Effect On",
"no_copy": 1,
"options": "Advance Payment Date\nOldest Of Invoice Or Advance\nReconciliation Date"
}
],
"index_web_pages_for_search": 1,
@@ -795,7 +806,7 @@
"table_fieldname": "payment_entries"
}
],
"modified": "2024-11-07 11:19:19.320883",
"modified": "2025-01-13 16:03:47.169699",
"modified_by": "Administrator",
"module": "Accounts",
"name": "Payment Entry",

View File

@@ -1401,16 +1401,26 @@ class PaymentEntry(AccountsController):
"voucher_detail_no": invoice.name,
}
if self.reconcile_on_advance_payment_date:
posting_date = self.posting_date
if invoice.reconcile_effect_on:
posting_date = invoice.reconcile_effect_on
else:
date_field = "posting_date"
if invoice.reference_doctype in ["Sales Order", "Purchase Order"]:
date_field = "transaction_date"
posting_date = frappe.db.get_value(invoice.reference_doctype, invoice.reference_name, date_field)
if getdate(posting_date) < getdate(self.posting_date):
# For backwards compatibility
# Supporting reposting on payment entries reconciled before select field introduction
if self.advance_reconciliation_takes_effect_on == "Advance Payment Date":
posting_date = self.posting_date
elif self.advance_reconciliation_takes_effect_on == "Oldest Of Invoice Or Advance":
date_field = "posting_date"
if invoice.reference_doctype in ["Sales Order", "Purchase Order"]:
date_field = "transaction_date"
posting_date = frappe.db.get_value(
invoice.reference_doctype, invoice.reference_name, date_field
)
if getdate(posting_date) < getdate(self.posting_date):
posting_date = self.posting_date
elif self.advance_reconciliation_takes_effect_on == "Reconciliation Date":
posting_date = nowdate()
frappe.db.set_value("Payment Entry Reference", invoice.name, "reconcile_effect_on", posting_date)
dr_or_cr, account = self.get_dr_and_account_for_advances(invoice)
args_dict["account"] = account

View File

@@ -13,6 +13,7 @@
"payment_term_outstanding",
"account_type",
"payment_type",
"reconcile_effect_on",
"column_break_4",
"total_amount",
"outstanding_amount",
@@ -144,12 +145,18 @@
"is_virtual": 1,
"label": "Payment Request Outstanding",
"read_only": 1
},
{
"fieldname": "reconcile_effect_on",
"fieldtype": "Date",
"label": "Reconcile Effect On",
"read_only": 1
}
],
"index_web_pages_for_search": 1,
"istable": 1,
"links": [],
"modified": "2024-09-16 18:11:50.019343",
"modified": "2025-01-13 15:56:18.895082",
"modified_by": "Administrator",
"module": "Accounts",
"name": "Payment Entry Reference",

View File

@@ -30,6 +30,7 @@ class PaymentEntryReference(Document):
payment_term: DF.Link | None
payment_term_outstanding: DF.Float
payment_type: DF.Data | None
reconcile_effect_on: DF.Date | None
reference_doctype: DF.Link
reference_name: DF.DynamicLink
total_amount: DF.Float

View File

@@ -6,6 +6,7 @@ import frappe
from frappe import qb
from frappe.tests.utils import FrappeTestCase, change_settings
from frappe.utils import add_days, add_years, flt, getdate, nowdate, today
from frappe.utils.data import getdate as convert_to_date
from erpnext import get_default_cost_center
from erpnext.accounts.doctype.payment_entry.payment_entry import get_payment_entry
@@ -1671,7 +1672,7 @@ class TestPaymentReconciliation(FrappeTestCase):
{
"book_advance_payments_in_separate_party_account": 1,
"default_advance_paid_account": self.advance_payable_account,
"reconcile_on_advance_payment_date": 1,
"reconciliation_takes_effect_on": "Advance Payment Date",
},
)
@@ -1720,7 +1721,7 @@ class TestPaymentReconciliation(FrappeTestCase):
{
"book_advance_payments_in_separate_party_account": 1,
"default_advance_received_account": self.advance_receivable_account,
"reconcile_on_advance_payment_date": 0,
"reconciliation_takes_effect_on": "Oldest Of Invoice Or Advance",
},
)
amount = 200.0
@@ -1829,7 +1830,7 @@ class TestPaymentReconciliation(FrappeTestCase):
{
"book_advance_payments_in_separate_party_account": 1,
"default_advance_paid_account": self.advance_payable_account,
"reconcile_on_advance_payment_date": 0,
"reconciliation_takes_effect_on": "Oldest Of Invoice Or Advance",
},
)
amount = 200.0
@@ -2048,6 +2049,102 @@ class TestPaymentReconciliation(FrappeTestCase):
self.assertEqual(pr.get("invoices"), [])
self.assertEqual(pr.get("payments"), [])
def test_advance_reconciliation_effect_on_same_date(self):
frappe.db.set_value(
"Company",
self.company,
{
"book_advance_payments_in_separate_party_account": 1,
"default_advance_received_account": self.advance_receivable_account,
"reconciliation_takes_effect_on": "Reconciliation Date",
},
)
inv_date = convert_to_date(add_days(nowdate(), -1))
adv_date = convert_to_date(add_days(nowdate(), -2))
si = self.create_sales_invoice(posting_date=inv_date, qty=1, rate=200)
pe = self.create_payment_entry(posting_date=adv_date, amount=80).save().submit()
pr = self.create_payment_reconciliation()
pr.from_invoice_date = add_days(nowdate(), -1)
pr.to_invoice_date = nowdate()
pr.from_payment_date = add_days(nowdate(), -2)
pr.to_payment_date = nowdate()
pr.default_advance_account = self.advance_receivable_account
# reconcile multiple payments against invoice
pr.get_unreconciled_entries()
invoices = [x.as_dict() for x in pr.get("invoices")]
payments = [x.as_dict() for x in pr.get("payments")]
pr.allocate_entries(frappe._dict({"invoices": invoices, "payments": payments}))
# Difference amount should not be calculated for base currency accounts
for row in pr.allocation:
self.assertEqual(flt(row.get("difference_amount")), 0.0)
pr.reconcile()
si.reload()
self.assertEqual(si.status, "Partly Paid")
# check PR tool output post reconciliation
self.assertEqual(len(pr.get("invoices")), 1)
self.assertEqual(pr.get("invoices")[0].get("outstanding_amount"), 120)
self.assertEqual(pr.get("payments"), [])
# Assert Ledger Entries
gl_entries = frappe.db.get_all(
"GL Entry",
filters={"voucher_no": pe.name},
fields=["account", "posting_date", "voucher_no", "against_voucher", "debit", "credit"],
order_by="account, against_voucher, debit",
)
expected_gl = [
{
"account": self.advance_receivable_account,
"posting_date": adv_date,
"voucher_no": pe.name,
"against_voucher": pe.name,
"debit": 0.0,
"credit": 80.0,
},
{
"account": self.advance_receivable_account,
"posting_date": convert_to_date(nowdate()),
"voucher_no": pe.name,
"against_voucher": pe.name,
"debit": 80.0,
"credit": 0.0,
},
{
"account": self.debit_to,
"posting_date": convert_to_date(nowdate()),
"voucher_no": pe.name,
"against_voucher": si.name,
"debit": 0.0,
"credit": 80.0,
},
{
"account": self.bank,
"posting_date": adv_date,
"voucher_no": pe.name,
"against_voucher": None,
"debit": 80.0,
"credit": 0.0,
},
]
self.assertEqual(expected_gl, gl_entries)
# cancel PE
pe.reload()
pe.cancel()
pr.get_unreconciled_entries()
# check PR tool output
self.assertEqual(len(pr.get("invoices")), 1)
self.assertEqual(len(pr.get("payments")), 0)
self.assertEqual(pr.get("invoices")[0].get("outstanding_amount"), 200)
def make_customer(customer_name, currency=None):
if not frappe.db.exists("Customer", customer_name):

View File

@@ -210,7 +210,7 @@ def trigger_reconciliation_for_queued_docs():
docs_to_trigger = []
unique_filters = set()
queue_size = 5
queue_size = frappe.db.get_single_value("Accounts Settings", "reconciliation_queue_size") or 5
fields = ["company", "party_type", "party", "receivable_payable_account", "default_advance_account"]

View File

@@ -1623,7 +1623,7 @@
{
"default": "1",
"depends_on": "eval: doc.is_return && doc.return_against",
"description": "Debit Note will update it's own outstanding amount, even if \"Return Against\" is specified.",
"description": "Debit Note will update it's own outstanding amount, even if 'Return Against' is specified.",
"fieldname": "update_outstanding_for_self",
"fieldtype": "Check",
"label": "Update Outstanding for Self"
@@ -1633,7 +1633,7 @@
"idx": 204,
"is_submittable": 1,
"links": [],
"modified": "2024-10-25 18:13:01.944477",
"modified": "2025-01-14 11:39:04.564610",
"modified_by": "Administrator",
"module": "Accounts",
"name": "Purchase Invoice",

View File

@@ -2161,7 +2161,7 @@
{
"default": "1",
"depends_on": "eval: doc.is_return && doc.return_against",
"description": "Credit Note will update it's own outstanding amount, even if \"Return Against\" is specified.",
"description": "Credit Note will update it's own outstanding amount, even if 'Return Against' is specified.",
"fieldname": "update_outstanding_for_self",
"fieldtype": "Check",
"label": "Update Outstanding for Self",
@@ -2186,7 +2186,7 @@
"link_fieldname": "consolidated_invoice"
}
],
"modified": "2024-11-26 12:34:09.110690",
"modified": "2025-01-14 11:38:30.446370",
"modified_by": "Administrator",
"module": "Accounts",
"name": "Sales Invoice",

View File

@@ -365,7 +365,7 @@ class SalesInvoice(SellingController):
if self.update_stock:
frappe.throw(_("'Update Stock' cannot be checked for fixed asset sale"))
elif asset.status in ("Scrapped", "Cancelled", "Capitalized", "Decapitalized") or (
elif asset.status in ("Scrapped", "Cancelled", "Capitalized") or (
asset.status == "Sold" and not self.is_return
):
frappe.throw(

View File

@@ -697,7 +697,7 @@ class Subscription(Document):
self.status = "Cancelled"
self.cancelation_date = nowdate()
if to_generate_invoice:
if to_generate_invoice and self.cancelation_date >= self.current_invoice_start:
self.generate_invoice(self.current_invoice_start, self.cancelation_date)
self.save()

View File

@@ -555,9 +555,11 @@ def get_tds_amount(ldc, parties, inv, tax_details, vouchers):
else:
tax_withholding_net_total = inv.get("tax_withholding_net_total", 0)
if (threshold and tax_withholding_net_total >= threshold) or (
has_cumulative_threshold_breached = (
cumulative_threshold and (supp_credit_amt + supp_inv_credit_amt) >= cumulative_threshold
):
)
if (threshold and tax_withholding_net_total >= threshold) or (has_cumulative_threshold_breached):
# Get net total again as TDS is calculated on net total
# Grand is used to just check for threshold breach
net_total = (
@@ -565,9 +567,7 @@ def get_tds_amount(ldc, parties, inv, tax_details, vouchers):
)
supp_credit_amt += net_total
if (cumulative_threshold and supp_credit_amt >= cumulative_threshold) and cint(
tax_details.tax_on_excess_amount
):
if has_cumulative_threshold_breached and cint(tax_details.tax_on_excess_amount):
supp_credit_amt = net_total + tax_withholding_net_total - cumulative_threshold
if ldc and is_valid_certificate(ldc, inv.get("posting_date") or inv.get("transaction_date"), 0):

View File

@@ -318,7 +318,7 @@ def get_columns(additional_table_columns, filters):
"width": 100,
},
{
"label": _("Tax Rate"),
"label": _("Rate"),
"fieldname": "rate",
"fieldtype": "Float",
"options": "currency",

View File

@@ -712,6 +712,23 @@ def update_reference_in_payment_entry(
}
update_advance_paid = []
# Update Reconciliation effect date in reference
if payment_entry.book_advance_payments_in_separate_party_account:
if payment_entry.advance_reconciliation_takes_effect_on == "Advance Payment Date":
reconcile_on = payment_entry.posting_date
elif payment_entry.advance_reconciliation_takes_effect_on == "Oldest Of Invoice Or Advance":
date_field = "posting_date"
if d.against_voucher_type in ["Sales Order", "Purchase Order"]:
date_field = "transaction_date"
reconcile_on = frappe.db.get_value(d.against_voucher_type, d.against_voucher, date_field)
if getdate(reconcile_on) < getdate(payment_entry.posting_date):
reconcile_on = payment_entry.posting_date
elif payment_entry.advance_reconciliation_takes_effect_on == "Reconciliation Date":
reconcile_on = nowdate()
reference_details.update({"reconcile_effect_on": reconcile_on})
if d.voucher_detail_no:
existing_row = payment_entry.get("references", {"name": d["voucher_detail_no"]})[0]
@@ -2233,3 +2250,38 @@ def run_ledger_health_checks():
doc.general_and_payment_ledger_mismatch = True
doc.checked_on = run_date
doc.save()
def sync_auto_reconcile_config(auto_reconciliation_job_trigger: int = 15):
auto_reconciliation_job_trigger = auto_reconciliation_job_trigger or frappe.db.get_single_value(
"Accounts Settings", "auto_reconciliation_job_trigger"
)
method = "erpnext.accounts.doctype.process_payment_reconciliation.process_payment_reconciliation.trigger_reconciliation_for_queued_docs"
sch_event = frappe.get_doc(
"Scheduler Event", {"scheduled_against": "Process Payment Reconciliation", "method": method}
)
if frappe.db.get_value("Scheduled Job Type", {"method": method}):
frappe.get_doc(
"Scheduled Job Type",
{
"method": method,
},
).update(
{
"cron_format": f"0/{auto_reconciliation_job_trigger} * * * *",
"scheduler_event": sch_event.name,
}
).save()
else:
frappe.get_doc(
{
"doctype": "Scheduled Job Type",
"method": method,
"scheduler_event": sch_event.name,
"cron_format": f"0/{auto_reconciliation_job_trigger} * * * *",
"create_log": True,
"stopped": False,
"frequency": "Cron",
}
).save()

View File

@@ -378,7 +378,7 @@
"in_standard_filter": 1,
"label": "Status",
"no_copy": 1,
"options": "Draft\nSubmitted\nPartially Depreciated\nFully Depreciated\nSold\nScrapped\nIn Maintenance\nOut of Order\nIssue\nReceipt\nCapitalized\nDecapitalized\nWork In Progress",
"options": "Draft\nSubmitted\nPartially Depreciated\nFully Depreciated\nSold\nScrapped\nIn Maintenance\nOut of Order\nIssue\nReceipt\nCapitalized\nWork In Progress",
"read_only": 1
},
{

View File

@@ -111,7 +111,6 @@ class Asset(AccountsController):
"Issue",
"Receipt",
"Capitalized",
"Decapitalized",
"Work In Progress",
]
supplier: DF.Link | None

View File

@@ -10,8 +10,8 @@ frappe.listview_settings["Asset"] = {
return [__("Sold"), "green", "status,=,Sold"];
} else if (doc.status === "Work In Progress") {
return [__("Work In Progress"), "orange", "status,=,Work In Progress"];
} else if (["Capitalized", "Decapitalized"].includes(doc.status)) {
return [__(doc.status), "grey", "status,=," + doc.status];
} else if (doc.status === "Capitalized") {
return [__("Capitalized"), "grey", "status,=,Capitalized"];
} else if (doc.status === "Scrapped") {
return [__("Scrapped"), "grey", "status,=,Scrapped"];
} else if (doc.status === "In Maintenance") {

View File

@@ -436,7 +436,7 @@ def scrap_asset(asset_name):
if asset.docstatus != 1:
frappe.throw(_("Asset {0} must be submitted").format(asset.name))
elif asset.status in ("Cancelled", "Sold", "Scrapped", "Capitalized", "Decapitalized"):
elif asset.status in ("Cancelled", "Sold", "Scrapped", "Capitalized"):
frappe.throw(_("Asset {0} cannot be scrapped, as it is already {1}").format(asset.name, asset.status))
date = today()

View File

@@ -36,11 +36,7 @@ erpnext.assets.AssetCapitalization = class AssetCapitalization extends erpnext.s
me.setup_warehouse_query();
me.frm.set_query("target_item_code", function () {
if (me.frm.doc.entry_type == "Capitalization") {
return erpnext.queries.item({ is_stock_item: 0, is_fixed_asset: 1 });
} else {
return erpnext.queries.item({ is_stock_item: 1, is_fixed_asset: 0 });
}
return erpnext.queries.item({ is_stock_item: 0, is_fixed_asset: 1 });
});
me.frm.set_query("target_asset", function () {
@@ -51,7 +47,7 @@ erpnext.assets.AssetCapitalization = class AssetCapitalization extends erpnext.s
me.frm.set_query("asset", "asset_items", function () {
var filters = {
status: ["not in", ["Draft", "Scrapped", "Sold", "Capitalized", "Decapitalized"]],
status: ["not in", ["Draft", "Scrapped", "Sold", "Capitalized"]],
docstatus: 1,
};

View File

@@ -8,30 +8,26 @@
"engine": "InnoDB",
"field_order": [
"title",
"company",
"naming_series",
"entry_type",
"target_item_name",
"target_is_fixed_asset",
"target_has_batch_no",
"target_has_serial_no",
"column_break_9",
"capitalization_method",
"target_item_code",
"target_asset_location",
"target_item_name",
"target_asset",
"target_asset_name",
"target_warehouse",
"target_qty",
"target_stock_uom",
"target_batch_no",
"target_serial_no",
"column_break_5",
"finance_book",
"target_asset_location",
"column_break_9",
"company",
"posting_date",
"posting_time",
"set_posting_time",
"finance_book",
"target_batch_no",
"target_serial_no",
"amended_from",
"target_is_fixed_asset",
"target_has_batch_no",
"target_has_serial_no",
"section_break_16",
"stock_items",
"stock_items_total",
@@ -58,12 +54,12 @@
"label": "Title"
},
{
"depends_on": "eval:(doc.target_item_code && !doc.__islocal && doc.capitalization_method !== 'Choose a WIP composite asset') || ((doc.entry_type=='Capitalization' && doc.capitalization_method=='Create a new composite asset') || doc.entry_type=='Decapitalization')",
"depends_on": "eval:(doc.target_item_code && !doc.__islocal && doc.capitalization_method !== 'Choose a WIP composite asset') || doc.capitalization_method=='Create a new composite asset'",
"fieldname": "target_item_code",
"fieldtype": "Link",
"in_standard_filter": 1,
"label": "Target Item Code",
"mandatory_depends_on": "eval:(doc.entry_type=='Capitalization' && doc.capitalization_method=='Create a new composite asset') || doc.entry_type=='Decapitalization'",
"mandatory_depends_on": "eval:doc.capitalization_method=='Create a new composite asset'",
"options": "Item"
},
{
@@ -84,22 +80,18 @@
"read_only": 1
},
{
"fieldname": "column_break_5",
"fieldtype": "Column Break"
},
{
"depends_on": "eval:(doc.target_asset && !doc.__islocal) || (doc.entry_type=='Capitalization' && doc.capitalization_method=='Choose a WIP composite asset')",
"depends_on": "eval:(doc.target_asset && !doc.__islocal) || doc.capitalization_method=='Choose a WIP composite asset'",
"fieldname": "target_asset",
"fieldtype": "Link",
"in_standard_filter": 1,
"label": "Target Asset",
"mandatory_depends_on": "eval:doc.entry_type=='Capitalization' && doc.capitalization_method=='Choose a WIP composite asset'",
"mandatory_depends_on": "eval:doc.capitalization_method=='Choose a WIP composite asset'",
"no_copy": 1,
"options": "Asset",
"read_only_depends_on": "eval:(doc.entry_type=='Decapitalization') || (doc.entry_type=='Capitalization' && doc.capitalization_method=='Create a new composite asset')"
"read_only_depends_on": "eval:doc.capitalization_method=='Create a new composite asset'"
},
{
"depends_on": "eval:(doc.target_asset_name && !doc.__islocal) || (doc.target_asset && doc.entry_type=='Capitalization' && doc.capitalization_method=='Choose a WIP composite asset')",
"depends_on": "eval:(doc.target_asset_name && !doc.__islocal) || (doc.target_asset && doc.capitalization_method=='Choose a WIP composite asset')",
"fetch_from": "target_asset.asset_name",
"fieldname": "target_asset_name",
"fieldtype": "Data",
@@ -162,7 +154,7 @@
"read_only": 1
},
{
"depends_on": "eval:doc.entry_type=='Capitalization' && (doc.docstatus == 0 || (doc.stock_items && doc.stock_items.length))",
"depends_on": "eval:doc.docstatus == 0 || (doc.stock_items && doc.stock_items.length)",
"fieldname": "section_break_16",
"fieldtype": "Section Break",
"label": "Consumed Stock Items"
@@ -173,14 +165,6 @@
"label": "Stock Items",
"options": "Asset Capitalization Stock Item"
},
{
"depends_on": "eval:doc.entry_type=='Decapitalization'",
"fieldname": "target_warehouse",
"fieldtype": "Link",
"label": "Target Warehouse",
"mandatory_depends_on": "eval:doc.entry_type=='Decapitalization'",
"options": "Warehouse"
},
{
"depends_on": "target_has_batch_no",
"fieldname": "target_batch_no",
@@ -190,20 +174,9 @@
},
{
"default": "1",
"depends_on": "eval:doc.entry_type=='Decapitalization'",
"fieldname": "target_qty",
"fieldtype": "Float",
"label": "Target Qty",
"read_only_depends_on": "eval:doc.entry_type=='Capitalization'"
},
{
"depends_on": "eval:doc.entry_type=='Decapitalization'",
"fetch_from": "target_item_code.stock_uom",
"fieldname": "target_stock_uom",
"fieldtype": "Link",
"label": "Stock UOM",
"options": "UOM",
"read_only": 1
"label": "Target Qty"
},
{
"default": "0",
@@ -241,16 +214,6 @@
"label": "Assets",
"options": "Asset Capitalization Asset Item"
},
{
"default": "Capitalization",
"fieldname": "entry_type",
"fieldtype": "Select",
"in_list_view": 1,
"in_standard_filter": 1,
"label": "Entry Type",
"options": "Capitalization\nDecapitalization",
"reqd": 1
},
{
"fieldname": "stock_items_total",
"fieldtype": "Currency",
@@ -272,7 +235,7 @@
"options": "Finance Book"
},
{
"depends_on": "eval:doc.entry_type=='Capitalization' && (doc.docstatus == 0 || (doc.service_items && doc.service_items.length))",
"depends_on": "eval:doc.docstatus == 0 || (doc.service_items && doc.service_items.length)",
"fieldname": "service_expenses_section",
"fieldtype": "Section Break",
"label": "Service Expenses"
@@ -337,26 +300,24 @@
"read_only": 1
},
{
"depends_on": "eval:doc.entry_type=='Capitalization' && doc.capitalization_method=='Create a new composite asset'",
"depends_on": "eval:doc.capitalization_method=='Create a new composite asset'",
"fieldname": "target_asset_location",
"fieldtype": "Link",
"label": "Target Asset Location",
"mandatory_depends_on": "eval:doc.entry_type=='Capitalization' && doc.capitalization_method=='Create a new composite asset'",
"mandatory_depends_on": "eval:doc.capitalization_method=='Create a new composite asset'",
"options": "Location"
},
{
"depends_on": "eval:doc.entry_type=='Capitalization'",
"fieldname": "capitalization_method",
"fieldtype": "Select",
"label": "Capitalization Method",
"mandatory_depends_on": "eval:doc.entry_type=='Capitalization'",
"options": "\nCreate a new composite asset\nChoose a WIP composite asset"
}
],
"index_web_pages_for_search": 1,
"is_submittable": 1,
"links": [],
"modified": "2023-10-03 22:55:59.461456",
"modified": "2025-01-08 13:14:33.008458",
"modified_by": "Administrator",
"module": "Assets",
"name": "Asset Capitalization",
@@ -400,4 +361,4 @@
"title_field": "title",
"track_changes": 1,
"track_seen": 1
}
}

View File

@@ -41,7 +41,6 @@ force_fields = [
"target_is_fixed_asset",
"target_has_serial_no",
"target_has_batch_no",
"target_stock_uom",
"stock_uom",
"fixed_asset_account",
"valuation_rate",
@@ -73,7 +72,6 @@ class AssetCapitalization(StockController):
capitalization_method: DF.Literal["", "Create a new composite asset", "Choose a WIP composite asset"]
company: DF.Link
cost_center: DF.Link | None
entry_type: DF.Literal["Capitalization", "Decapitalization"]
finance_book: DF.Link | None
naming_series: DF.Literal["ACC-ASC-.YYYY.-"]
posting_date: DF.Date
@@ -96,8 +94,6 @@ class AssetCapitalization(StockController):
target_item_name: DF.Data | None
target_qty: DF.Float
target_serial_no: DF.SmallText | None
target_stock_uom: DF.Link | None
target_warehouse: DF.Link | None
title: DF.Data | None
total_value: DF.Currency
# end: auto-generated types
@@ -190,31 +186,18 @@ class AssetCapitalization(StockController):
def validate_target_item(self):
target_item = frappe.get_cached_doc("Item", self.target_item_code)
if not target_item.is_fixed_asset and not target_item.is_stock_item:
frappe.throw(
_("Target Item {0} is neither a Fixed Asset nor a Stock Item").format(target_item.name)
)
if self.entry_type == "Capitalization" and not target_item.is_fixed_asset:
if not target_item.is_fixed_asset:
frappe.throw(_("Target Item {0} must be a Fixed Asset item").format(target_item.name))
elif self.entry_type == "Decapitalization" and not target_item.is_stock_item:
frappe.throw(_("Target Item {0} must be a Stock Item").format(target_item.name))
if target_item.is_fixed_asset:
self.target_qty = 1
if flt(self.target_qty) <= 0:
frappe.throw(_("Target Qty must be a positive number"))
if not target_item.is_stock_item:
self.target_warehouse = None
if not target_item.has_batch_no:
self.target_batch_no = None
if not target_item.has_serial_no:
self.target_serial_no = ""
if target_item.is_stock_item and not self.target_warehouse:
frappe.throw(_("Target Warehouse is mandatory for Decapitalization"))
self.validate_item(target_item)
def validate_target_asset(self):
@@ -231,7 +214,7 @@ class AssetCapitalization(StockController):
)
)
if target_asset.status in ("Scrapped", "Sold", "Capitalized", "Decapitalized"):
if target_asset.status in ("Scrapped", "Sold", "Capitalized"):
frappe.throw(
_("Target Asset {0} cannot be {1}").format(target_asset.name, target_asset.status)
)
@@ -273,7 +256,7 @@ class AssetCapitalization(StockController):
asset = self.get_asset_for_validation(d.asset)
if asset.status in ("Draft", "Scrapped", "Sold", "Capitalized", "Decapitalized"):
if asset.status in ("Draft", "Scrapped", "Sold", "Capitalized"):
frappe.throw(
_("Row #{0}: Consumed Asset {1} cannot be {2}").format(
d.idx, asset.name, asset.status
@@ -314,9 +297,6 @@ class AssetCapitalization(StockController):
d.cost_center = frappe.get_cached_value("Company", self.company, "cost_center")
def validate_source_mandatory(self):
if not self.target_is_fixed_asset and not self.get("asset_items"):
frappe.throw(_("Consumed Asset Items is mandatory for Decapitalization"))
if self.capitalization_method == "Create a new composite asset" and not (
self.get("stock_items") or self.get("asset_items")
):
@@ -420,18 +400,6 @@ class AssetCapitalization(StockController):
)
sl_entries.append(sle)
if self.entry_type == "Decapitalization" and not self.target_is_fixed_asset:
sle = self.get_sl_entries(
self,
{
"item_code": self.target_item_code,
"warehouse": self.target_warehouse,
"actual_qty": flt(self.target_qty),
"incoming_rate": flt(self.target_incoming_rate),
},
)
sl_entries.append(sle)
# reverse sl entries if cancel
if self.docstatus == 2:
sl_entries.reverse()
@@ -474,21 +442,18 @@ class AssetCapitalization(StockController):
return gl_entries
def get_target_account(self):
if self.target_is_fixed_asset:
from erpnext.assets.doctype.asset.asset import is_cwip_accounting_enabled
from erpnext.assets.doctype.asset.asset import is_cwip_accounting_enabled
asset_category = frappe.get_cached_value("Asset", self.target_asset, "asset_category")
if is_cwip_accounting_enabled(asset_category):
target_account = get_asset_category_account(
"capital_work_in_progress_account",
asset_category=asset_category,
company=self.company,
)
return target_account if target_account else self.target_fixed_asset_account
else:
return self.target_fixed_asset_account
asset_category = frappe.get_cached_value("Asset", self.target_asset, "asset_category")
if is_cwip_accounting_enabled(asset_category):
target_account = get_asset_category_account(
"capital_work_in_progress_account",
asset_category=asset_category,
company=self.company,
)
return target_account if target_account else self.target_fixed_asset_account
else:
return self.warehouse_account[self.target_warehouse]["account"]
return self.target_fixed_asset_account
def get_gl_entries_for_consumed_stock_items(self, gl_entries, target_account, target_against, precision):
# Consumed Stock Items
@@ -589,33 +554,9 @@ class AssetCapitalization(StockController):
item=self,
)
)
else:
# Target Stock Item
sle_list = self.sle_map.get(self.name)
for sle in sle_list:
stock_value_difference = flt(sle.stock_value_difference, precision)
account = self.warehouse_account[sle.warehouse]["account"]
gl_entries.append(
self.get_gl_dict(
{
"account": account,
"against": ", ".join(target_against),
"cost_center": self.cost_center,
"project": self.get("project"),
"remarks": self.get("remarks") or "Accounting Entry for Stock",
"debit": stock_value_difference,
},
self.warehouse_account[sle.warehouse]["account_currency"],
item=self,
)
)
def create_target_asset(self):
if (
self.entry_type != "Capitalization"
or self.capitalization_method != "Create a new composite asset"
):
if self.capitalization_method != "Create a new composite asset":
return
total_target_asset_value = flt(self.total_value, self.precision("total_value"))
@@ -654,10 +595,7 @@ class AssetCapitalization(StockController):
)
def update_target_asset(self):
if (
self.entry_type != "Capitalization"
or self.capitalization_method != "Choose a WIP composite asset"
):
if self.capitalization_method != "Choose a WIP composite asset":
return
total_target_asset_value = flt(self.total_value, self.precision("total_value"))
@@ -700,14 +638,6 @@ class AssetCapitalization(StockController):
get_link_to_form("Asset Capitalization", self.name)
),
)
else:
asset.set_status("Decapitalized")
add_asset_activity(
asset.name,
_("Asset decapitalized after Asset Capitalization {0} was submitted").format(
get_link_to_form("Asset Capitalization", self.name)
),
)
else:
asset.set_status()
add_asset_activity(
@@ -729,16 +659,12 @@ def get_target_item_details(item_code=None, company=None):
# Set Item Details
out.target_item_name = item.item_name
out.target_stock_uom = item.stock_uom
out.target_is_fixed_asset = cint(item.is_fixed_asset)
out.target_has_batch_no = cint(item.has_batch_no)
out.target_has_serial_no = cint(item.has_serial_no)
if out.target_is_fixed_asset:
out.target_qty = 1
out.target_warehouse = None
else:
out.target_asset = None
if not out.target_has_batch_no:
out.target_batch_no = None

View File

@@ -61,7 +61,6 @@ class TestAssetCapitalization(unittest.TestCase):
# Create and submit Asset Capitalization
asset_capitalization = create_asset_capitalization(
entry_type="Capitalization",
capitalization_method="Create a new composite asset",
target_item_code="Macbook Pro",
target_asset_location="Test Location",
@@ -76,7 +75,6 @@ class TestAssetCapitalization(unittest.TestCase):
)
# Test Asset Capitalization values
self.assertEqual(asset_capitalization.entry_type, "Capitalization")
self.assertEqual(asset_capitalization.target_qty, 1)
self.assertEqual(asset_capitalization.stock_items[0].valuation_rate, stock_rate)
@@ -152,7 +150,6 @@ class TestAssetCapitalization(unittest.TestCase):
# Create and submit Asset Capitalization
asset_capitalization = create_asset_capitalization(
entry_type="Capitalization",
capitalization_method="Create a new composite asset",
target_item_code="Macbook Pro",
target_asset_location="Test Location",
@@ -167,7 +164,6 @@ class TestAssetCapitalization(unittest.TestCase):
)
# Test Asset Capitalization values
self.assertEqual(asset_capitalization.entry_type, "Capitalization")
self.assertEqual(asset_capitalization.target_qty, 1)
self.assertEqual(asset_capitalization.stock_items[0].valuation_rate, stock_rate)
@@ -244,7 +240,6 @@ class TestAssetCapitalization(unittest.TestCase):
# Create and submit Asset Capitalization
asset_capitalization = create_asset_capitalization(
entry_type="Capitalization",
capitalization_method="Choose a WIP composite asset",
target_asset=wip_composite_asset.name,
target_asset_location="Test Location",
@@ -256,7 +251,6 @@ class TestAssetCapitalization(unittest.TestCase):
)
# Test Asset Capitalization values
self.assertEqual(asset_capitalization.entry_type, "Capitalization")
self.assertEqual(asset_capitalization.capitalization_method, "Choose a WIP composite asset")
self.assertEqual(asset_capitalization.target_qty, 1)
@@ -297,110 +291,6 @@ class TestAssetCapitalization(unittest.TestCase):
self.assertFalse(get_actual_gle_dict(asset_capitalization.name))
self.assertFalse(get_actual_sle_dict(asset_capitalization.name))
def test_decapitalization_with_depreciation(self):
# Variables
purchase_date = "2020-01-01"
depreciation_start_date = "2020-12-31"
capitalization_date = "2021-06-30"
total_number_of_depreciations = 3
expected_value_after_useful_life = 10_000
consumed_asset_purchase_value = 100_000
consumed_asset_current_value = 70_000
consumed_asset_value_before_disposal = 55_000
target_qty = 10
target_incoming_rate = 5500
depreciation_before_disposal_amount = 15_000
accumulated_depreciation = 45_000
# to accommodate a minor difference in the depreciation-on-disposal calculation
consumed_asset_value_before_disposal = 55_123.29
target_incoming_rate = 5512.329
depreciation_before_disposal_amount = 14_876.71
accumulated_depreciation = 44_876.71
# Create assets
consumed_asset = create_depreciation_asset(
asset_name="Asset Capitalization Consumable Asset",
asset_value=consumed_asset_purchase_value,
purchase_date=purchase_date,
depreciation_start_date=depreciation_start_date,
depreciation_method="Straight Line",
total_number_of_depreciations=total_number_of_depreciations,
frequency_of_depreciation=12,
expected_value_after_useful_life=expected_value_after_useful_life,
company="_Test Company with perpetual inventory",
submit=1,
)
first_asset_depr_schedule = get_asset_depr_schedule_doc(consumed_asset.name, "Active")
self.assertEqual(first_asset_depr_schedule.status, "Active")
# Create and submit Asset Capitalization
asset_capitalization = create_asset_capitalization(
entry_type="Decapitalization",
posting_date=capitalization_date, # half a year
target_item_code="Capitalization Target Stock Item",
target_qty=target_qty,
consumed_asset=consumed_asset.name,
company="_Test Company with perpetual inventory",
submit=1,
)
# Test Asset Capitalization values
self.assertEqual(asset_capitalization.entry_type, "Decapitalization")
self.assertEqual(
asset_capitalization.asset_items[0].current_asset_value, consumed_asset_current_value
)
self.assertEqual(
asset_capitalization.asset_items[0].asset_value, consumed_asset_value_before_disposal
)
self.assertEqual(asset_capitalization.asset_items_total, consumed_asset_value_before_disposal)
self.assertEqual(asset_capitalization.total_value, consumed_asset_value_before_disposal)
self.assertEqual(asset_capitalization.target_incoming_rate, target_incoming_rate)
# Test Consumed Asset values
consumed_asset.reload()
self.assertEqual(consumed_asset.status, "Decapitalized")
first_asset_depr_schedule.load_from_db()
second_asset_depr_schedule = get_asset_depr_schedule_doc(consumed_asset.name, "Active")
self.assertEqual(second_asset_depr_schedule.status, "Active")
self.assertEqual(first_asset_depr_schedule.status, "Cancelled")
depr_schedule_of_consumed_asset = second_asset_depr_schedule.get("depreciation_schedule")
consumed_depreciation_schedule = [
d
for d in depr_schedule_of_consumed_asset
if getdate(d.schedule_date) == getdate(capitalization_date)
]
self.assertTrue(consumed_depreciation_schedule and consumed_depreciation_schedule[0].journal_entry)
self.assertEqual(
consumed_depreciation_schedule[0].depreciation_amount, depreciation_before_disposal_amount
)
# Test General Ledger Entries
expected_gle = {
"_Test Warehouse - TCP1": consumed_asset_value_before_disposal,
"_Test Accumulated Depreciations - TCP1": accumulated_depreciation,
"_Test Fixed Asset - TCP1": -consumed_asset_purchase_value,
}
actual_gle = get_actual_gle_dict(asset_capitalization.name)
self.assertEqual(actual_gle, expected_gle)
# Cancel Asset Capitalization and make test entries and status are reversed
asset_capitalization.reload()
asset_capitalization.cancel()
self.assertEqual(consumed_asset.db_get("status"), "Partially Depreciated")
self.assertFalse(get_actual_gle_dict(asset_capitalization.name))
self.assertFalse(get_actual_sle_dict(asset_capitalization.name))
def test_capitalize_only_service_item(self):
company = "_Test Company"
# Variables
@@ -420,7 +310,6 @@ class TestAssetCapitalization(unittest.TestCase):
# Create and submit Asset Capitalization
asset_capitalization = create_asset_capitalization(
entry_type="Capitalization",
capitalization_method="Choose a WIP composite asset",
target_asset=wip_composite_asset.name,
target_asset_location="Test Location",
@@ -468,13 +357,11 @@ def create_asset_capitalization(**args):
target_item_code = target_asset.item_code or args.target_item_code
company = target_asset.company or args.company or "_Test Company"
warehouse = args.warehouse or create_warehouse("_Test Warehouse", company=company)
target_warehouse = args.target_warehouse or warehouse
source_warehouse = args.source_warehouse or warehouse
asset_capitalization = frappe.new_doc("Asset Capitalization")
asset_capitalization.update(
{
"entry_type": args.entry_type or "Capitalization",
"capitalization_method": args.capitalization_method or None,
"company": company,
"posting_date": args.posting_date or now.strftime("%Y-%m-%d"),
@@ -482,7 +369,6 @@ def create_asset_capitalization(**args):
"target_item_code": target_item_code,
"target_asset": target_asset.name,
"target_asset_location": "Test Location",
"target_warehouse": target_warehouse,
"target_qty": flt(args.target_qty) or 1,
"target_batch_no": args.target_batch_no,
"target_serial_no": args.target_serial_no,

View File

@@ -66,12 +66,12 @@ def get_conditions(filters):
conditions["cost_center"] = filters.get("cost_center")
if status:
# In Store assets are those that are not sold or scrapped or capitalized or decapitalized
# In Store assets are those that are not sold or scrapped or capitalized
operand = "not in"
if status not in "In Location":
operand = "in"
conditions["status"] = (operand, ["Sold", "Scrapped", "Capitalized", "Decapitalized"])
conditions["status"] = (operand, ["Sold", "Scrapped", "Capitalized"])
return conditions
@@ -272,9 +272,9 @@ def get_asset_depreciation_amount_map(filters, finance_book):
query = query.where(asset.cost_center == filters.cost_center)
if filters.status:
if filters.status == "In Location":
query = query.where(asset.status.notin(["Sold", "Scrapped", "Capitalized", "Decapitalized"]))
query = query.where(asset.status.notin(["Sold", "Scrapped", "Capitalized"]))
else:
query = query.where(asset.status.isin(["Sold", "Scrapped", "Capitalized", "Decapitalized"]))
query = query.where(asset.status.isin(["Sold", "Scrapped", "Capitalized"]))
if finance_book:
query = query.where((gle.finance_book.isin([cstr(finance_book), ""])) | (gle.finance_book.isnull()))
else:

View File

@@ -777,7 +777,14 @@ class AccountsController(TransactionBase):
ret = get_item_details(args, self, for_validate=for_validate, overwrite_warehouse=False)
for fieldname, value in ret.items():
if item.meta.get_field(fieldname) and value is not None:
if item.get(fieldname) is None or fieldname in force_item_fields:
if (
item.get(fieldname) is None
or fieldname in force_item_fields
or (
fieldname in ["serial_no", "batch_no"]
and item.get("use_serial_batch_fields")
)
):
item.set(fieldname, value)
elif fieldname in ["cost_center", "conversion_factor"] and not item.get(

View File

@@ -1,364 +0,0 @@
// Copyright (c) 2019, Frappe Technologies Pvt. Ltd. and contributors
// For license information, please see license.txt
frappe.provide("erpnext.tally_migration");
frappe.ui.form.on("Tally Migration", {
	onload: function (frm) {
		// Refresh the form header once when the first progress event arrives,
		// then keep updating the dashboard progress bar on each event.
		let reload_status = true;
		frappe.realtime.on("tally_migration_progress_update", function (data) {
			if (reload_status) {
				frappe.model.with_doc(frm.doc.doctype, frm.doc.name, () => {
					frm.refresh_header();
				});
				reload_status = false;
			}
			frm.dashboard.show_progress(data.title, (data.count / data.total) * 100, data.message);
			// A count of -1 is the server's signal that the background job failed.
			let error_occurred = data.count === -1;
			if (data.count == data.total || error_occurred) {
				// Let the final progress state render briefly, then hide the bar,
				// reload the doc, and surface an error dialog if the job failed.
				window.setTimeout(
					(title) => {
						frm.dashboard.hide_progress(title);
						frm.reload_doc();
						if (error_occurred) {
							frappe.msgprint({
								message: __("An error has occurred during {0}. Check {1} for more details", [
									repl(
										"<a href='/app/tally-migration/%(tally_document)s' class='variant-click'>%(tally_document)s</a>",
										{
											tally_document: frm.docname,
										}
									),
									"<a href='/app/error-log' class='variant-click'>Error Log</a>",
								]),
								title: __("Tally Migration Error"),
								indicator: "red",
							});
						}
					},
					2000,
					data.title
				);
			}
		});
	},
	refresh: function (frm) {
		frm.trigger("show_logs_preview");

		erpnext.tally_migration.failed_import_log = JSON.parse(frm.doc.failed_import_log);
		erpnext.tally_migration.fixed_errors_log = JSON.parse(frm.doc.fixed_errors_log);

		// Default accounts become mandatory once master data is imported and
		// are frozen after day book data has been processed.
		["default_round_off_account", "default_warehouse", "default_cost_center"].forEach((account) => {
			frm.toggle_reqd(account, frm.doc.is_master_data_imported === 1);
			frm.toggle_enable(account, frm.doc.is_day_book_data_processed != 1);
		});

		// Offer the next master-data action (process, then import) unless the
		// corresponding job is already running.
		if (frm.doc.master_data && !frm.doc.is_master_data_imported) {
			if (frm.doc.is_master_data_processed) {
				if (frm.doc.status != "Importing Master Data") {
					frm.events.add_button(frm, __("Import Master Data"), "import_master_data");
				}
			} else {
				if (frm.doc.status != "Processing Master Data") {
					frm.events.add_button(frm, __("Process Master Data"), "process_master_data");
				}
			}
		}

		// Same two-step flow for day book data.
		if (frm.doc.day_book_data && !frm.doc.is_day_book_data_imported) {
			if (frm.doc.is_day_book_data_processed) {
				if (frm.doc.status != "Importing Day Book Data") {
					frm.events.add_button(frm, __("Import Day Book Data"), "import_day_book_data");
				}
			} else {
				if (frm.doc.status != "Processing Day Book Data") {
					frm.events.add_button(frm, __("Process Day Book Data"), "process_day_book_data");
				}
			}
		}
	},
	erpnext_company: function (frm) {
		// Warn the user before an existing company's data gets overwritten.
		frappe.db.exists("Company", frm.doc.erpnext_company).then((exists) => {
			if (exists) {
				frappe.msgprint(
					__(
						"Company {0} already exists. Continuing will overwrite the Company and Chart of Accounts",
						[frm.doc.erpnext_company]
					)
				);
			}
		});
	},
	add_button: function (frm, label, method) {
		// Helper: custom button that calls a whitelisted doc method and reloads.
		frm.add_custom_button(label, () => {
			frm.call({
				doc: frm.doc,
				method: method,
				freeze: true,
			});
			frm.reload_doc();
		});
	},
	render_html_table(frm, shown_logs, hidden_logs, field) {
		// Render up to `shown_logs` entries into the HTML field; hide the field
		// entirely when there is nothing to show.
		if (shown_logs && shown_logs.length > 0) {
			frm.toggle_display(field, true);
		} else {
			frm.toggle_display(field, false);
			return;
		}
		let rows = erpnext.tally_migration.get_html_rows(shown_logs, field);
		let rows_head, table_caption;

		// Overflow beyond the preview window is summarized in a footer row.
		let table_footer =
			hidden_logs && hidden_logs.length > 0
				? `<tr class="text-muted">
					<td colspan="4">And ${hidden_logs.length} more others</td>
				</tr>`
				: "";

		if (field === "fixed_error_log_preview") {
			rows_head = `<th width="75%">${__("Meta Data")}</th>
			<th width="10%">${__("Unresolve")}</th>`;
			table_caption = "Resolved Issues";
		} else {
			rows_head = `<th width="75%">${__("Error Message")}</th>
			<th width="10%">${__("Create")}</th>`;
			table_caption = "Error Log";
		}

		frm.get_field(field).$wrapper.html(`
			<table class="table table-bordered">
				<caption>${table_caption}</caption>
				<tr class="text-muted">
					<th width="5%">${__("#")}</th>
					<th width="10%">${__("DocType")}</th>
					${rows_head}
				</tr>
				${rows}
				${table_footer}
			</table>
		`);
	},
	show_error_summary(frm) {
		// Count failed entries per doctype and log the summary to the console.
		let summary = erpnext.tally_migration.failed_import_log.reduce((summary, row) => {
			if (row.doc) {
				if (summary[row.doc.doctype]) {
					summary[row.doc.doctype] += 1;
				} else {
					summary[row.doc.doctype] = 1;
				}
			}
			return summary;
		}, {});
		console.table(summary);
	},
	show_logs_preview(frm) {
		// Show the log section only if either log contains entries.
		let empty = "[]";
		let import_log = frm.doc.failed_import_log || empty;
		let completed_log = frm.doc.fixed_errors_log || empty;
		let render_section = !(import_log === completed_log && import_log === empty);
		frm.toggle_display("import_log_section", render_section);
		if (render_section) {
			frm.trigger("show_error_summary");
			frm.trigger("show_errored_import_log");
			frm.trigger("show_fixed_errors_log");
		}
	},
	show_errored_import_log(frm) {
		// Preview the first 20 failed entries; the rest are only counted.
		let import_log = erpnext.tally_migration.failed_import_log;
		let logs = import_log.slice(0, 20);
		let hidden_logs = import_log.slice(20);
		frm.events.render_html_table(frm, logs, hidden_logs, "failed_import_preview");
	},
	show_fixed_errors_log(frm) {
		// Preview the first 20 resolved entries; the rest are only counted.
		let completed_log = erpnext.tally_migration.fixed_errors_log;
		let logs = completed_log.slice(0, 20);
		let hidden_logs = completed_log.slice(20);
		frm.events.render_html_table(frm, logs, hidden_logs, "fixed_error_log_preview");
	},
});
erpnext.tally_migration.getError = (traceback) => {
	/* Extract a human-readable message: for a multi-line Python traceback,
	   take the last line and drop the leading "ExceptionClass:" prefix;
	   single-line input is returned unchanged. */
	const trimmed = traceback.trim();
	if (trimmed.indexOf("\n") === -1) {
		return traceback;
	}
	const last_line = traceback.substr(trimmed.lastIndexOf("\n") + 1);
	const colon_idx = last_line.indexOf(":");
	const msg_start = colon_idx > 0 ? colon_idx + 1 : 0;
	return last_line.slice(msg_start).trim();
};
erpnext.tally_migration.cleanDoc = (obj) => {
	/* Recursively strip all null and empty-string values from a JSON
	   object, mutating it in place; returns the same object. */
	Object.keys(obj).forEach((key) => {
		const value = obj[key];
		if (value === "" || value === null) {
			delete obj[key];
		} else if (Object.prototype.toString.call(value) === "[object Object]") {
			erpnext.tally_migration.cleanDoc(value);
		} else if (Array.isArray(value)) {
			value.forEach((item) => {
				erpnext.tally_migration.cleanDoc(item);
			});
		}
	});
	return obj;
};
erpnext.tally_migration.unresolve = (document) => {
	/* Mark document migration as unresolved ie. move to failed error log */
	let frm = cur_frm;
	let failed_log = erpnext.tally_migration.failed_import_log;
	let fixed_log = erpnext.tally_migration.fixed_errors_log;

	// Drop the matching entry from the fixed log: the filter keeps only rows
	// whose cleaned doc is NOT deep-equal to the given document.
	let modified_fixed_log = fixed_log.filter((row) => {
		if (!frappe.utils.deep_equal(erpnext.tally_migration.cleanDoc(row.doc), document)) {
			return row;
		}
	});

	// Append to the failed log with a timestamped reason.
	failed_log.push({ doc: document, exc: `Marked unresolved on ${Date()}` });

	// Persist both logs back onto the form as JSON and save.
	frm.doc.failed_import_log = JSON.stringify(failed_log);
	frm.doc.fixed_errors_log = JSON.stringify(modified_fixed_log);

	frm.dirty();
	frm.save();
};
erpnext.tally_migration.resolve = (document) => {
	/* Mark document migration as resolved ie. move to fixed error log */
	let frm = cur_frm;
	let failed_log = erpnext.tally_migration.failed_import_log;
	let fixed_log = erpnext.tally_migration.fixed_errors_log;

	// Drop the matching entry from the failed log: the filter keeps only rows
	// whose cleaned doc is NOT deep-equal to the given document.
	let modified_failed_log = failed_log.filter((row) => {
		if (!frappe.utils.deep_equal(erpnext.tally_migration.cleanDoc(row.doc), document)) {
			return row;
		}
	});

	// Append to the fixed log with a timestamped reason.
	fixed_log.push({ doc: document, exc: `Solved on ${Date()}` });

	// Persist both logs back onto the form as JSON and save.
	frm.doc.failed_import_log = JSON.stringify(modified_failed_log);
	frm.doc.fixed_errors_log = JSON.stringify(fixed_log);

	frm.dirty();
	frm.save();
};
erpnext.tally_migration.create_new_doc = (document) => {
	/* Mark as resolved and create new document */
	// Move the entry into the fixed log first, then ask the server to build
	// a fresh document from the failed payload.
	erpnext.tally_migration.resolve(document);
	return frappe.call({
		type: "POST",
		method: "erpnext.erpnext_integrations.doctype.tally_migration.tally_migration.new_doc",
		args: {
			document,
		},
		freeze: true,
		callback: function (r) {
			if (!r.exc) {
				// Sync the server-created doc into the client model and open it
				// with link triggers enabled so dependent fields populate.
				frappe.model.sync(r.message);
				frappe.get_doc(r.message.doctype, r.message.name).__run_link_triggers = true;
				frappe.set_route("Form", r.message.doctype, r.message.name);
			}
		},
	});
};
erpnext.tally_migration.get_html_rows = (logs, field) => {
	/* Render each log entry as a <tr> with collapsible traceback/document
	   previews and an action button: "Create Document" for failed entries,
	   "Mark as unresolved" for fixed entries. Returns the joined HTML. */
	let index = 0;
	let rows = logs
		.map(({ doc, exc }) => {
			// Unique id ties each toggle button to its collapsible panel.
			let id = frappe.dom.get_unique_id();
			let traceback = exc;
			let error_message = erpnext.tally_migration.getError(traceback);
			index++;

			let show_traceback = `
				<button class="btn btn-default btn-xs m-3" type="button" data-toggle="collapse" data-target="#${id}-traceback" aria-expanded="false" aria-controls="${id}-traceback">
					${__("Show Traceback")}
				</button>
				<div class="collapse margin-top" id="${id}-traceback">
					<div class="well">
						<pre style="font-size: smaller;">${traceback}</pre>
					</div>
				</div>`;

			let show_doc = `
				<button class='btn btn-default btn-xs m-3' type='button' data-toggle='collapse' data-target='#${id}-doc' aria-expanded='false' aria-controls='${id}-doc'>
					${__("Show Document")}
				</button>
				<div class="collapse margin-top" id="${id}-doc">
					<div class="well">
						<pre style="font-size: smaller;">${JSON.stringify(erpnext.tally_migration.cleanDoc(doc), null, 1)}</pre>
					</div>
				</div>`;

			// Action buttons inline the doc as JSON into the onclick handler.
			let create_button = `
				<button class='btn btn-default btn-xs m-3' type='button' onclick='erpnext.tally_migration.create_new_doc(${JSON.stringify(
					doc
				)})'>
					${__("Create Document")}
				</button>`;

			let mark_as_unresolved = `
				<button class='btn btn-default btn-xs m-3' type='button' onclick='erpnext.tally_migration.unresolve(${JSON.stringify(
					doc
				)})'>
					${__("Mark as unresolved")}
				</button>`;

			if (field === "fixed_error_log_preview") {
				return `<tr>
					<td>${index}</td>
					<td>
						<div>${doc.doctype}</div>
					</td>
					<td>
						<div>${error_message}</div>
						<div>${show_doc}</div>
					</td>
					<td>
						<div>${mark_as_unresolved}</div>
					</td>
				</tr>`;
			} else {
				return `<tr>
					<td>${index}</td>
					<td>
						<div>${doc.doctype}</div>
					</td>
					<td>
						<div>${error_message}</div>
						<div>${show_traceback}</div>
						<div>${show_doc}</div>
					</td>
					<td>
						<div>${create_button}</div>
					</td>
				</tr>`;
			}
		})
		.join("");
	return rows;
};

View File

@@ -1,279 +0,0 @@
{
"actions": [],
"beta": 1,
"creation": "2019-02-01 14:27:09.485238",
"doctype": "DocType",
"editable_grid": 1,
"engine": "InnoDB",
"field_order": [
"status",
"master_data",
"is_master_data_processed",
"is_master_data_imported",
"column_break_2",
"tally_creditors_account",
"tally_debtors_account",
"company_section",
"tally_company",
"default_uom",
"column_break_8",
"erpnext_company",
"processed_files_section",
"chart_of_accounts",
"parties",
"addresses",
"column_break_17",
"uoms",
"items",
"vouchers",
"accounts_section",
"default_warehouse",
"default_round_off_account",
"column_break_21",
"default_cost_center",
"day_book_section",
"day_book_data",
"column_break_27",
"is_day_book_data_processed",
"is_day_book_data_imported",
"import_log_section",
"failed_import_log",
"fixed_errors_log",
"failed_import_preview",
"fixed_error_log_preview"
],
"fields": [
{
"fieldname": "status",
"fieldtype": "Data",
"hidden": 1,
"label": "Status"
},
{
"description": "Data exported from Tally that consists of the Chart of Accounts, Customers, Suppliers, Addresses, Items and UOMs",
"fieldname": "master_data",
"fieldtype": "Attach",
"in_list_view": 1,
"label": "Master Data"
},
{
"default": "Sundry Creditors",
"description": "Creditors Account set in Tally",
"fieldname": "tally_creditors_account",
"fieldtype": "Data",
"label": "Tally Creditors Account",
"read_only_depends_on": "eval:doc.is_master_data_processed==1",
"reqd": 1
},
{
"fieldname": "column_break_2",
"fieldtype": "Column Break"
},
{
"default": "Sundry Debtors",
"description": "Debtors Account set in Tally",
"fieldname": "tally_debtors_account",
"fieldtype": "Data",
"label": "Tally Debtors Account",
"read_only_depends_on": "eval:doc.is_master_data_processed==1",
"reqd": 1
},
{
"depends_on": "is_master_data_processed",
"fieldname": "company_section",
"fieldtype": "Section Break"
},
{
"description": "Company Name as per Imported Tally Data",
"fieldname": "tally_company",
"fieldtype": "Data",
"label": "Tally Company",
"read_only": 1
},
{
"fieldname": "column_break_8",
"fieldtype": "Column Break"
},
{
"description": "Your Company set in ERPNext",
"fieldname": "erpnext_company",
"fieldtype": "Data",
"label": "ERPNext Company",
"read_only_depends_on": "eval:doc.is_master_data_processed==1"
},
{
"fieldname": "processed_files_section",
"fieldtype": "Section Break",
"hidden": 1,
"label": "Processed Files"
},
{
"fieldname": "chart_of_accounts",
"fieldtype": "Attach",
"label": "Chart of Accounts"
},
{
"fieldname": "parties",
"fieldtype": "Attach",
"label": "Parties"
},
{
"fieldname": "addresses",
"fieldtype": "Attach",
"label": "Addresses"
},
{
"fieldname": "column_break_17",
"fieldtype": "Column Break"
},
{
"fieldname": "uoms",
"fieldtype": "Attach",
"label": "UOMs"
},
{
"fieldname": "items",
"fieldtype": "Attach",
"label": "Items"
},
{
"fieldname": "vouchers",
"fieldtype": "Attach",
"label": "Vouchers"
},
{
"depends_on": "is_master_data_imported",
"description": "The accounts are set by the system automatically but do confirm these defaults",
"fieldname": "accounts_section",
"fieldtype": "Section Break",
"label": "Accounts"
},
{
"fieldname": "default_warehouse",
"fieldtype": "Link",
"label": "Default Warehouse",
"options": "Warehouse"
},
{
"fieldname": "column_break_21",
"fieldtype": "Column Break"
},
{
"fieldname": "default_cost_center",
"fieldtype": "Link",
"label": "Default Cost Center",
"options": "Cost Center"
},
{
"default": "0",
"fieldname": "is_master_data_processed",
"fieldtype": "Check",
"label": "Is Master Data Processed",
"read_only": 1
},
{
"default": "0",
"fieldname": "is_day_book_data_processed",
"fieldtype": "Check",
"label": "Is Day Book Data Processed",
"read_only": 1
},
{
"default": "0",
"fieldname": "is_day_book_data_imported",
"fieldtype": "Check",
"label": "Is Day Book Data Imported",
"read_only": 1
},
{
"default": "0",
"fieldname": "is_master_data_imported",
"fieldtype": "Check",
"label": "Is Master Data Imported",
"read_only": 1
},
{
"depends_on": "is_master_data_imported",
"fieldname": "day_book_section",
"fieldtype": "Section Break"
},
{
"fieldname": "column_break_27",
"fieldtype": "Column Break"
},
{
"description": "Day Book Data exported from Tally that consists of all historic transactions",
"fieldname": "day_book_data",
"fieldtype": "Attach",
"in_list_view": 1,
"label": "Day Book Data"
},
{
"default": "Unit",
"description": "UOM in case unspecified in imported data",
"fieldname": "default_uom",
"fieldtype": "Link",
"label": "Default UOM",
"options": "UOM",
"read_only_depends_on": "eval:doc.is_master_data_imported==1"
},
{
"default": "[]",
"fieldname": "failed_import_log",
"fieldtype": "Code",
"hidden": 1,
"options": "JSON"
},
{
"fieldname": "failed_import_preview",
"fieldtype": "HTML",
"label": "Failed Import Log"
},
{
"fieldname": "import_log_section",
"fieldtype": "Section Break",
"label": "Import Log"
},
{
"fieldname": "default_round_off_account",
"fieldtype": "Link",
"label": "Default Round Off Account",
"options": "Account"
},
{
"default": "[]",
"fieldname": "fixed_errors_log",
"fieldtype": "Code",
"hidden": 1,
"options": "JSON"
},
{
"fieldname": "fixed_error_log_preview",
"fieldtype": "HTML",
"label": "Fixed Error Log"
}
],
"links": [],
"modified": "2020-04-28 00:29:18.039826",
"modified_by": "Administrator",
"module": "ERPNext Integrations",
"name": "Tally Migration",
"owner": "Administrator",
"permissions": [
{
"create": 1,
"delete": 1,
"email": 1,
"export": 1,
"print": 1,
"read": 1,
"report": 1,
"role": "System Manager",
"share": 1,
"write": 1
}
],
"sort_field": "modified",
"sort_order": "DESC",
"track_changes": 1
}

View File

@@ -1,768 +0,0 @@
# Copyright (c) 2019, Frappe Technologies Pvt. Ltd. and contributors
# For license information, please see license.txt
import json
import re
import sys
import traceback
import zipfile
from decimal import Decimal
import frappe
from bs4 import BeautifulSoup as bs
from frappe import _
from frappe.custom.doctype.custom_field.custom_field import (
create_custom_fields as _create_custom_fields,
)
from frappe.model.document import Document
from frappe.utils.data import format_datetime
from erpnext import encode_company_abbr
from erpnext.accounts.doctype.account.chart_of_accounts.chart_of_accounts import create_charts
from erpnext.accounts.doctype.chart_of_accounts_importer.chart_of_accounts_importer import (
unset_existing_data,
)
PRIMARY_ACCOUNT = "Primary"
VOUCHER_CHUNK_SIZE = 500
@frappe.whitelist()
def new_doc(document):
	"""Build an unsaved document from a JSON payload.

	Strips the ``doctype`` key (used to pick the DocType) and any stale
	``name`` before applying the remaining fields to a new document.
	"""
	payload = json.loads(document)
	doctype = payload.pop("doctype")
	payload.pop("name", None)
	doc = frappe.new_doc(doctype)
	doc.update(payload)
	return doc
class TallyMigration(Document):
# begin: auto-generated types
# This code is auto-generated. Do not modify anything in this block.
from typing import TYPE_CHECKING
if TYPE_CHECKING:
from frappe.types import DF
addresses: DF.Attach | None
chart_of_accounts: DF.Attach | None
day_book_data: DF.Attach | None
default_cost_center: DF.Link | None
default_round_off_account: DF.Link | None
default_uom: DF.Link | None
default_warehouse: DF.Link | None
erpnext_company: DF.Data | None
failed_import_log: DF.Code | None
fixed_errors_log: DF.Code | None
is_day_book_data_imported: DF.Check
is_day_book_data_processed: DF.Check
is_master_data_imported: DF.Check
is_master_data_processed: DF.Check
items: DF.Attach | None
master_data: DF.Attach | None
parties: DF.Attach | None
status: DF.Data | None
tally_company: DF.Data | None
tally_creditors_account: DF.Data
tally_debtors_account: DF.Data
uoms: DF.Attach | None
vouchers: DF.Attach | None
# end: auto-generated types
def validate(self):
failed_import_log = json.loads(self.failed_import_log)
sorted_failed_import_log = sorted(failed_import_log, key=lambda row: row["doc"]["creation"])
self.failed_import_log = json.dumps(sorted_failed_import_log)
def autoname(self):
if not self.name:
self.name = "Tally Migration on " + format_datetime(self.creation)
	def get_collection(self, data_file):
		"""Parse an uploaded Tally XML export and return its REQUESTDATA node.

		``data_file`` is the ``file_url`` of an attached File document; when
		the attachment is a zip archive, the first member is extracted.
		"""

		def sanitize(string):
			# Remove stray &#4; (EOT) entities present in Tally exports.
			return re.sub("&#4;", "", string)

		def emptify(string):
			# Drop self-closing tags, empty element pairs, and CRLF noise so
			# the parsed tree only contains meaningful nodes.
			string = re.sub(r"<\w+/>", "", string)
			string = re.sub(r"<([\w.]+)>\s*<\/\1>", "", string)
			string = re.sub(r"\r\n", "", string)
			return string

		master_file = frappe.get_doc("File", {"file_url": data_file})
		master_file_path = master_file.get_full_path()

		# NOTE(review): `content` is only assigned on the zip path — a
		# non-zip upload would raise NameError below. Confirm uploads are
		# always zipped, or handle the plain-file case.
		if zipfile.is_zipfile(master_file_path):
			with zipfile.ZipFile(master_file_path) as zf:
				encoded_content = zf.read(zf.namelist()[0])
				try:
					# Tally usually exports UTF-8 with a BOM; fall back to
					# UTF-16 for older exports.
					content = encoded_content.decode("utf-8-sig")
				except UnicodeDecodeError:
					content = encoded_content.decode("utf-16")

		master = bs(sanitize(emptify(content)), "xml")
		collection = master.BODY.IMPORTDATA.REQUESTDATA
		return collection
	def dump_processed_data(self, data):
		"""Persist each processed dataset as a private JSON File attachment.

		For every ``key: value`` in ``data``, a File named ``<key>.json`` is
		attached to this document and the resulting ``file_url`` is stored on
		the attribute of the same name (e.g. ``self.chart_of_accounts``).
		"""
		for key, value in data.items():
			f = frappe.get_doc(
				{
					"doctype": "File",
					"file_name": key + ".json",
					"attached_to_doctype": self.doctype,
					"attached_to_name": self.name,
					"content": json.dumps(value),
					"is_private": True,
				}
			)
			try:
				f.insert(ignore_if_duplicate=True)
			except frappe.DuplicateEntryError:
				# Re-running a processing step may recreate the same file;
				# reuse the existing attachment silently.
				pass
			setattr(self, key, f.file_url)
def set_account_defaults(self):
self.default_cost_center, self.default_round_off_account = frappe.db.get_value(
"Company", self.erpnext_company, ["cost_center", "round_off_account"]
)
self.default_warehouse = frappe.db.get_value("Stock Settings", "Stock Settings", "default_warehouse")
def _process_master_data(self):
def get_company_name(collection):
return collection.find_all("REMOTECMPINFO.LIST")[0].REMOTECMPNAME.string.strip()
def get_coa_customers_suppliers(collection):
root_type_map = {
"Application of Funds (Assets)": "Asset",
"Expenses": "Expense",
"Income": "Income",
"Source of Funds (Liabilities)": "Liability",
}
roots = set(root_type_map.keys())
accounts = list(get_groups(collection.find_all("GROUP"))) + list(
get_ledgers(collection.find_all("LEDGER"))
)
children, parents = get_children_and_parent_dict(accounts)
group_set = [acc[1] for acc in accounts if acc[2]]
children, customers, suppliers = remove_parties(parents, children, group_set)
try:
coa = traverse({}, children, roots, roots, group_set)
except RecursionError:
self.log(
_(
"Error occured while parsing Chart of Accounts: Please make sure that no two accounts have the same name"
)
)
for account in coa:
coa[account]["root_type"] = root_type_map[account]
return coa, customers, suppliers
def get_groups(accounts):
for account in accounts:
if account["NAME"] in (self.tally_creditors_account, self.tally_debtors_account):
yield get_parent(account), account["NAME"], 0
else:
yield get_parent(account), account["NAME"], 1
def get_ledgers(accounts):
for account in accounts:
# If Ledger doesn't have PARENT field then don't create Account
# For example "Profit & Loss A/c"
if account.PARENT:
yield account.PARENT.string.strip(), account["NAME"], 0
def get_parent(account):
if account.PARENT:
return account.PARENT.string.strip()
return {
("Yes", "No"): "Application of Funds (Assets)",
("Yes", "Yes"): "Expenses",
("No", "Yes"): "Income",
("No", "No"): "Source of Funds (Liabilities)",
}[(account.ISDEEMEDPOSITIVE.string.strip(), account.ISREVENUE.string.strip())]
def get_children_and_parent_dict(accounts):
children, parents = {}, {}
for parent, account, _is_group in accounts:
children.setdefault(parent, set()).add(account)
parents.setdefault(account, set()).add(parent)
parents[account].update(parents.get(parent, []))
return children, parents
def remove_parties(parents, children, group_set):
customers, suppliers = set(), set()
for account in parents:
found = False
if self.tally_creditors_account in parents[account]:
found = True
if account not in group_set:
suppliers.add(account)
if self.tally_debtors_account in parents[account]:
found = True
if account not in group_set:
customers.add(account)
if found:
children.pop(account, None)
return children, customers, suppliers
def traverse(tree, children, accounts, roots, group_set):
for account in accounts:
if account in group_set or account in roots:
if account in children:
tree[account] = traverse({}, children, children[account], roots, group_set)
else:
tree[account] = {"is_group": 1}
else:
tree[account] = {}
return tree
def get_parties_addresses(collection, customers, suppliers):
parties, addresses = [], []
for account in collection.find_all("LEDGER"):
party_type = None
links = []
if account.NAME.string.strip() in customers:
party_type = "Customer"
parties.append(
{
"doctype": party_type,
"customer_name": account.NAME.string.strip(),
"tax_id": account.INCOMETAXNUMBER.string.strip()
if account.INCOMETAXNUMBER
else None,
"customer_group": "All Customer Groups",
"territory": "All Territories",
"customer_type": "Individual",
}
)
links.append({"link_doctype": party_type, "link_name": account["NAME"]})
if account.NAME.string.strip() in suppliers:
party_type = "Supplier"
parties.append(
{
"doctype": party_type,
"supplier_name": account.NAME.string.strip(),
"pan": account.INCOMETAXNUMBER.string.strip()
if account.INCOMETAXNUMBER
else None,
"supplier_group": "All Supplier Groups",
"supplier_type": "Individual",
}
)
links.append({"link_doctype": party_type, "link_name": account["NAME"]})
if party_type:
address = "\n".join([a.string.strip() for a in account.find_all("ADDRESS")])
addresses.append(
{
"doctype": "Address",
"address_line1": address[:140].strip(),
"address_line2": address[140:].strip(),
"country": account.COUNTRYNAME.string.strip() if account.COUNTRYNAME else None,
"state": account.LEDSTATENAME.string.strip() if account.LEDSTATENAME else None,
"gst_state": account.LEDSTATENAME.string.strip()
if account.LEDSTATENAME
else None,
"pin_code": account.PINCODE.string.strip() if account.PINCODE else None,
"mobile": account.LEDGERPHONE.string.strip() if account.LEDGERPHONE else None,
"phone": account.LEDGERPHONE.string.strip() if account.LEDGERPHONE else None,
"gstin": account.PARTYGSTIN.string.strip() if account.PARTYGSTIN else None,
"links": links,
}
)
return parties, addresses
def get_stock_items_uoms(collection):
uoms = []
for uom in collection.find_all("UNIT"):
uoms.append({"doctype": "UOM", "uom_name": uom.NAME.string.strip()})
items = []
for item in collection.find_all("STOCKITEM"):
stock_uom = item.BASEUNITS.string.strip() if item.BASEUNITS else self.default_uom
items.append(
{
"doctype": "Item",
"item_code": item.NAME.string.strip(),
"stock_uom": stock_uom.strip(),
"is_stock_item": 0,
"item_group": "All Item Groups",
"item_defaults": [{"company": self.erpnext_company}],
}
)
return items, uoms
try:
self.publish("Process Master Data", _("Reading Uploaded File"), 1, 5)
collection = self.get_collection(self.master_data)
company = get_company_name(collection)
self.tally_company = company
self.erpnext_company = company
self.publish("Process Master Data", _("Processing Chart of Accounts and Parties"), 2, 5)
chart_of_accounts, customers, suppliers = get_coa_customers_suppliers(collection)
self.publish("Process Master Data", _("Processing Party Addresses"), 3, 5)
parties, addresses = get_parties_addresses(collection, customers, suppliers)
self.publish("Process Master Data", _("Processing Items and UOMs"), 4, 5)
items, uoms = get_stock_items_uoms(collection)
data = {
"chart_of_accounts": chart_of_accounts,
"parties": parties,
"addresses": addresses,
"items": items,
"uoms": uoms,
}
self.publish("Process Master Data", _("Done"), 5, 5)
self.dump_processed_data(data)
self.is_master_data_processed = 1
except Exception:
self.publish("Process Master Data", _("Process Failed"), -1, 5)
self.log()
finally:
self.set_status()
def publish(self, title, message, count, total):
	"""Broadcast a progress update for this migration to the initiating user.

	Emits a ``tally_migration_progress_update`` realtime event carrying the
	step title, a human-readable message and the count/total progress pair.
	"""
	payload = {
		"title": title,
		"message": message,
		"count": count,
		"total": total,
	}
	frappe.publish_realtime("tally_migration_progress_update", payload, user=self.modified_by)
def _import_master_data(self):
	"""Import the processed master data into ERPNext.

	Creates the Company with the Tally chart of accounts, then imports
	Parties (Customers/Suppliers) with their Addresses, and finally Items
	and UOMs — all read back from the JSON files dumped by the processing
	step. Progress is broadcast via ``self.publish``; per-document failures
	are recorded through ``self.log`` without aborting the run.
	"""

	def create_company_and_coa(coa_file_url):
		# Create (or reuse) the target Company and import the Tally chart
		# of accounts stored in the dumped JSON file.
		coa_file = frappe.get_doc("File", {"file_url": coa_file_url})
		# Skip the standard chart setup; the custom Tally chart is created below.
		frappe.local.flags.ignore_chart_of_accounts = True
		try:
			company = frappe.get_doc(
				{
					"doctype": "Company",
					"company_name": self.erpnext_company,
					"default_currency": "INR",
					"enable_perpetual_inventory": 0,
				}
			).insert()
		except frappe.DuplicateEntryError:
			# Company already exists: reuse it, but wipe its existing data first.
			company = frappe.get_doc("Company", self.erpnext_company)
			unset_existing_data(self.erpnext_company)
		frappe.local.flags.ignore_chart_of_accounts = False
		create_charts(company.name, custom_chart=json.loads(coa_file.get_content()))
		company.create_default_warehouses()

	def create_parties_and_addresses(parties_file_url, addresses_file_url):
		# Insert Customer/Supplier documents, then their Addresses.
		parties_file = frappe.get_doc("File", {"file_url": parties_file_url})
		for party in json.loads(parties_file.get_content()):
			try:
				party_doc = frappe.get_doc(party)
				party_doc.insert()
			except Exception:
				# NOTE(review): if frappe.get_doc() itself raises, party_doc may
				# be unbound (or stale from a prior iteration) — TODO confirm.
				self.log(party_doc)
		addresses_file = frappe.get_doc("File", {"file_url": addresses_file_url})
		for address in json.loads(addresses_file.get_content()):
			try:
				address_doc = frappe.get_doc(address)
				# Tally addresses may lack fields ERPNext considers mandatory.
				address_doc.insert(ignore_mandatory=True)
			except Exception:
				self.log(address_doc)

	def create_items_uoms(items_file_url, uoms_file_url):
		# Insert UOMs first (Items reference them), then the Items.
		uoms_file = frappe.get_doc("File", {"file_url": uoms_file_url})
		for uom in json.loads(uoms_file.get_content()):
			if not frappe.db.exists(uom):
				try:
					uom_doc = frappe.get_doc(uom)
					uom_doc.insert()
				except Exception:
					self.log(uom_doc)
		items_file = frappe.get_doc("File", {"file_url": items_file_url})
		for item in json.loads(items_file.get_content()):
			try:
				item_doc = frappe.get_doc(item)
				item_doc.insert()
			except Exception:
				self.log(item_doc)

	try:
		self.publish("Import Master Data", _("Creating Company and Importing Chart of Accounts"), 1, 4)
		create_company_and_coa(self.chart_of_accounts)
		self.publish("Import Master Data", _("Importing Parties and Addresses"), 2, 4)
		create_parties_and_addresses(self.parties, self.addresses)
		self.publish("Import Master Data", _("Importing Items and UOMs"), 3, 4)
		create_items_uoms(self.items, self.uoms)
		self.publish("Import Master Data", _("Done"), 4, 4)
		self.set_account_defaults()
		self.is_master_data_imported = 1
		frappe.db.commit()
	except Exception:
		# NOTE(review): total is 5 here but 4 in the success path — looks
		# inconsistent; confirm intended progress denominator.
		self.publish("Import Master Data", _("Process Failed"), -1, 5)
		frappe.db.rollback()
		self.log()
	finally:
		self.set_status()
def _process_day_book_data(self):
	"""Convert Tally day-book vouchers into ERPNext document dicts.

	Vouchers with inventory entries (other than Journal/Receipt/Payment/
	Contra) become Sales or Purchase Invoices; everything else becomes a
	Journal Entry. The resulting dicts are dumped to JSON for the later
	import step; per-voucher failures are logged and skipped.
	"""

	def get_vouchers(collection):
		vouchers = []
		for voucher in collection.find_all("VOUCHER"):
			if voucher.ISCANCELLED.string.strip() == "Yes":
				continue
			# Tally stores inventory lines under several possible tag names.
			inventory_entries = (
				voucher.find_all("INVENTORYENTRIES.LIST")
				+ voucher.find_all("ALLINVENTORYENTRIES.LIST")
				+ voucher.find_all("INVENTORYENTRIESIN.LIST")
				+ voucher.find_all("INVENTORYENTRIESOUT.LIST")
			)
			if (
				voucher.VOUCHERTYPENAME.string.strip() not in ["Journal", "Receipt", "Payment", "Contra"]
				and inventory_entries
			):
				function = voucher_to_invoice
			else:
				function = voucher_to_journal_entry
			try:
				processed_voucher = function(voucher)
				# voucher_to_invoice returns None for unhandled voucher types.
				if processed_voucher:
					vouchers.append(processed_voucher)
				frappe.db.commit()
			except Exception:
				frappe.db.rollback()
				self.log(voucher)
		return vouchers

	def voucher_to_journal_entry(voucher):
		# Map a Tally ledger voucher to an ERPNext Journal Entry dict.
		accounts = []
		ledger_entries = voucher.find_all("ALLLEDGERENTRIES.LIST") + voucher.find_all(
			"LEDGERENTRIES.LIST"
		)
		for entry in ledger_entries:
			account = {
				"account": encode_company_abbr(entry.LEDGERNAME.string.strip(), self.erpnext_company),
				"cost_center": self.default_cost_center,
			}
			if entry.ISPARTYLEDGER.string.strip() == "Yes":
				# Party ledgers post against the common debtors/creditors account.
				party_details = get_party(entry.LEDGERNAME.string.strip())
				if party_details:
					party_type, party_account = party_details
					account["party_type"] = party_type
					account["account"] = party_account
					account["party"] = entry.LEDGERNAME.string.strip()
			amount = Decimal(entry.AMOUNT.string.strip())
			# Sign convention in the export: positive amounts are credits,
			# negative amounts are debits.
			if amount > 0:
				account["credit_in_account_currency"] = str(abs(amount))
			else:
				account["debit_in_account_currency"] = str(abs(amount))
			accounts.append(account)
		journal_entry = {
			"doctype": "Journal Entry",
			"tally_guid": voucher.GUID.string.strip(),
			"tally_voucher_no": voucher.VOUCHERNUMBER.string.strip() if voucher.VOUCHERNUMBER else "",
			"posting_date": voucher.DATE.string.strip(),
			"company": self.erpnext_company,
			"accounts": accounts,
		}
		return journal_entry

	def voucher_to_invoice(voucher):
		# Map a Tally inventory voucher to a Sales/Purchase Invoice dict;
		# returns None for voucher types that are not handled.
		if voucher.VOUCHERTYPENAME.string.strip() in ["Sales", "Credit Note"]:
			doctype = "Sales Invoice"
			party_field = "customer"
			account_field = "debit_to"
			account_name = encode_company_abbr(self.tally_debtors_account, self.erpnext_company)
			price_list_field = "selling_price_list"
		elif voucher.VOUCHERTYPENAME.string.strip() in ["Purchase", "Debit Note"]:
			doctype = "Purchase Invoice"
			party_field = "supplier"
			account_field = "credit_to"
			account_name = encode_company_abbr(self.tally_creditors_account, self.erpnext_company)
			price_list_field = "buying_price_list"
		else:
			# Do not handle vouchers other than "Purchase", "Debit Note", "Sales" and "Credit Note"
			# Do not handle Custom Vouchers either
			return
		invoice = {
			"doctype": doctype,
			party_field: voucher.PARTYNAME.string.strip(),
			"tally_guid": voucher.GUID.string.strip(),
			"tally_voucher_no": voucher.VOUCHERNUMBER.string.strip() if voucher.VOUCHERNUMBER else "",
			"posting_date": voucher.DATE.string.strip(),
			"due_date": voucher.DATE.string.strip(),
			"items": get_voucher_items(voucher, doctype),
			"taxes": get_voucher_taxes(voucher),
			account_field: account_name,
			price_list_field: "Tally Price List",
			"set_posting_time": 1,
			"disable_rounded_total": 1,
			"company": self.erpnext_company,
		}
		return invoice

	def get_voucher_items(voucher, doctype):
		# Build the invoice item rows from the voucher's inventory entries.
		inventory_entries = (
			voucher.find_all("INVENTORYENTRIES.LIST")
			+ voucher.find_all("ALLINVENTORYENTRIES.LIST")
			+ voucher.find_all("INVENTORYENTRIESIN.LIST")
			+ voucher.find_all("INVENTORYENTRIESOUT.LIST")
		)
		if doctype == "Sales Invoice":
			account_field = "income_account"
		elif doctype == "Purchase Invoice":
			account_field = "expense_account"
		items = []
		for entry in inventory_entries:
			# ACTUALQTY is a "<qty> <uom>" pair, e.g. "5 Nos".
			qty, uom = entry.ACTUALQTY.string.strip().split()
			items.append(
				{
					"item_code": entry.STOCKITEMNAME.string.strip(),
					"description": entry.STOCKITEMNAME.string.strip(),
					"qty": qty.strip(),
					"uom": uom.strip(),
					"conversion_factor": 1,
					# RATE is "<rate>/<uom>"; keep only the numeric part.
					"price_list_rate": entry.RATE.string.strip().split("/")[0],
					"cost_center": self.default_cost_center,
					"warehouse": self.default_warehouse,
					account_field: encode_company_abbr(
						entry.find_all("ACCOUNTINGALLOCATIONS.LIST")[0].LEDGERNAME.string.strip(),
						self.erpnext_company,
					),
				}
			)
		return items

	def get_voucher_taxes(voucher):
		# Every non-party ledger entry on an inventory voucher is treated
		# as a tax/charge row with its actual amount.
		ledger_entries = voucher.find_all("ALLLEDGERENTRIES.LIST") + voucher.find_all(
			"LEDGERENTRIES.LIST"
		)
		taxes = []
		for entry in ledger_entries:
			if entry.ISPARTYLEDGER.string.strip() == "No":
				tax_account = encode_company_abbr(entry.LEDGERNAME.string.strip(), self.erpnext_company)
				taxes.append(
					{
						"charge_type": "Actual",
						"account_head": tax_account,
						"description": tax_account,
						"tax_amount": entry.AMOUNT.string.strip(),
						"cost_center": self.default_cost_center,
					}
				)
		return taxes

	def get_party(party):
		# Resolve a ledger name to (party_type, common receivable/payable
		# account); returns None implicitly when no party matches.
		if frappe.db.exists({"doctype": "Supplier", "supplier_name": party}):
			return "Supplier", encode_company_abbr(self.tally_creditors_account, self.erpnext_company)
		elif frappe.db.exists({"doctype": "Customer", "customer_name": party}):
			return "Customer", encode_company_abbr(self.tally_debtors_account, self.erpnext_company)

	try:
		self.publish("Process Day Book Data", _("Reading Uploaded File"), 1, 3)
		collection = self.get_collection(self.day_book_data)
		self.publish("Process Day Book Data", _("Processing Vouchers"), 2, 3)
		vouchers = get_vouchers(collection)
		self.publish("Process Day Book Data", _("Done"), 3, 3)
		self.dump_processed_data({"vouchers": vouchers})
		self.is_day_book_data_processed = 1
	except Exception:
		# NOTE(review): total is 5 here but 3 in the success path — looks
		# inconsistent; confirm intended progress denominator.
		self.publish("Process Day Book Data", _("Process Failed"), -1, 5)
		self.log()
	finally:
		self.set_status()
def _import_day_book_data(self):
	"""Prepare the system and enqueue chunked import of processed vouchers.

	Marks the Tally debtors/creditors accounts as Receivable/Payable, sets
	the company round-off account, backfills Fiscal Years to cover the
	earliest voucher date, creates the "Tally Price List" and the Tally
	GUID/voucher-number custom fields, then enqueues ``_import_vouchers``
	in chunks of ``VOUCHER_CHUNK_SIZE``.
	"""

	def create_fiscal_years(vouchers):
		# Create Fiscal Year records backwards, one year at a time, until
		# the earliest voucher posting date is covered.
		from frappe.utils.data import add_years, getdate

		earliest_date = getdate(min(voucher["posting_date"] for voucher in vouchers))
		oldest_year = frappe.get_all(
			"Fiscal Year", fields=["year_start_date", "year_end_date"], order_by="year_start_date"
		)[0]
		while earliest_date < oldest_year.year_start_date:
			new_year = frappe.get_doc({"doctype": "Fiscal Year"})
			new_year.year_start_date = add_years(oldest_year.year_start_date, -1)
			new_year.year_end_date = add_years(oldest_year.year_end_date, -1)
			# Single-year names ("2019") vs spanning names ("2019-2020").
			if new_year.year_start_date.year == new_year.year_end_date.year:
				new_year.year = new_year.year_start_date.year
			else:
				new_year.year = f"{new_year.year_start_date.year}-{new_year.year_end_date.year}"
			new_year.save()
			oldest_year = new_year

	def create_custom_fields():
		# Add Tally GUID / Voucher Number fields to the imported doctypes
		# so each document stays traceable to its Tally source.
		_create_custom_fields(
			{
				("Journal Entry", "Purchase Invoice", "Sales Invoice"): [
					{
						"fieldtype": "Data",
						"fieldname": "tally_guid",
						"read_only": 1,
						"label": "Tally GUID",
					},
					{
						"fieldtype": "Data",
						"fieldname": "tally_voucher_no",
						"read_only": 1,
						"label": "Tally Voucher Number",
					},
				]
			}
		)

	def create_price_list():
		# Price list referenced by every generated invoice.
		frappe.get_doc(
			{
				"doctype": "Price List",
				"price_list_name": "Tally Price List",
				"selling": 1,
				"buying": 1,
				"enabled": 1,
				"currency": "INR",
			}
		).insert()

	try:
		# Invoices can only post against Receivable/Payable account types.
		frappe.db.set_value(
			"Account",
			encode_company_abbr(self.tally_creditors_account, self.erpnext_company),
			"account_type",
			"Payable",
		)
		frappe.db.set_value(
			"Account",
			encode_company_abbr(self.tally_debtors_account, self.erpnext_company),
			"account_type",
			"Receivable",
		)
		frappe.db.set_value(
			"Company", self.erpnext_company, "round_off_account", self.default_round_off_account
		)
		vouchers_file = frappe.get_doc("File", {"file_url": self.vouchers})
		vouchers = json.loads(vouchers_file.get_content())
		create_fiscal_years(vouchers)
		create_price_list()
		create_custom_fields()
		total = len(vouchers)
		is_last = False
		# Enqueue one background job per chunk; ``start`` is passed 1-based
		# (index + 1) for user-facing progress numbering.
		for index in range(0, total, VOUCHER_CHUNK_SIZE):
			if index + VOUCHER_CHUNK_SIZE >= total:
				is_last = True
			frappe.enqueue_doc(
				self.doctype,
				self.name,
				"_import_vouchers",
				queue="long",
				timeout=3600,
				start=index + 1,
				total=total,
				is_last=is_last,
			)
	except Exception:
		self.log()
	finally:
		self.set_status()
def _import_vouchers(self, start, total, is_last=False):
	"""Import one chunk of processed vouchers (Journal Entries / Invoices).

	Args:
		start: 1-based index of the first voucher of this chunk, as enqueued
			by ``_import_day_book_data`` (which passes ``index + 1``).
		total: total number of vouchers, used for progress reporting.
		is_last: True for the final chunk; triggers completion cleanup.

	Each voucher is inserted and submitted in its own transaction; failures
	are rolled back and recorded via ``self.log`` without stopping the chunk.
	"""
	frappe.flags.in_migrate = True
	vouchers_file = frappe.get_doc("File", {"file_url": self.vouchers})
	vouchers = json.loads(vouchers_file.get_content())
	# ``start`` is 1-based (for user-facing progress); convert to a 0-based
	# list offset so the first voucher of each chunk is not skipped.
	chunk = vouchers[start - 1 : start - 1 + VOUCHER_CHUNK_SIZE]
	for index, voucher in enumerate(chunk, start=start):
		voucher_doc = None
		try:
			voucher_doc = frappe.get_doc(voucher)
			voucher_doc.insert()
			voucher_doc.submit()
			self.publish("Importing Vouchers", _("{} of {}").format(index, total), index, total)
			frappe.db.commit()
		except Exception:
			frappe.db.rollback()
			# If frappe.get_doc() itself failed, voucher_doc is still None —
			# fall back to logging the raw voucher dict instead.
			self.log(voucher_doc or voucher)
	if is_last:
		self.status = ""
		self.is_day_book_data_imported = 1
		self.save()
		# The price list was only needed while importing historical vouchers.
		frappe.db.set_value("Price List", "Tally Price List", "enabled", 0)
	frappe.flags.in_migrate = False
@frappe.whitelist()
def process_master_data(self):
	"""Mark the document as busy and queue master-data processing in the background."""
	self.set_status("Processing Master Data")
	frappe.enqueue_doc(
		self.doctype,
		self.name,
		"_process_master_data",
		queue="long",
		timeout=3600,
	)
@frappe.whitelist()
def import_master_data(self):
	"""Mark the document as busy and queue master-data import in the background."""
	self.set_status("Importing Master Data")
	frappe.enqueue_doc(
		self.doctype,
		self.name,
		"_import_master_data",
		queue="long",
		timeout=3600,
	)
@frappe.whitelist()
def process_day_book_data(self):
	"""Mark the document as busy and queue day-book processing in the background."""
	self.set_status("Processing Day Book Data")
	frappe.enqueue_doc(
		self.doctype,
		self.name,
		"_process_day_book_data",
		queue="long",
		timeout=3600,
	)
@frappe.whitelist()
def import_day_book_data(self):
	"""Mark the document as busy and queue day-book import in the background."""
	self.set_status("Importing Day Book Data")
	frappe.enqueue_doc(
		self.doctype,
		self.name,
		"_import_day_book_data",
		queue="long",
		timeout=3600,
	)
def log(self, data=None):
	"""Record a failure.

	A Document that failed to import is appended to ``failed_import_log``
	together with the current traceback (duplicate-entry errors are skipped,
	since they are expected on re-runs). Any other payload — or the current
	status, when no data is given — is written to the Error Log instead.
	"""
	if isinstance(data, frappe.model.document.Document):
		# Duplicates on re-runs are not worth recording.
		if sys.exc_info()[1].__class__ != frappe.DuplicateEntryError:
			entries = json.loads(self.failed_import_log)
			entries.append({"doc": data.as_dict(), "exc": traceback.format_exc()})
			self.failed_import_log = json.dumps(entries, separators=(",", ":"))
			self.save()
			frappe.db.commit()
		return None

	payload = data or self.status
	message = "\n".join(
		[
			"Data:",
			json.dumps(payload, default=str, indent=4),
			"--" * 50,
			"\nException:",
			traceback.format_exc(),
		]
	)
	return frappe.log_error(title="Tally Migration Error", message=message)
def set_status(self, status=""):
	"""Set and persist the migration status; call with no argument to clear it."""
	self.status = status
	self.save()

View File

@@ -1,8 +0,0 @@
# Copyright (c) 2019, Frappe Technologies Pvt. Ltd. and Contributors
# See license.txt
import unittest
class TestTallyMigration(unittest.TestCase):
pass

View File

@@ -412,7 +412,6 @@ scheduler_events = {
"cron": {
"0/15 * * * *": [
"erpnext.manufacturing.doctype.bom_update_log.bom_update_log.resume_bom_cost_update_jobs",
"erpnext.accounts.doctype.process_payment_reconciliation.process_payment_reconciliation.trigger_reconciliation_for_queued_docs",
],
"0/30 * * * *": [
"erpnext.utilities.doctype.video.video.update_youtube_data",

View File

@@ -522,6 +522,7 @@ class TestJobCard(FrappeTestCase):
production_item=item_code,
bom_no=bom_doc.name,
skip_transfer=1,
from_wip_warehouse=1,
wip_warehouse=warehouse,
source_warehouse=warehouse,
)

View File

@@ -207,7 +207,7 @@
"description": "In the case of 'Use Multi-Level BOM' in a work order, if the user wishes to add sub-assembly costs to Finished Goods items without using a job card, as well as the scrap items, then this option needs to be enabled.",
"fieldname": "set_op_cost_and_scrape_from_sub_assemblies",
"fieldtype": "Check",
"label": "Set Operating Cost / Scrape Items From Sub-assemblies"
"label": "Set Operating Cost / Scrap Items From Sub-assemblies"
},
{
"default": "0",
@@ -249,7 +249,7 @@
"index_web_pages_for_search": 1,
"issingle": 1,
"links": [],
"modified": "2025-01-02 12:46:33.520853",
"modified": "2025-01-09 16:02:23.326763",
"modified_by": "Administrator",
"module": "Manufacturing",
"name": "Manufacturing Settings",

View File

@@ -744,9 +744,9 @@ class TestProductionPlan(FrappeTestCase):
"""
from erpnext.manufacturing.doctype.work_order.test_work_order import make_wo_order_test_record
make_stock_entry(item_code="_Test Item", target="Work In Progress - _TC", qty=2, basic_rate=100)
make_stock_entry(item_code="_Test Item", target="_Test Warehouse - _TC", qty=2, basic_rate=100)
make_stock_entry(
item_code="_Test Item Home Desktop 100", target="Work In Progress - _TC", qty=4, basic_rate=100
item_code="_Test Item Home Desktop 100", target="_Test Warehouse - _TC", qty=4, basic_rate=100
)
item = "_Test FG Item"
@@ -794,10 +794,10 @@ class TestProductionPlan(FrappeTestCase):
from erpnext.manufacturing.doctype.work_order.test_work_order import make_wo_order_test_record
make_stock_entry(
item_code="Raw Material Item 1", target="Work In Progress - _TC", qty=2, basic_rate=100
item_code="Raw Material Item 1", target="_Test Warehouse - _TC", qty=2, basic_rate=100
)
make_stock_entry(
item_code="Raw Material Item 2", target="Work In Progress - _TC", qty=2, basic_rate=100
item_code="Raw Material Item 2", target="_Test Warehouse - _TC", qty=2, basic_rate=100
)
pln = create_production_plan(item_code="Test Production Item 1", skip_getting_mr_items=True)

View File

@@ -714,7 +714,12 @@ class TestWorkOrder(FrappeTestCase):
self.assertEqual(row.item_code, fg_item)
work_order = make_wo_order_test_record(
item=fg_item, skip_transfer=True, planned_start_date=now(), qty=30, do_not_save=True
item=fg_item,
skip_transfer=True,
planned_start_date=now(),
qty=30,
do_not_save=True,
source_warehouse="_Test Warehouse - _TC",
)
work_order.batch_size = 10
work_order.insert()
@@ -931,11 +936,13 @@ class TestWorkOrder(FrappeTestCase):
wip_warehouse=wip_warehouse,
qty=qty,
skip_transfer=1,
source_warehouse=wip_warehouse,
stock_uom=fg_item_non_whole.stock_uom,
)
se = frappe.get_doc(make_stock_entry(wo.name, "Material Transfer for Manufacture", qty))
se.get("items")[0].s_warehouse = "Stores - _TC"
se.get("items")[0].t_warehouse = wip_warehouse
se.insert()
se.submit()
@@ -1007,7 +1014,12 @@ class TestWorkOrder(FrappeTestCase):
bom.submit()
wo_order = make_wo_order_test_record(
item=item, company=company, planned_start_date=now(), qty=20, skip_transfer=1
item=item,
company=company,
planned_start_date=now(),
qty=20,
skip_transfer=1,
from_wip_warehouse=1,
)
job_card = frappe.db.get_value("Job Card", {"work_order": wo_order.name}, "name")
update_job_card(job_card)
@@ -1019,7 +1031,12 @@ class TestWorkOrder(FrappeTestCase):
# Partial Job Card 1 with qty 10
wo_order = make_wo_order_test_record(
item=item, company=company, planned_start_date=add_days(now(), 60), qty=20, skip_transfer=1
item=item,
company=company,
planned_start_date=add_days(now(), 60),
qty=20,
skip_transfer=1,
from_wip_warehouse=1,
)
job_card = frappe.db.get_value("Job Card", {"work_order": wo_order.name}, "name")
update_job_card(job_card, 10, 1)
@@ -2045,6 +2062,8 @@ class TestWorkOrder(FrappeTestCase):
bom_no=bom_doc.name,
qty=1,
skip_transfer=1,
from_wip_warehouse=1,
source_warehouse="_Test Warehouse - _TC",
)
job_cards = frappe.get_all("Job Card", filters={"work_order": wo.name})
@@ -2451,6 +2470,37 @@ class TestWorkOrder(FrappeTestCase):
frappe.db.set_single_value("Manufacturing Settings", "validate_components_quantities_per_bom", 0)
def test_wip_skip(self):
wo = make_wo_order_test_record(
item="_Test FG Item",
qty=10,
source_warehouse="_Test Warehouse - _TC",
wip_warehouse="Stores - _TC",
)
manufacture_entry = frappe.get_doc(make_stock_entry(wo.name, "Manufacture", 10))
self.assertEqual(manufacture_entry.items[0].s_warehouse, "Stores - _TC")
wo = make_wo_order_test_record(
item="_Test FG Item",
qty=10,
source_warehouse="_Test Warehouse - _TC",
wip_warehouse="Stores - _TC",
skip_transfer=1,
)
manufacture_entry = frappe.get_doc(make_stock_entry(wo.name, "Manufacture", 10))
self.assertEqual(manufacture_entry.items[0].s_warehouse, "_Test Warehouse - _TC")
wo = make_wo_order_test_record(
item="_Test FG Item",
qty=10,
source_warehouse="_Test Warehouse - _TC",
wip_warehouse="Stores - _TC",
skip_transfer=1,
from_wip_warehouse=1,
)
manufacture_entry = frappe.get_doc(make_stock_entry(wo.name, "Manufacture", 10))
self.assertEqual(manufacture_entry.items[0].s_warehouse, "Stores - _TC")
def make_operation(**kwargs):
kwargs = frappe._dict(kwargs)

View File

@@ -152,6 +152,7 @@ class WorkOrder(Document):
self.validate_sales_order()
self.set_default_warehouse()
self.validate_warehouse_belongs_to_company()
self.check_wip_warehouse_skip()
self.calculate_operating_cost()
self.validate_qty()
self.validate_transfer_against()
@@ -251,6 +252,10 @@ class WorkOrder(Document):
if not self.fg_warehouse:
self.fg_warehouse = frappe.db.get_single_value("Manufacturing Settings", "default_fg_warehouse")
def check_wip_warehouse_skip(self):
if self.skip_transfer and not self.from_wip_warehouse:
self.wip_warehouse = None
def validate_warehouse_belongs_to_company(self):
warehouses = [self.fg_warehouse, self.wip_warehouse]
for d in self.get("required_items"):
@@ -1421,7 +1426,11 @@ def make_stock_entry(work_order_id, purpose, qty=None, target_warehouse=None):
stock_entry.to_warehouse = wip_warehouse
stock_entry.project = work_order.project
else:
stock_entry.from_warehouse = wip_warehouse
stock_entry.from_warehouse = (
work_order.source_warehouse
if work_order.skip_transfer and not work_order.from_wip_warehouse
else wip_warehouse
)
stock_entry.to_warehouse = work_order.fg_warehouse
stock_entry.project = work_order.project

View File

@@ -387,3 +387,5 @@ erpnext.patches.v15_0.set_is_exchange_gain_loss_in_payment_entry_deductions
erpnext.patches.v15_0.enable_allow_existing_serial_no
erpnext.patches.v15_0.update_cc_in_process_statement_of_accounts
erpnext.patches.v15_0.update_asset_status_to_work_in_progress
erpnext.patches.v15_0.migrate_checkbox_to_select_for_reconciliation_effect
erpnext.patches.v15_0.sync_auto_reconcile_config

View File

@@ -0,0 +1,18 @@
import frappe
def execute():
	"""
	A new select field 'reconciliation_takes_effect_on' has been added to control
	Advance Payment Reconciliation dates. Migrate the old checkbox configuration
	to the new select field on 'Company' and 'Payment Entry'.
	"""
	# Fetch the legacy checkbox value — it decides the new select value.
	# (Fetching 'reconciliation_takes_effect_on' instead would leave the
	# checkbox unread and map every company to the same option.)
	companies = frappe.db.get_all("Company", fields=["name", "reconcile_on_advance_payment_date"])
	for company in companies:
		new_value = (
			"Advance Payment Date"
			if company.reconcile_on_advance_payment_date
			else "Oldest Of Invoice Or Advance"
		)
		frappe.db.set_value("Company", company.name, "reconciliation_takes_effect_on", new_value)

	frappe.db.sql(
		"""update `tabPayment Entry` set advance_reconciliation_takes_effect_on = if(reconcile_on_advance_payment_date = 0, 'Oldest Of Invoice Or Advance', 'Advance Payment Date')"""
	)

View File

@@ -0,0 +1,26 @@
import frappe
from erpnext.accounts.utils import sync_auto_reconcile_config
def execute():
	"""Seed the auto-reconciliation defaults: cron interval and queue size."""
	frappe.db.set_single_value("Accounts Settings", "auto_reconciliation_job_trigger", 15)
	frappe.db.set_single_value("Accounts Settings", "reconciliation_queue_size", 5)

	# Register the scheduler event for queued reconciliation docs, unless
	# one already exists for this method.
	job_method = "erpnext.accounts.doctype.process_payment_reconciliation.process_payment_reconciliation.trigger_reconciliation_for_queued_docs"
	existing = frappe.db.get_all(
		"Scheduler Event",
		{"scheduled_against": "Process Payment Reconciliation", "method": job_method},
	)
	if not existing:
		event = frappe.get_doc(
			{
				"doctype": "Scheduler Event",
				"scheduled_against": "Process Payment Reconciliation",
				"method": job_method,
			}
		)
		event.save()

	# Align the stored cron configuration with the 15-minute default set above.
	sync_auto_reconcile_config(15)

View File

@@ -166,7 +166,7 @@ class Timesheet(Document):
if data.task and data.task not in tasks:
task = frappe.get_doc("Task", data.task)
task.update_time_and_costing()
task.save()
task.save(ignore_permissions=True)
tasks.append(data.task)
if data.project and data.project not in projects:
@@ -175,7 +175,7 @@ class Timesheet(Document):
for project in projects:
project_doc = frappe.get_doc("Project", project)
project_doc.update_project()
project_doc.save()
project_doc.save(ignore_permissions=True)
def validate_dates(self):
for data in self.time_logs:

View File

@@ -157,10 +157,17 @@ erpnext.buying = {
if(!frappe.meta.has_field(this.frm.doc.doctype, "billing_address")) return;
frappe.call({
method: "erpnext.setup.doctype.company.company.get_default_company_address",
args: { name: this.frm.doc.company, existing_address:this.frm.doc.billing_address },
method: "erpnext.setup.doctype.company.company.get_billing_shipping_address",
args: {
name: this.frm.doc.company,
billing_address:this.frm.doc.billing_address,
shipping_address: this.frm.doc.shipping_address
},
callback: (r) => {
this.frm.set_value("billing_address", r.message || "");
this.frm.set_value("billing_address", r.message.primary_address || "");
if(!frappe.meta.has_field(this.frm.doc.doctype, "shipping_address")) return;
this.frm.set_value("shipping_address", r.message.shipping_address || "");
},
});
}

View File

@@ -579,6 +579,8 @@ erpnext.TransactionController = class TransactionController extends erpnext.taxe
child_doctype: item.doctype,
child_docname: item.name,
is_old_subcontracting_flow: me.frm.doc.is_old_subcontracting_flow,
use_serial_batch_fields: item.use_serial_batch_fields,
serial_and_batch_bundle: item.serial_and_batch_bundle,
}
},
@@ -1825,18 +1827,16 @@ erpnext.TransactionController = class TransactionController extends erpnext.taxe
apply_rule_on_other_items(args) {
const me = this;
const fields = ["discount_percentage", "pricing_rules", "discount_amount", "rate"];
const fields = ["pricing_rules"];
for(var k in args) {
let data = args[k];
if (data && data.apply_rule_on_other_items && JSON.parse(data.apply_rule_on_other_items)) {
fields.push(frappe.scrub(data.pricing_rule_for))
me.frm.doc.items.forEach(d => {
if (in_list(JSON.parse(data.apply_rule_on_other_items), d[data.apply_rule_on]) && d.item_code === data.item_code) {
if (JSON.parse(data.apply_rule_on_other_items).includes(d[data.apply_rule_on])) {
for(var k in data) {
if (data.pricing_rule_for == "Discount Percentage" && data.apply_rule_on_other_items && k == "discount_amount") {
continue;
}
if (in_list(fields, k) && data[k] && (data.price_or_product_discount === 'Price' || k === 'pricing_rules')) {
frappe.model.set_value(d.doctype, d.name, k, data[k]);

View File

@@ -411,7 +411,11 @@ def _make_sales_order(source_name, target_doc=None, ignore_permissions=False):
2. If selections: Is Alternative Item/Has Alternative Item: Map if selected and adequate qty
3. If selections: Simple row: Map if adequate qty
"""
has_qty = item.qty > 0
balance_qty = item.qty - ordered_items.get(item.item_code, 0.0)
if balance_qty <= 0:
return False
has_qty = balance_qty
if not selected_rows:
return not item.is_alternative

View File

@@ -30,6 +30,38 @@ class TestQuotation(FrappeTestCase):
self.assertTrue(sales_order.get("payment_schedule"))
def test_do_not_add_ordered_items_in_new_sales_order(self):
from erpnext.selling.doctype.quotation.quotation import make_sales_order
from erpnext.stock.doctype.item.test_item import make_item
item = make_item("_Test Item for Quotation for SO", {"is_stock_item": 1})
quotation = make_quotation(qty=5, do_not_submit=True)
quotation.append(
"items",
{
"item_code": item.name,
"qty": 5,
"rate": 100,
"conversion_factor": 1,
"uom": item.stock_uom,
"warehouse": "_Test Warehouse - _TC",
"stock_uom": item.stock_uom,
},
)
quotation.submit()
sales_order = make_sales_order(quotation.name)
sales_order.delivery_date = nowdate()
self.assertEqual(len(sales_order.items), 2)
sales_order.remove(sales_order.items[1])
sales_order.submit()
sales_order = make_sales_order(quotation.name)
self.assertEqual(len(sales_order.items), 1)
self.assertEqual(sales_order.items[0].item_code, item.name)
self.assertEqual(sales_order.items[0].qty, 5.0)
def test_gross_profit(self):
from erpnext.stock.doctype.item.test_item import make_item
from erpnext.stock.doctype.stock_entry.stock_entry_utils import make_stock_entry

View File

@@ -1687,13 +1687,13 @@ class TestSalesOrder(AccountsTestMixin, FrappeTestCase):
wo.submit()
make_stock_entry(
item_code="_Test Item",
target="Work In Progress - _TC",
target="_Test Warehouse - _TC",
qty=4,
basic_rate=100, # Stock RM
)
make_stock_entry(
item_code="_Test Item Home Desktop 100", # Stock RM
target="Work In Progress - _TC",
target="_Test Warehouse - _TC",
qty=4,
basic_rate=100,
)

View File

@@ -64,7 +64,7 @@ def search_by_term(search_term, warehouse, price_list):
}
if batch_no:
price_filters["batch_no"] = batch_no
price_filters["batch_no"] = ["in", [batch_no, ""]]
price = frappe.get_list(
doctype="Item Price",
@@ -74,15 +74,25 @@ def search_by_term(search_term, warehouse, price_list):
def __sort(p):
p_uom = p.get("uom")
p_batch = p.get("batch_no")
batch_no = item.get("batch_no")
if batch_no and p_batch and p_batch == batch_no:
if p_uom == item.get("uom"):
return 0
elif p_uom == item.get("stock_uom"):
return 1
else:
return 2
if p_uom == item.get("uom"):
return 0
return 3
elif p_uom == item.get("stock_uom"):
return 1
return 4
else:
return 2
return 5
# sort by fallback preference. always pick exact uom match if available
# sort by fallback preference. always pick exact uom and batch number match if available
price = sorted(price, key=__sort)
if len(price) > 0:

View File

@@ -75,6 +75,7 @@
"advance_payments_section",
"book_advance_payments_in_separate_party_account",
"reconcile_on_advance_payment_date",
"reconciliation_takes_effect_on",
"column_break_fwcf",
"default_advance_received_account",
"default_advance_paid_account",
@@ -796,6 +797,7 @@
"description": "If <b>Enabled</b> - Reconciliation happens on the <b>Advance Payment posting date</b><br>\nIf <b>Disabled</b> - Reconciliation happens on oldest of 2 Dates: <b>Invoice Date</b> or the <b>Advance Payment posting date</b><br>\n",
"fieldname": "reconcile_on_advance_payment_date",
"fieldtype": "Check",
"hidden": 1,
"label": "Reconcile on Advance Payment Date"
},
{
@@ -841,6 +843,13 @@
{
"fieldname": "column_break_dcdl",
"fieldtype": "Column Break"
},
{
"default": "Oldest Of Invoice Or Advance",
"fieldname": "reconciliation_takes_effect_on",
"fieldtype": "Select",
"label": "Reconciliation Takes Effect On",
"options": "Advance Payment Date\nOldest Of Invoice Or Advance\nReconciliation Date"
}
],
"icon": "fa fa-building",
@@ -848,7 +857,7 @@
"image_field": "company_logo",
"is_tree": 1,
"links": [],
"modified": "2024-12-02 15:37:32.723176",
"modified": "2025-01-09 20:12:25.471544",
"modified_by": "Administrator",
"module": "Setup",
"name": "Company",

View File

@@ -87,6 +87,9 @@ class Company(NestedSet):
payment_terms: DF.Link | None
phone_no: DF.Data | None
reconcile_on_advance_payment_date: DF.Check
reconciliation_takes_effect_on: DF.Literal[
"Advance Payment Date", "Oldest Of Invoice Or Advance", "Reconciliation Date"
]
registration_details: DF.Code | None
rgt: DF.Int
round_off_account: DF.Link | None
@@ -914,6 +917,14 @@ def get_default_company_address(name, sort_key="is_primary_address", existing_ad
return None
@frappe.whitelist()
def get_billing_shipping_address(name, billing_address=None, shipping_address=None):
primary_address = get_default_company_address(name, "is_primary_address", billing_address)
shipping_address = get_default_company_address(name, "is_shipping_address", shipping_address)
return {"primary_address": primary_address, "shipping_address": shipping_address}
@frappe.whitelist()
def create_transaction_deletion_request(company):
from erpnext.setup.doctype.transaction_deletion_record.transaction_deletion_record import (

View File

@@ -2,7 +2,7 @@
# License: GNU General Public License v3. See license.txt
from collections import defaultdict
from collections import OrderedDict, defaultdict
import frappe
from frappe import _
@@ -449,11 +449,14 @@ def get_available_batches(kwargs):
get_auto_batch_nos,
)
batchwise_qty = defaultdict(float)
batchwise_qty = OrderedDict()
batches = get_auto_batch_nos(kwargs)
for batch in batches:
batchwise_qty[batch.get("batch_no")] += batch.get("qty")
if batch.get("batch_no") not in batchwise_qty:
batchwise_qty[batch.get("batch_no")] = batch.get("qty")
else:
batchwise_qty[batch.get("batch_no")] += batch.get("qty")
return batchwise_qty

View File

@@ -2339,6 +2339,143 @@ class TestDeliveryNote(FrappeTestCase):
for d in bundle_data:
self.assertEqual(d.incoming_rate, serial_no_valuation[d.serial_no])
def test_auto_set_serial_batch_for_draft_dn(self):
	"""Saving a *draft* Delivery Note must auto-populate batch_no / serial_no
	on its item rows when Stock Settings enables
	'auto_create_serial_and_batch_bundle_for_outward'.

	NOTE(review): the two Stock Settings changes below are not reset at the
	end of the test — confirm a fixture/teardown elsewhere restores them.
	"""
	# Enable auto-assignment and make the pick order deterministic (FIFO).
	frappe.db.set_single_value("Stock Settings", "auto_create_serial_and_batch_bundle_for_outward", 1)
	frappe.db.set_single_value("Stock Settings", "pick_serial_and_batch_based_on", "FIFO")

	# Three tracking flavours: batch-only, serial-only, and batch+serial.
	batch_item = make_item(
		"_Test Auto Set Serial Batch Draft DN",
		properties={
			"has_batch_no": 1,
			"create_new_batch": 1,
			"is_stock_item": 1,
			"batch_number_series": "TAS-BASD-.#####",
		},
	)

	serial_item = make_item(
		"_Test Auto Set Serial Batch Draft DN Serial Item",
		properties={"has_serial_no": 1, "is_stock_item": 1, "serial_no_series": "TAS-SASD-.#####"},
	)

	batch_serial_item = make_item(
		"_Test Auto Set Serial Batch Draft DN Batch Serial Item",
		properties={
			"has_batch_no": 1,
			"has_serial_no": 1,
			"is_stock_item": 1,
			"create_new_batch": 1,
			"batch_number_series": "TAS-BSD-.#####",
			"serial_no_series": "TAS-SSD-.#####",
		},
	)

	# Bring 5 units of each item into stock so there is something to pick.
	for item in [batch_item, serial_item, batch_serial_item]:
		make_stock_entry(item_code=item.name, target="_Test Warehouse - _TC", qty=5, basic_rate=100)

	# Start a draft DN with the batch item; append the other two rows below.
	dn = create_delivery_note(
		item_code=batch_item,
		qty=5,
		rate=500,
		use_serial_batch_fields=1,
		do_not_submit=True,
	)

	for item in [serial_item, batch_serial_item]:
		dn.append(
			"items",
			{
				"item_code": item.name,
				"qty": 5,
				"rate": 500,
				"base_rate": 500,
				"item_name": item.name,
				"uom": "Nos",
				"stock_uom": "Nos",
				"conversion_factor": 1,
				"warehouse": dn.items[0].warehouse,
				"use_serial_batch_fields": 1,
			},
		)

	dn.save()

	# The DN is still a draft: batch/serial fields must already be set on save.
	for row in dn.items:
		if row.item_code == batch_item.name:
			self.assertTrue(row.batch_no)
		if row.item_code == serial_item.name:
			self.assertTrue(row.serial_no)
def test_delivery_note_return_for_batch_item_with_different_warehouse(self):
	"""A sales return posted into a *different* warehouse must still value the
	returned batch at the incoming rate of the original Delivery Note's batch."""
	from erpnext.stock.doctype.delivery_note.delivery_note import make_sales_return
	from erpnext.stock.doctype.warehouse.test_warehouse import create_warehouse

	batch_item = make_item(
		"_Test Delivery Note Return Valuation WITH Batch Item",
		properties={
			"has_batch_no": 1,
			"create_new_batch": 1,
			"is_stock_item": 1,
			"batch_number_series": "BRTN-DNN-BIW-.#####",
		},
	).name

	# Receive stock (5 units @ 300) and remember the batch that was created.
	batches = []
	for qty, rate in {5: 300}.items():
		se = make_stock_entry(
			item_code=batch_item, target="_Test Warehouse - _TC", qty=qty, basic_rate=rate
		)
		batches.append(get_batch_from_bundle(se.items[0].serial_and_batch_bundle))

	# Target warehouse for the return, distinct from the delivery warehouse.
	warehouse = create_warehouse("Sales Return Test Warehouse 1", company="_Test Company")

	dn = create_delivery_note(
		item_code=batch_item,
		qty=5,
		rate=1000,
		use_serial_batch_fields=1,
		batch_no=batches[0],
		do_not_submit=True,
	)

	self.assertEqual(dn.items[0].warehouse, "_Test Warehouse - _TC")

	dn.save()
	dn.submit()
	dn.reload()

	# Record the outgoing (incoming_rate) valuation per batch from the DN's bundle.
	batch_no_valuation = defaultdict(float)
	for row in dn.items:
		if row.serial_and_batch_bundle:
			bundle_data = frappe.get_all(
				"Serial and Batch Entry",
				filters={"parent": row.serial_and_batch_bundle},
				fields=["incoming_rate", "serial_no", "batch_no"],
			)

			for d in bundle_data:
				if d.batch_no:
					batch_no_valuation[d.batch_no] = d.incoming_rate

	# Return the goods into the other warehouse.
	return_entry = make_sales_return(dn.name)
	return_entry.items[0].warehouse = warehouse
	return_entry.save()
	return_entry.submit()
	return_entry.reload()

	# Despite the warehouse change, each returned batch keeps the DN's rate.
	for row in return_entry.items:
		self.assertEqual(row.warehouse, warehouse)

		bundle_data = frappe.get_all(
			"Serial and Batch Entry",
			filters={"parent": row.serial_and_batch_bundle},
			fields=["incoming_rate", "batch_no"],
		)

		for d in bundle_data:
			self.assertEqual(d.incoming_rate, batch_no_valuation[d.batch_no])
def create_delivery_note(**args):
dn = frappe.new_doc("Delivery Note")

View File

@@ -251,7 +251,7 @@ class SerialandBatchBundle(Document):
return
if return_against := self.get_return_against(parent=parent):
self.set_valuation_rate_for_return_entry(return_against, save)
self.set_valuation_rate_for_return_entry(return_against, row, save)
elif self.type_of_transaction == "Outward":
self.set_incoming_rate_for_outward_transaction(
row, save, allow_negative_stock=allow_negative_stock
@@ -259,7 +259,7 @@ class SerialandBatchBundle(Document):
else:
self.set_incoming_rate_for_inward_transaction(row, save)
def set_valuation_rate_for_return_entry(self, return_against, save=False):
def set_valuation_rate_for_return_entry(self, return_against, row, save=False):
if valuation_details := self.get_valuation_rate_for_return_entry(return_against):
for row in self.entries:
if valuation_details:
@@ -281,6 +281,9 @@ class SerialandBatchBundle(Document):
}
)
elif self.type_of_transaction == "Inward":
self.set_incoming_rate_for_inward_transaction(row, save)
def validate_returned_serial_batch_no(self, return_against, row, original_inv_details):
if row.serial_no and row.serial_no not in original_inv_details["serial_nos"]:
self.throw_error_message(
@@ -297,6 +300,9 @@ class SerialandBatchBundle(Document):
)
def get_valuation_rate_for_return_entry(self, return_against):
if not self.voucher_detail_no:
return {}
valuation_details = frappe._dict(
{
"serial_nos": defaultdict(float),
@@ -304,6 +310,29 @@ class SerialandBatchBundle(Document):
}
)
field = {
"Sales Invoice": "sales_invoice_item",
"Purchase Invoice": "purchase_invoice_item",
"Delivery Note": "dn_detail",
"Purchase Receipt": "purchase_receipt_item",
}.get(self.voucher_type)
return_against_voucher_detail_no = frappe.db.get_value(
self.child_table, self.voucher_detail_no, field
)
filters = [
["Serial and Batch Bundle", "voucher_no", "=", return_against],
["Serial and Batch Entry", "docstatus", "=", 1],
["Serial and Batch Bundle", "is_cancelled", "=", 0],
["Serial and Batch Bundle", "item_code", "=", self.item_code],
["Serial and Batch Bundle", "voucher_detail_no", "=", return_against_voucher_detail_no],
]
if self.voucher_type in ["Purchase Receipt", "Purchase Invoice"]:
# Added to handle rejected warehouse case
filters.append(["Serial and Batch Entry", "warehouse", "=", self.warehouse])
bundle_data = frappe.get_all(
"Serial and Batch Bundle",
fields=[
@@ -311,13 +340,7 @@ class SerialandBatchBundle(Document):
"`tabSerial and Batch Entry`.`batch_no`",
"`tabSerial and Batch Entry`.`incoming_rate`",
],
filters=[
["Serial and Batch Bundle", "voucher_no", "=", return_against],
["Serial and Batch Entry", "docstatus", "=", 1],
["Serial and Batch Bundle", "is_cancelled", "=", 0],
["Serial and Batch Bundle", "item_code", "=", self.item_code],
["Serial and Batch Bundle", "warehouse", "=", self.warehouse],
],
filters=filters,
order_by="`tabSerial and Batch Bundle`.`creation`, `tabSerial and Batch Entry`.`idx`",
)

View File

@@ -115,6 +115,13 @@ def get_item_details(args, doc=None, for_validate=False, overwrite_warehouse=Tru
out.update(data)
if (
frappe.db.get_single_value("Stock Settings", "auto_create_serial_and_batch_bundle_for_outward")
and not args.get("serial_and_batch_bundle")
and (args.get("use_serial_batch_fields") or args.get("doctype") == "POS Invoice")
):
update_stock(args, out, doc)
if args.transaction_date and item.lead_time_days:
out.schedule_date = out.lead_time_date = add_days(args.transaction_date, item.lead_time_days)
@@ -168,6 +175,92 @@ def update_bin_details(args, out, doc):
out.update(bin_details)
def update_stock(ctx, out, doc=None):
	"""Auto-pick batch and/or serial numbers for an outward stock item row.

	Mutates ``out`` in place: fills ``batch_no``/``actual_batch_qty`` for
	batch-tracked items and ``serial_no`` (newline-joined) for serialised
	items, using the strategy configured in Stock Settings
	("pick_serial_and_batch_based_on").

	Args:
		ctx: item-details arguments (doctype, item_code, warehouse, and any
			pre-selected batch_no / serial_no / ignore_serial_nos).
		out: the item-details result being built; read for warehouse,
			stock_qty, has_batch_no and has_serial_no.
		doc: parent transaction document used to exclude batches/serials
			already allocated on its other rows.
			NOTE(review): defaults to None, but the serial-number filtering
			helper iterates doc.get("items") — confirm callers pass a doc
			whenever serialised items are involved.
	"""
	from erpnext.stock.doctype.batch.batch import get_available_batches
	from erpnext.stock.doctype.serial_no.serial_no import get_serial_nos_for_outward

	# Only outward movements (DN, POS Invoice, or SI that updates stock) with
	# a warehouse and a positive quantity need auto-picking.
	if (
		(
			ctx.get("doctype") in ["Delivery Note", "POS Invoice"]
			or (ctx.get("doctype") == "Sales Invoice" and ctx.get("update_stock"))
		)
		and out.warehouse
		and out.stock_qty > 0
	):
		kwargs = frappe._dict(
			{
				"item_code": ctx.item_code,
				"warehouse": ctx.warehouse,
				"based_on": frappe.db.get_single_value("Stock Settings", "pick_serial_and_batch_based_on"),
			}
		)

		if ctx.get("ignore_serial_nos"):
			kwargs["ignore_serial_nos"] = ctx.get("ignore_serial_nos")

		qty = out.stock_qty
		batches = []
		if out.has_batch_no and not ctx.get("batch_no"):
			batches = get_available_batches(kwargs)
			if doc:
				# Deduct quantities already taken by other rows of this document.
				filter_batches(batches, doc)

			# Walk available batches until the required qty is covered; ``out``
			# ends up holding the last batch touched and the qty picked from it.
			for batch_no, batch_qty in batches.items():
				if batch_qty >= qty:
					out.update({"batch_no": batch_no, "actual_batch_qty": qty})
					break
				else:
					qty -= batch_qty
					out.update({"batch_no": batch_no, "actual_batch_qty": batch_qty})

		if out.has_serial_no and out.has_batch_no and has_incorrect_serial_nos(ctx, out):
			# Batch+serial item: restrict serial picking to the chosen batch.
			kwargs["batches"] = [ctx.get("batch_no")] if ctx.get("batch_no") else [out.get("batch_no")]
			serial_nos = get_serial_nos_for_outward(kwargs)
			serial_nos = get_filtered_serial_nos(serial_nos, doc)
			out["serial_no"] = "\n".join(serial_nos[: cint(out.stock_qty)])
		elif out.has_serial_no and not ctx.get("serial_no"):
			serial_nos = get_serial_nos_for_outward(kwargs)
			serial_nos = get_filtered_serial_nos(serial_nos, doc)
			out["serial_no"] = "\n".join(serial_nos[: cint(out.stock_qty)])
def has_incorrect_serial_nos(ctx, out):
	"""Tell whether the serial numbers supplied on ``ctx`` need (re)picking.

	Returns True when no serial numbers were supplied at all, or when the
	count of supplied serial numbers differs from the row's stock quantity.
	"""
	from erpnext.stock.doctype.serial_no.serial_no import get_serial_nos

	raw_serial_nos = ctx.get("serial_no")
	if not raw_serial_nos:
		return True

	return len(get_serial_nos(raw_serial_nos)) != out.get("stock_qty")
def filter_batches(batches, doc):
	"""Deduct quantities already allocated on ``doc``'s item rows from ``batches``.

	Mutates ``batches`` (mapping of batch_no -> available qty) in place and
	drops any batch whose remaining quantity falls to zero or below.
	"""
	for item_row in doc.get("items"):
		batch_no = item_row.get("batch_no")
		if batch_no not in batches:
			continue

		batches[batch_no] -= item_row.get("qty")
		if batches[batch_no] <= 0:
			del batches[batch_no]
def get_filtered_serial_nos(serial_nos, doc):
	"""Remove serial numbers already used on ``doc``'s item rows from ``serial_nos``.

	Args:
		serial_nos: list of candidate serial numbers; filtered in place.
		doc: the transaction document whose "items" rows may already carry
			serial numbers. May be None/empty, in which case nothing is filtered.

	Returns:
		The same ``serial_nos`` list (possibly shortened).
	"""
	# Guard: update_stock() may call this with doc=None (its default); the
	# original body would crash on doc.get("items") in that case.
	if not doc:
		return serial_nos

	from erpnext.stock.doctype.serial_no.serial_no import get_serial_nos

	# Collect every serial number already allocated on the document once, so
	# membership tests are O(1) instead of a list scan per row.
	used_serial_nos = set()
	for row in doc.get("items"):
		if row.get("serial_no"):
			used_serial_nos.update(get_serial_nos(row.get("serial_no")))

	# Filter in place to preserve the original's mutate-and-return contract.
	serial_nos[:] = [sn for sn in serial_nos if sn not in used_serial_nos]
	return serial_nos
def process_args(args):
if isinstance(args, str):
args = json.loads(args)