Merge pull request #52347 from frappe/version-16-hotfix

chore: release v16
This commit is contained in:
ruthra kumar
2026-02-04 09:57:57 +05:30
committed by GitHub
99 changed files with 3652 additions and 1165 deletions

View File

@@ -60,7 +60,7 @@ body:
description: Share exact version number of Frappe and ERPNext you are using.
placeholder: |
Frappe version -
ERPNext Verion -
ERPNext version -
validations:
required: true

View File

@@ -7,6 +7,7 @@ on:
paths:
- "**.js"
- "**.css"
- "**.svg"
- "**.md"
- "**.html"
- 'crowdin.yml'

View File

@@ -1,3 +1,4 @@
<div align="center">
<a href="https://frappe.io/erpnext">
<img src="./erpnext/public/images/v16/erpnext.svg" alt="ERPNext Logo" height="80px" width="80px"/>

View File

@@ -33,6 +33,17 @@
},
"account_number": "1151.000"
},
"Pajak Dibayar di Muka": {
"PPN Masukan": {
"account_number": "1152.001",
"account_type": "Tax"
},
"PPh 23 Dibayar di Muka": {
"account_number": "1152.002",
"account_type": "Tax"
},
"account_number": "1152.000"
},
"account_number": "1150.000"
},
"Kas": {
@@ -97,17 +108,6 @@
},
"account_number": "1130.000"
},
"Pajak Dibayar di Muka": {
"PPN Masukan": {
"account_number": "1151.001",
"account_type": "Tax"
},
"PPh 23 Dibayar di Muka": {
"account_number": "1152.001",
"account_type": "Tax"
},
"account_number": "1150.000"
},
"account_number": "1100.000"
},

View File

@@ -1691,6 +1691,10 @@ def get_exchange_rate(
credit=None,
exchange_rate=None,
):
# Ensure exchange_rate is always numeric to avoid calculation errors
if isinstance(exchange_rate, str):
exchange_rate = flt(exchange_rate) or 1
account_details = frappe.get_cached_value(
"Account", account, ["account_type", "root_type", "account_currency", "company"], as_dict=1
)

View File

@@ -1610,13 +1610,14 @@
"hidden": 1,
"label": "Item Wise Tax Details",
"no_copy": 1,
"options": "Item Wise Tax Detail"
"options": "Item Wise Tax Detail",
"print_hide": 1
}
],
"icon": "fa fa-file-text",
"is_submittable": 1,
"links": [],
"modified": "2025-08-04 22:22:31.471752",
"modified": "2026-01-29 21:20:51.376875",
"modified_by": "Administrator",
"module": "Accounts",
"name": "POS Invoice",

View File

@@ -898,6 +898,53 @@ class TestPOSInvoice(IntegrationTestCase):
if batch.batch_no == batch_no and batch.warehouse == "_Test Warehouse - _TC":
self.assertEqual(batch.qty, 5)
def test_pos_batch_reservation_with_return_qty(self):
"""
Test POS Invoice reserved qty for batch without bundle with return invoices.
"""
from erpnext.stock.doctype.serial_and_batch_bundle.serial_and_batch_bundle import (
get_auto_batch_nos,
)
from erpnext.stock.doctype.stock_reconciliation.test_stock_reconciliation import (
create_batch_item_with_batch,
)
create_batch_item_with_batch("_Batch Item Reserve Return", "TestBatch-RR 01")
se = make_stock_entry(
target="_Test Warehouse - _TC",
item_code="_Batch Item Reserve Return",
qty=30,
basic_rate=100,
)
se.reload()
batch_no = get_batch_from_bundle(se.items[0].serial_and_batch_bundle)
# POS Invoice for the batch without bundle
pos_inv = create_pos_invoice(item="_Batch Item Reserve Return", rate=300, qty=15, do_not_save=1)
pos_inv.append(
"payments",
{"mode_of_payment": "Cash", "amount": 4500},
)
pos_inv.items[0].batch_no = batch_no
pos_inv.save()
pos_inv.submit()
# POS Invoice return
pos_return = make_sales_return(pos_inv.name)
pos_return.insert()
pos_return.submit()
batches = get_auto_batch_nos(
frappe._dict({"item_code": "_Batch Item Reserve Return", "warehouse": "_Test Warehouse - _TC"})
)
for batch in batches:
if batch.batch_no == batch_no and batch.warehouse == "_Test Warehouse - _TC":
self.assertEqual(batch.qty, 30)
def test_pos_batch_item_qty_validation(self):
from erpnext.stock.doctype.serial_and_batch_bundle.serial_and_batch_bundle import (
BatchNegativeStockError,

View File

@@ -415,8 +415,9 @@ def reconcile(doc: None | str = None) -> None:
for x in allocations:
pr.append("allocation", x)
skip_ref_details_update_for_pe = check_multi_currency(pr)
# reconcile
pr.reconcile_allocations(skip_ref_details_update_for_pe=True)
pr.reconcile_allocations(skip_ref_details_update_for_pe=skip_ref_details_update_for_pe)
# If Payment Entry, update details only for newly linked references
# This is for performance
@@ -504,6 +505,37 @@ def reconcile(doc: None | str = None) -> None:
frappe.db.set_value("Process Payment Reconciliation", doc, "status", "Completed")
def check_multi_currency(pr_doc):
GL = frappe.qb.DocType("GL Entry")
Account = frappe.qb.DocType("Account")
def get_account_currency(voucher_type, voucher_no):
currency = (
frappe.qb.from_(GL)
.join(Account)
.on(GL.account == Account.name)
.select(Account.account_currency)
.where(
(GL.voucher_type == voucher_type)
& (GL.voucher_no == voucher_no)
& (Account.account_type.isin(["Payable", "Receivable"]))
)
.limit(1)
).run(as_dict=True)
return currency[0].account_currency if currency else None
for allocation in pr_doc.allocation:
reference_currency = get_account_currency(allocation.reference_type, allocation.reference_name)
invoice_currency = get_account_currency(allocation.invoice_type, allocation.invoice_number)
if reference_currency != invoice_currency:
return True
return False
@frappe.whitelist()
def is_any_doc_running(for_filter: str | dict | None = None) -> str | None:
running_doc = None

View File

@@ -1625,7 +1625,8 @@
"hidden": 1,
"label": "Item Wise Tax Details",
"no_copy": 1,
"options": "Item Wise Tax Detail"
"options": "Item Wise Tax Detail",
"print_hide": 1
},
{
"collapsible": 1,
@@ -1667,7 +1668,7 @@
"idx": 204,
"is_submittable": 1,
"links": [],
"modified": "2025-12-15 06:41:38.237728",
"modified": "2026-01-29 21:21:53.051193",
"modified_by": "Administrator",
"module": "Accounts",
"name": "Purchase Invoice",

View File

@@ -36,7 +36,7 @@ from erpnext.accounts.utils import get_account_currency, get_fiscal_year, update
from erpnext.assets.doctype.asset.asset import is_cwip_accounting_enabled
from erpnext.assets.doctype.asset_category.asset_category import get_asset_category_account
from erpnext.buying.utils import check_on_hold_or_closed_status
from erpnext.controllers.accounts_controller import validate_account_head
from erpnext.controllers.accounts_controller import merge_taxes, validate_account_head
from erpnext.controllers.buying_controller import BuyingController
from erpnext.stock.doctype.purchase_receipt.purchase_receipt import (
update_billed_amount_based_on_po,
@@ -2005,9 +2005,17 @@ def make_purchase_receipt(source_name, target_doc=None, args=None):
args = json.loads(args)
def post_parent_process(source_parent, target_parent):
for row in target_parent.get("items"):
if row.get("qty") == 0:
target_parent.remove(row)
remove_items_with_zero_qty(target_parent)
set_missing_values(source_parent, target_parent)
def remove_items_with_zero_qty(target_parent):
target_parent.items = [row for row in target_parent.get("items") if row.get("qty") != 0]
def set_missing_values(source_parent, target_parent):
target_parent.run_method("set_missing_values")
if args and args.get("merge_taxes"):
merge_taxes(source_parent, target_parent)
target_parent.run_method("calculate_taxes_and_totals")
def update_item(obj, target, source_parent):
from erpnext.controllers.sales_and_purchase_return import get_returned_qty_map_for_row
@@ -2059,7 +2067,11 @@ def make_purchase_receipt(source_name, target_doc=None, args=None):
"postprocess": update_item,
"condition": lambda doc: abs(doc.received_qty) < abs(doc.qty) and select_item(doc),
},
"Purchase Taxes and Charges": {"doctype": "Purchase Taxes and Charges"},
"Purchase Taxes and Charges": {
"doctype": "Purchase Taxes and Charges",
"reset_value": not (args and args.get("merge_taxes")),
"ignore": args.get("merge_taxes") if args else 0,
},
},
target_doc,
post_parent_process,

View File

@@ -44,6 +44,7 @@ erpnext.accounts.SalesInvoiceController = class SalesInvoiceController extends (
"Unreconcile Payment Entries",
"Serial and Batch Bundle",
"Bank Transaction",
"Packing Slip",
];
if (!this.frm.doc.__islocal && !this.frm.doc.customer && this.frm.doc.debit_to) {

View File

@@ -1,6 +1,7 @@
{
"actions": [],
"allow_import": 1,
"allow_rename": 1,
"autoname": "naming_series:",
"creation": "2022-01-25 10:29:57.771398",
"doctype": "DocType",
@@ -2250,7 +2251,8 @@
"hidden": 1,
"label": "Item Wise Tax Details",
"no_copy": 1,
"options": "Item Wise Tax Detail"
"options": "Item Wise Tax Detail",
"print_hide": 1
},
{
"default": "0",
@@ -2304,7 +2306,7 @@
"link_fieldname": "consolidated_invoice"
}
],
"modified": "2025-12-24 18:29:50.242618",
"modified": "2026-01-30 16:45:59.682473",
"modified_by": "Administrator",
"module": "Accounts",
"name": "Sales Invoice",

View File

@@ -4745,6 +4745,66 @@ class TestSalesInvoice(ERPNextTestSuite):
doc.db_set("do_not_use_batchwise_valuation", original_value)
@change_settings("Selling Settings", {"set_zero_rate_for_expired_batch": True})
def test_zero_valuation_for_standalone_credit_note_with_expired_batch(self):
item_code = "_Test Item for Expiry Batch Zero Valuation"
make_item_for_si(
item_code,
{
"is_stock_item": 1,
"has_batch_no": 1,
"has_expiry_date": 1,
"shelf_life_in_days": 2,
"create_new_batch": 1,
"batch_number_series": "TBATCH-EBZV.####",
},
)
se = make_stock_entry(
item_code=item_code,
qty=10,
target="_Test Warehouse - _TC",
rate=100,
)
# fetch batch no from bundle
batch_no = get_batch_from_bundle(se.items[0].serial_and_batch_bundle)
si = create_sales_invoice(
posting_date=add_days(nowdate(), 3),
item=item_code,
qty=-10,
rate=100,
is_return=1,
update_stock=1,
use_serial_batch_fields=1,
do_not_save=1,
do_not_submit=1,
)
si.items[0].batch_no = batch_no
si.save()
si.submit()
si.reload()
# check zero incoming rate in voucher
self.assertEqual(si.items[0].incoming_rate, 0.0)
# check zero incoming rate in stock ledger
stock_ledger_entry = frappe.db.get_value(
"Stock Ledger Entry",
{
"voucher_type": "Sales Invoice",
"voucher_no": si.name,
"item_code": item_code,
"warehouse": "_Test Warehouse - _TC",
},
["incoming_rate", "valuation_rate"],
as_dict=True,
)
self.assertEqual(stock_ledger_entry.incoming_rate, 0.0)
def make_item_for_si(item_code, properties=None):
from erpnext.stock.doctype.item.test_item import make_item

View File

@@ -43,16 +43,18 @@
"read_only": 1
}
],
"grid_page_length": 50,
"index_web_pages_for_search": 1,
"istable": 1,
"links": [],
"modified": "2024-03-27 13:10:55.008837",
"modified": "2025-11-14 16:17:25.584675",
"modified_by": "Administrator",
"module": "Accounts",
"name": "Transaction Deletion Record Details",
"owner": "Administrator",
"permissions": [],
"row_format": "Dynamic",
"sort_field": "creation",
"sort_order": "DESC",
"states": []
}
}

View File

@@ -17,7 +17,7 @@
</div>
<div class="col-xs-6">
<table>
<tr><td><strong>Date: </strong></td><td>{{ frappe.utils.format_date(doc.creation) }}</td></tr>
<tr><td><strong>Date: </strong></td><td>{{ frappe.utils.format_date(doc.posting_date) }}</td></tr>
</table>
</div>
</div>

View File

@@ -163,11 +163,11 @@ def get_net_profit_loss(income, expense, period_list, company, currency=None, co
def get_chart_data(filters, columns, income, expense, net_profit_loss, currency):
labels = [d.get("label") for d in columns[2:]]
labels = [d.get("label") for d in columns[4:]]
income_data, expense_data, net_profit = [], [], []
for p in columns[2:]:
for p in columns[4:]:
if income:
income_data.append(income[-2].get(p.get("fieldname")))
if expense:

View File

@@ -11,6 +11,7 @@ import frappe.defaults
from frappe import _, qb, throw
from frappe.desk.reportview import build_match_conditions
from frappe.model.meta import get_field_precision
from frappe.model.naming import determine_consecutive_week_number
from frappe.query_builder import AliasedQuery, Case, Criterion, Field, Table
from frappe.query_builder.functions import Count, IfNull, Max, Round, Sum
from frappe.query_builder.utils import DocType
@@ -25,6 +26,7 @@ from frappe.utils import (
get_number_format_info,
getdate,
now,
now_datetime,
nowdate,
)
from frappe.utils.caching import site_cache
@@ -66,6 +68,7 @@ def get_fiscal_year(
as_dict=False,
boolean=None,
raise_on_missing=True,
truncate=False,
):
if isinstance(raise_on_missing, str):
raise_on_missing = loads(raise_on_missing)
@@ -79,7 +82,14 @@ def get_fiscal_year(
fiscal_years = get_fiscal_years(
date, fiscal_year, label, verbose, company, as_dict=as_dict, raise_on_missing=raise_on_missing
)
return False if not fiscal_years else fiscal_years[0]
if fiscal_years:
fiscal_year = fiscal_years[0]
if truncate:
return ("-".join(y[-2:] for y in fiscal_year[0].split("-")), fiscal_year[1], fiscal_year[2])
return fiscal_year
return False
def get_fiscal_years(
@@ -1501,14 +1511,14 @@ def get_autoname_with_number(number_value, doc_title, company):
def parse_naming_series_variable(doc, variable):
if variable == "FY":
if variable in ["FY", "TFY"]:
if doc:
date = doc.get("posting_date") or doc.get("transaction_date") or getdate()
company = doc.get("company")
else:
date = getdate()
company = None
return get_fiscal_year(date=date, company=company)[0]
return get_fiscal_year(date=date, company=company, truncate=variable == "TFY")[0]
elif variable == "ABBR":
if doc:
@@ -1518,6 +1528,18 @@ def parse_naming_series_variable(doc, variable):
return frappe.db.get_value("Company", company, "abbr") if company else ""
else:
data = {"YY": "%y", "YYYY": "%Y", "MM": "%m", "DD": "%d", "JJJ": "%j"}
date = (
(
getdate(doc.get("posting_date") or doc.get("transaction_date") or doc.get("posting_datetime"))
or now_datetime()
)
if frappe.get_single_value("Global Defaults", "use_posting_datetime_for_naming_documents")
else now_datetime()
)
return date.strftime(data[variable]) if variable in data else determine_consecutive_week_number(date)
@frappe.whitelist()
def get_coa(doctype, parent, is_root=None, chart=None):

View File

@@ -246,7 +246,9 @@ def _make_journal_entry_for_depreciation(
def setup_journal_entry_metadata(je, depr_schedule_doc, depr_series, depr_schedule, asset):
je.voucher_type = "Depreciation Entry"
je.naming_series = depr_series
if depr_series:
je.naming_series = depr_series
je.posting_date = depr_schedule.schedule_date
je.company = asset.company
je.finance_book = depr_schedule_doc.finance_book

View File

@@ -803,7 +803,7 @@ frappe.ui.form.on("Purchase Order", "is_subcontracted", function (frm) {
function prevent_past_schedule_dates(frm) {
if (frm.doc.transaction_date) {
frm.fields_dict["schedule_date"].datepicker.update({
frm.fields_dict["schedule_date"].datepicker?.update({
minDate: new Date(frm.doc.transaction_date),
});
}

View File

@@ -1301,7 +1301,8 @@
"hidden": 1,
"label": "Item Wise Tax Details",
"no_copy": 1,
"options": "Item Wise Tax Detail"
"options": "Item Wise Tax Detail",
"print_hide": 1
}
],
"grid_page_length": 50,
@@ -1309,7 +1310,7 @@
"idx": 105,
"is_submittable": 1,
"links": [],
"modified": "2025-09-28 11:00:56.635116",
"modified": "2026-01-29 21:22:54.323838",
"modified_by": "Administrator",
"module": "Buying",
"name": "Purchase Order",

View File

@@ -149,6 +149,7 @@ class PurchaseOrder(BuyingController):
supplied_items: DF.Table[PurchaseOrderItemSupplied]
supplier: DF.Link
supplier_address: DF.Link | None
supplier_group: DF.Link | None
supplier_name: DF.Data | None
supplier_warehouse: DF.Link | None
tax_category: DF.Link | None

View File

@@ -304,12 +304,17 @@ class RequestforQuotation(BuyingController):
else:
sender = frappe.session.user not in STANDARD_USERS and frappe.session.user or None
rendered_message = frappe.render_template(self.message_for_supplier, doc_args)
subject_source = (
self.subject
or frappe.get_value("Email Template", self.email_template, "subject")
or _("Request for Quotation")
)
rendered_subject = frappe.render_template(subject_source, doc_args)
if preview:
return {
"message": self.message_for_supplier,
"subject": self.subject
or frappe.get_value("Email Template", self.email_template, "subject")
or _("Request for Quotation"),
"message": rendered_message,
"subject": rendered_subject,
}
attachments = []
@@ -333,10 +338,8 @@ class RequestforQuotation(BuyingController):
self.send_email(
data,
sender,
self.subject
or frappe.get_value("Email Template", self.email_template, "subject")
or _("Request for Quotation"),
self.message_for_supplier,
rendered_subject,
rendered_message,
attachments,
)

View File

@@ -63,7 +63,6 @@
"fieldtype": "Column Break"
},
{
"fetch_from": "item_code.item_name",
"fieldname": "item_name",
"fieldtype": "Data",
"in_global_search": 1,
@@ -262,7 +261,7 @@
"index_web_pages_for_search": 1,
"istable": 1,
"links": [],
"modified": "2025-04-28 23:30:22.927989",
"modified": "2026-01-31 19:46:27.884592",
"modified_by": "Administrator",
"module": "Buying",
"name": "Request for Quotation Item",

View File

@@ -938,7 +938,8 @@
"hidden": 1,
"label": "Item Wise Tax Details",
"no_copy": 1,
"options": "Item Wise Tax Detail"
"options": "Item Wise Tax Detail",
"print_hide": 1
}
],
"grid_page_length": 50,
@@ -947,7 +948,7 @@
"index_web_pages_for_search": 1,
"is_submittable": 1,
"links": [],
"modified": "2025-07-23 02:22:43.526822",
"modified": "2026-01-29 21:23:13.778468",
"modified_by": "Administrator",
"module": "Buying",
"name": "Supplier Quotation",

View File

@@ -12,7 +12,7 @@ from frappe.utils import cint, flt, format_datetime, get_datetime
import erpnext
from erpnext.stock.serial_batch_bundle import get_batches_from_bundle
from erpnext.stock.utils import get_combine_datetime, get_incoming_rate, get_valuation_method
from erpnext.stock.utils import get_combine_datetime, get_incoming_rate, get_valuation_method, getdate
class StockOverReturnError(frappe.ValidationError):
@@ -759,6 +759,29 @@ def get_rate_for_return(
StockLedgerEntry = frappe.qb.DocType("Stock Ledger Entry")
select_field = Abs(StockLedgerEntry.stock_value_difference / StockLedgerEntry.actual_qty)
item_details = frappe.get_cached_value("Item", item_code, ["has_batch_no", "has_expiry_date"], as_dict=1)
set_zero_rate_for_expired_batch = frappe.db.get_single_value(
"Selling Settings", "set_zero_rate_for_expired_batch"
)
if (
set_zero_rate_for_expired_batch
and item_details.has_batch_no
and item_details.has_expiry_date
and not return_against
and voucher_type in ["Sales Invoice", "Delivery Note"]
):
# set incoming_rate zero explicitly for standalone credit note with expired batch
batch_no = frappe.db.get_value(f"{voucher_type} Item", voucher_detail_no, "batch_no")
if batch_no and is_batch_expired(batch_no, sle.get("posting_date")):
frappe.db.set_value(
voucher_type + " Item",
voucher_detail_no,
"incoming_rate",
0,
)
return 0
rate = flt(frappe.db.get_value("Stock Ledger Entry", filters, select_field))
if not (rate and return_against) and voucher_type in ["Sales Invoice", "Delivery Note"]:
rate = frappe.db.get_value(f"{voucher_type} Item", voucher_detail_no, "incoming_rate")
@@ -823,12 +846,34 @@ def get_filters(
if reference_voucher_detail_no:
filters["voucher_detail_no"] = reference_voucher_detail_no
if voucher_type in ["Purchase Receipt", "Purchase Invoice"] and item_row and item_row.get("warehouse"):
filters["warehouse"] = item_row.get("warehouse")
warehouses = []
if voucher_type in ["Purchase Receipt", "Purchase Invoice"] and item_row:
if reference_voucher_detail_no:
warehouses = get_warehouses_for_return(voucher_type, reference_voucher_detail_no)
if item_row.get("warehouse") and item_row.get("warehouse") in warehouses:
filters["warehouse"] = item_row.get("warehouse")
return filters
def get_warehouses_for_return(voucher_type, name):
warehouses = []
warehouse_details = frappe.get_all(
voucher_type + " Item",
filters={"name": name, "docstatus": 1},
fields=["warehouse", "rejected_warehouse"],
)
for d in warehouse_details:
if d.warehouse:
warehouses.append(d.warehouse)
if d.rejected_warehouse:
warehouses.append(d.rejected_warehouse)
return warehouses
def get_returned_serial_nos(child_doc, parent_doc, serial_no_field=None, ignore_voucher_detail_no=None):
from erpnext.stock.doctype.serial_no.serial_no import (
get_serial_nos as get_serial_nos_from_serial_no,
@@ -1276,3 +1321,17 @@ def get_sales_invoice_item_from_consolidated_invoice(return_against_pos_invoice,
return result[0].name if result else None
except Exception:
return None
def is_batch_expired(batch_no, posting_date):
"""
To check whether the batch is expired or not based on the posting date.
"""
expiry_date = frappe.db.get_value("Batch", batch_no, "expiry_date")
if not expiry_date:
return
if getdate(posting_date) > getdate(expiry_date):
return True
return False

View File

@@ -8,7 +8,7 @@ from frappe.utils import cint, flt, get_link_to_form, nowtime
from erpnext.accounts.party import render_address
from erpnext.controllers.accounts_controller import get_taxes_and_charges
from erpnext.controllers.sales_and_purchase_return import get_rate_for_return
from erpnext.controllers.sales_and_purchase_return import get_rate_for_return, is_batch_expired
from erpnext.controllers.stock_controller import StockController
from erpnext.stock.doctype.item.item import set_item_default
from erpnext.stock.get_item_details import get_bin_details, get_conversion_factor
@@ -296,7 +296,7 @@ class SellingController(StockController):
_(
"""Row #{0}: Selling rate for item {1} is lower than its {2}.
Selling {3} should be atleast {4}.<br><br>Alternatively,
you can disable selling price validation in {5} to bypass
you can disable '{5}' in {6} to bypass
this validation."""
).format(
idx,
@@ -304,7 +304,8 @@ class SellingController(StockController):
bold(ref_rate_field),
bold("net rate"),
bold(rate),
get_link_to_form("Selling Settings", "Selling Settings"),
bold(frappe.get_meta("Selling Settings").get_label("validate_selling_price")),
get_link_to_form("Selling Settings"),
),
title=_("Invalid Selling Price"),
)
@@ -313,7 +314,6 @@ class SellingController(StockController):
return
is_internal_customer = self.get("is_internal_customer")
valuation_rate_map = {}
for item in self.items:
if not item.item_code or item.is_free_item:
@@ -323,7 +323,9 @@ class SellingController(StockController):
"Item", item.item_code, ("last_purchase_rate", "is_stock_item")
)
last_purchase_rate_in_sales_uom = last_purchase_rate * (item.conversion_factor or 1)
last_purchase_rate_in_sales_uom = flt(
last_purchase_rate * (item.conversion_factor or 1), item.precision("base_net_rate")
)
if flt(item.base_net_rate) < flt(last_purchase_rate_in_sales_uom):
throw_message(item.idx, item.item_name, last_purchase_rate_in_sales_uom, "last purchase rate")
@@ -331,50 +333,16 @@ class SellingController(StockController):
if is_internal_customer or not is_stock_item:
continue
valuation_rate_map[(item.item_code, item.warehouse)] = None
if not valuation_rate_map:
return
or_conditions = (
f"""(item_code = {frappe.db.escape(valuation_rate[0])}
and warehouse = {frappe.db.escape(valuation_rate[1])})"""
for valuation_rate in valuation_rate_map
)
valuation_rates = frappe.db.sql(
f"""
select
item_code, warehouse, valuation_rate
from
`tabBin`
where
({" or ".join(or_conditions)})
and valuation_rate > 0
""",
as_dict=True,
)
for rate in valuation_rates:
valuation_rate_map[(rate.item_code, rate.warehouse)] = rate.valuation_rate
for item in self.items:
if not item.item_code or item.is_free_item:
continue
last_valuation_rate = valuation_rate_map.get((item.item_code, item.warehouse))
if not last_valuation_rate:
continue
last_valuation_rate_in_sales_uom = last_valuation_rate * (item.conversion_factor or 1)
if flt(item.base_net_rate) < flt(last_valuation_rate_in_sales_uom):
if item.get("incoming_rate") and item.base_net_rate < (
valuation_rate := flt(
item.incoming_rate * (item.conversion_factor or 1), item.precision("base_net_rate")
)
):
throw_message(
item.idx,
item.item_name,
last_valuation_rate_in_sales_uom,
"valuation rate (Moving Average)",
valuation_rate,
"valuation rate",
)
def get_item_list(self):
@@ -533,19 +501,37 @@ class SellingController(StockController):
if self.doctype not in ("Delivery Note", "Sales Invoice"):
return
from erpnext.stock.serial_batch_bundle import get_batch_nos
allow_at_arms_length_price = frappe.get_cached_value(
"Stock Settings", None, "allow_internal_transfer_at_arms_length_price"
)
set_zero_rate_for_expired_batch = frappe.db.get_single_value(
"Selling Settings", "set_zero_rate_for_expired_batch"
)
old_doc = self.get_doc_before_save()
items = self.get("items") + (self.get("packed_items") or [])
for d in items:
if not frappe.get_cached_value("Item", d.item_code, "is_stock_item"):
continue
item_details = frappe.get_cached_value(
"Item", d.item_code, ["has_serial_no", "has_batch_no"], as_dict=1
"Item", d.item_code, ["has_serial_no", "has_batch_no", "has_expiry_date"], as_dict=1
)
if not self.get("return_against") or (
if (
set_zero_rate_for_expired_batch
and item_details.has_batch_no
and item_details.has_expiry_date
and self.get("is_return")
and not self.get("return_against")
and is_batch_expired(d.batch_no, self.get("posting_date"))
):
# set incoming rate as zero for stand-alone credit note with expired batch
d.incoming_rate = 0
elif not self.get("return_against") or (
get_valuation_method(d.item_code, self.company) == "Moving Average"
and self.get("is_return")
and not item_details.has_serial_no
@@ -554,6 +540,29 @@ class SellingController(StockController):
# Get incoming rate based on original item cost based on valuation method
qty = flt(d.get("stock_qty") or d.get("actual_qty") or d.get("qty"))
if old_doc:
old_item = next(
(
item
for item in (old_doc.get("items") + (old_doc.get("packed_items") or []))
if item.name == d.name
),
None,
)
if old_item:
old_qty = flt(
old_item.get("stock_qty") or old_item.get("actual_qty") or old_item.get("qty")
)
if (
old_item.item_code != d.item_code
or old_item.warehouse != d.warehouse
or old_qty != qty
or old_item.batch_no != d.batch_no
or get_batch_nos(old_item.serial_and_batch_bundle)
!= get_batch_nos(d.serial_and_batch_bundle)
):
d.incoming_rate = 0
if (
not d.incoming_rate
or self.is_internal_transfer()

View File

@@ -91,7 +91,8 @@ status_map = {
],
"Delivery Note": [
["Draft", None],
["To Bill", "eval:self.per_billed < 100 and self.docstatus == 1"],
["To Bill", "eval:self.per_billed == 0 and self.docstatus == 1"],
["Partially Billed", "eval:self.per_billed < 100 and self.per_billed > 0 and self.docstatus == 1"],
["Completed", "eval:self.per_billed == 100 and self.docstatus == 1"],
["Return Issued", "eval:self.per_returned == 100 and self.docstatus == 1"],
["Return", "eval:self.is_return == 1 and self.per_billed == 0 and self.docstatus == 1"],
@@ -443,7 +444,7 @@ class StatusUpdater(Document):
):
return
if args["source_dt"] != "Pick List Item":
if args["source_dt"] != "Pick List Item" and args["target_dt"] != "Quotation Item":
if qty_or_amount == "qty":
action_msg = _(
'To allow over receipt / delivery, update "Over Receipt/Delivery Allowance" in Stock Settings or the Item.'

View File

@@ -552,7 +552,10 @@ class StockController(AccountsController):
if is_rejected:
serial_nos = row.get("rejected_serial_no")
type_of_transaction = "Inward" if not self.is_return else "Outward"
qty = row.get("rejected_qty") * row.get("conversion_factor", 1.0)
qty = flt(
row.get("rejected_qty") * row.get("conversion_factor", 1.0),
frappe.get_precision("Serial and Batch Entry", "qty"),
)
warehouse = row.get("rejected_warehouse")
if (

View File

@@ -313,10 +313,10 @@ class SubcontractingController(StockController):
):
for row in frappe.get_all(
f"{self.subcontract_data.order_doctype} Item",
fields=["item_code", {"SUB": ["qty", "received_qty"], "as": "qty"}, "parent", "name"],
fields=["item_code", {"SUB": ["qty", "received_qty"], "as": "qty"}, "parent", "bom"],
filters={"docstatus": 1, "parent": ("in", self.subcontract_orders)},
):
self.qty_to_be_received[(row.item_code, row.parent)] += row.qty
self.qty_to_be_received[(row.item_code, row.parent, row.bom)] += row.qty
def __get_transferred_items(self):
se = frappe.qb.DocType("Stock Entry")
@@ -923,13 +923,17 @@ class SubcontractingController(StockController):
self.__set_serial_nos(item_row, rm_obj)
def __get_qty_based_on_material_transfer(self, item_row, transfer_item):
key = (item_row.item_code, item_row.get(self.subcontract_data.order_field))
key = (
item_row.item_code,
item_row.get(self.subcontract_data.order_field),
item_row.get("bom"),
)
if self.qty_to_be_received == item_row.qty:
return transfer_item.qty
if self.qty_to_be_received:
qty = (flt(item_row.qty) * flt(transfer_item.qty)) / flt(self.qty_to_be_received.get(key, 0))
if self.qty_to_be_received.get(key):
qty = (flt(item_row.qty) * flt(transfer_item.qty)) / flt(self.qty_to_be_received.get(key))
transfer_item.item_details.required_qty = transfer_item.qty
if transfer_item.serial_no or frappe.get_cached_value(
@@ -978,7 +982,11 @@ class SubcontractingController(StockController):
if self.qty_to_be_received:
self.qty_to_be_received[
(row.item_code, row.get(self.subcontract_data.order_field))
(
row.item_code,
row.get(self.subcontract_data.order_field),
row.get("bom"),
)
] -= row.qty
def __set_rate_for_serial_and_batch_bundle(self):

View File

@@ -2,8 +2,6 @@
# For license information, please see license.txt
from datetime import datetime
import frappe
from frappe import qb
from frappe.query_builder.functions import Sum
@@ -2480,3 +2478,21 @@ class TestAccountsController(IntegrationTestCase):
self.assertRaises(frappe.ValidationError, po.save)
po.items[0].delivered_by_supplier = 1
po.save()
@IntegrationTestCase.change_settings("Global Defaults", {"use_posting_datetime_for_naming_documents": 1})
def test_document_naming_rule_based_on_posting_date(self):
frappe.new_doc(
"Document Naming Rule", document_type="Sales Invoice", prefix="SI-.MM.-.YYYY.-"
).submit()
si = create_sales_invoice(do_not_save=True)
si.set_posting_time = 1
si.posting_date = "2025-12-31"
si.save()
self.assertEqual(si.name, "SI-12-2025-00001")
si = create_sales_invoice(do_not_save=True)
si.set_posting_time = 1
si.posting_date = "2026-01-01"
si.save()
self.assertEqual(si.name, "SI-01-2026-00002")

View File

@@ -402,9 +402,10 @@ doc_events = {
}
# function should expect the variable and doc as arguments
naming_series_variables_list = ["FY", "TFY", "ABBR", "MM", "DD", "YY", "YYYY", "JJJ", "WW"]
naming_series_variables = {
"FY": "erpnext.accounts.utils.parse_naming_series_variable",
"ABBR": "erpnext.accounts.utils.parse_naming_series_variable",
variable: "erpnext.accounts.utils.parse_naming_series_variable"
for variable in naming_series_variables_list
}
# On cancel event Payment Entry will be exempted and all linked submittable doctype will get cancelled.

File diff suppressed because it is too large Load Diff

View File

@@ -747,19 +747,21 @@ class ProductionPlan(Document):
"project": self.project,
}
key = (d.item_code, d.sales_order, d.sales_order_item, d.warehouse)
key = (d.item_code, d.sales_order, d.sales_order_item, d.warehouse, d.planned_start_date)
if self.combine_items:
key = (d.item_code, d.sales_order, d.warehouse)
key = (d.item_code, d.sales_order, d.warehouse, d.planned_start_date)
if not d.sales_order:
key = (d.name, d.item_code, d.warehouse)
key = (d.name, d.item_code, d.warehouse, d.planned_start_date)
if not item_details["project"] and d.sales_order:
item_details["project"] = frappe.get_cached_value("Sales Order", d.sales_order, "project")
if self.get_items_from == "Material Request":
item_details.update({"qty": d.planned_qty})
item_dict[(d.item_code, d.material_request_item, d.warehouse)] = item_details
item_dict[
(d.item_code, d.material_request_item, d.warehouse, d.planned_start_date)
] = item_details
else:
item_details.update(
{

View File

@@ -999,7 +999,7 @@ class TestProductionPlan(IntegrationTestCase):
items_data = pln.get_production_items()
# Update qty
items_data[(pln.po_items[0].name, item, None)]["qty"] = qty
items_data[(pln.po_items[0].name, item, None, pln.po_items[0].planned_start_date)]["qty"] = qty
# Create and Submit Work Order for each item in items_data
for _key, item in items_data.items():

View File

@@ -710,7 +710,7 @@ erpnext.work_order = {
set_custom_buttons: function (frm) {
var doc = frm.doc;
if (doc.docstatus === 1 && doc.status !== "Closed") {
if (doc.docstatus === 1 && !["Closed", "Completed"].includes(doc.status)) {
frm.add_custom_button(
__("Close"),
function () {
@@ -720,9 +720,6 @@ erpnext.work_order = {
},
__("Status")
);
}
if (doc.docstatus === 1 && !["Closed", "Completed"].includes(doc.status)) {
if (doc.status != "Stopped" && doc.status != "Completed") {
frm.add_custom_button(
__("Stop"),

View File

@@ -454,7 +454,6 @@ class MaterialRequirementsPlanningReport:
row[field] = rm_details.get(field)
self.update_required_qty(row)
row.release_date = add_days(row.delivery_date, row.lead_time * -1)
if i != 0:
data.append(frappe._dict({}))
@@ -463,7 +462,15 @@ class MaterialRequirementsPlanningReport:
if rm_details.raw_materials:
row.capacity = get_item_capacity(row.item_code, self.filters.bucket_size)
row.type_of_material = "Manufacture"
if row.lead_time and row.required_qty:
row.lead_time = math.ceil(row.required_qty / row.lead_time)
elif not row.required_qty:
row.lead_time = 0
if not row.lead_time and rm_details.raw_materials:
row.lead_time = self.get_lead_time_from_raw_materials(rm_details.raw_materials)
row.release_date = add_days(row.delivery_date, row.lead_time * -1)
data.append(row)
if rm_details.raw_materials:
self.update_rm_details(
@@ -472,6 +479,15 @@ class MaterialRequirementsPlanningReport:
return data
def get_lead_time_from_raw_materials(self, raw_materials):
	"""Return the cumulative lead time (in whole days) of a raw-material tree.

	Each material contributes ``ceil(material.lead_time)``; nested
	``raw_materials`` lists are folded in recursively.
	"""
	total = 0
	for rm in raw_materials:
		total += math.ceil(rm.lead_time)
		# Walk down into sub-assemblies, if any.
		if rm.raw_materials:
			total += self.get_lead_time_from_raw_materials(rm.raw_materials)
	return total
def add_non_planned_so(self, row):
if so_details := self._so_details.get((row.item_code, row.delivery_date)):
row.adhoc_qty = so_details.qty
@@ -1199,8 +1215,10 @@ def get_item_lead_time(item_code, type_of_material):
if type_of_material == "Manufacture":
query = query.select(
Case()
.when(doctype.manufacturing_time_in_mins.isnull(), 0)
.else_(doctype.manufacturing_time_in_mins / 1440 + doctype.buffer_time)
.when(
(doctype.manufacturing_time_in_mins.isnull() | (doctype.manufacturing_time_in_mins <= 0)), 0
)
.else_(1440 / doctype.manufacturing_time_in_mins + doctype.buffer_time)
.as_("lead_time")
)
else:

View File

@@ -459,3 +459,6 @@ erpnext.patches.v16_0.fix_barcode_typo
erpnext.patches.v16_0.set_post_change_gl_entries_on_pos_settings
execute:frappe.delete_doc_if_exists("Workspace Sidebar", "Opening & Closing")
erpnext.patches.v15_0.create_accounting_dimensions_in_advance_taxes_and_charges
erpnext.patches.v16_0.set_ordered_qty_in_quotation_item
erpnext.patches.v16_0.migrate_transaction_deletion_task_flags_to_status # 2
erpnext.patches.v16_0.update_company_custom_field_in_bin

View File

@@ -0,0 +1,42 @@
import frappe
def execute():
	"""
	Migrate Transaction Deletion Record boolean task flags to status Select fields.

	Renames fields from old names to new names with _status suffix.
	Maps: truthy (1, "1", True) -> "Completed", everything else -> "Pending".
	"""
	if not frappe.db.table_exists("tabTransaction Deletion Record"):
		return

	# Field mapping: old boolean field name -> new status field name
	field_mapping = {
		"delete_bin_data": "delete_bin_data_status",
		"delete_leads_and_addresses": "delete_leads_and_addresses_status",
		"reset_company_default_values": "reset_company_default_values_status",
		"clear_notifications": "clear_notifications_status",
		"initialize_doctypes_table": "initialize_doctypes_table_status",
		"delete_transactions": "delete_transactions_status",
	}

	records = frappe.db.get_all("Transaction Deletion Record", pluck="name")

	for name in records or []:
		# Fetch all legacy boolean flags in a single query per record instead
		# of one query per field (previously len(field_mapping) queries each).
		values = frappe.db.get_value(
			"Transaction Deletion Record", name, list(field_mapping), as_dict=True
		)
		if values is None:
			# Record vanished between the listing and the read; skip it.
			continue

		# Truthy (1, "1", True) -> "Completed"; 0, "0", False, None, "" -> "Pending"
		updates = {
			new_field: "Completed" if values.get(old_field) in (1, "1", True) else "Pending"
			for old_field, new_field in field_mapping.items()
		}

		# Update all status fields at once, without touching `modified`.
		frappe.db.set_value("Transaction Deletion Record", name, updates, update_modified=False)

View File

@@ -0,0 +1,16 @@
import frappe
def execute():
	"""
	Backfill `Quotation Item.ordered_qty` from submitted Sales Order Items.

	Sums `stock_qty` of submitted Sales Order Items grouped by their source
	`quotation_item` row and writes the totals back in bulk.
	"""
	data = frappe.get_all(
		"Sales Order Item",
		filters={"quotation_item": ["is", "set"], "docstatus": 1},
		fields=["quotation_item", {"SUM": "stock_qty", "as": "ordered_qty"}],
		group_by="quotation_item",
	)

	if not data:
		return

	frappe.db.auto_commit_on_many_writes = 1
	try:
		frappe.db.bulk_update(
			"Quotation Item", {d.quotation_item: {"ordered_qty": d.ordered_qty} for d in data}
		)
	finally:
		# Always restore the global flag, even if bulk_update raises —
		# otherwise later writes in this process keep auto-committing.
		frappe.db.auto_commit_on_many_writes = 0

View File

@@ -0,0 +1,14 @@
import frappe
def execute():
	"""Populate `Bin.company` from the parent Warehouse where it is missing."""
	# Ensure the Bin schema (including the company column) is current first.
	frappe.reload_doc("stock", "doctype", "bin")

	backfill_query = """
		UPDATE `tabBin` b
		INNER JOIN `tabWarehouse` w ON b.warehouse = w.name
		SET b.company = w.company
		WHERE b.company IS NULL OR b.company = ''
	"""
	frappe.db.sql(backfill_query)

View File

@@ -308,6 +308,8 @@ class Project(Document):
self.gross_margin = flt(self.total_billed_amount) - expense_amount
if self.total_billed_amount:
self.per_gross_margin = (self.gross_margin / flt(self.total_billed_amount)) * 100
else:
self.per_gross_margin = 0
def update_purchase_costing(self):
total_purchase_cost = calculate_total_purchase_cost(self.name)

View File

@@ -0,0 +1,4 @@
<svg width="54" height="54" viewBox="0 0 54 54" fill="none" xmlns="http://www.w3.org/2000/svg">
<path d="M38.5714 0H15.4286C6.90761 0 0 6.90761 0 15.4286V38.5714C0 47.0924 6.90761 54 15.4286 54H38.5714C47.0924 54 54 47.0924 54 38.5714V15.4286C54 6.90761 47.0924 0 38.5714 0Z" fill="#0289F7"/>
<path d="M19.2857 15.4286H22.1786C23.7763 15.4286 25.0714 16.7237 25.0714 18.3214V24.1071C25.0714 25.7048 23.7763 27 22.1786 27H19.2857V38.5714H15.4286V27H11.5714V23.1428H21.2143V19.2857H11.5714V15.4286H15.4286V11.5714H19.2857V15.4286ZM38.5714 38.5714H34.7143V34.7143H31.8214C30.2238 34.7143 28.9286 33.4191 28.9286 31.8214V26.0357C28.9286 24.438 30.2238 23.1428 31.8214 23.1428H34.7143V11.5714H38.5714V23.1428H42.4286V27H32.7857V30.8571H42.4286V34.7143H38.5714V38.5714Z" fill="white"/>
</svg>

After

Width:  |  Height:  |  Size: 787 B

View File

@@ -1,4 +1,4 @@
<svg width="54" height="54" viewBox="0 0 54 54" fill="none" xmlns="http://www.w3.org/2000/svg">
<path d="M38.5714 0H15.4286C6.90761 0 0 6.90761 0 15.4286V38.5714C0 47.0924 6.90761 54 15.4286 54H38.5714C47.0924 54 54 47.0924 54 38.5714V15.4286C54 6.90761 47.0924 0 38.5714 0Z" fill="#0289F7"/>
<path d="M31.4897 14.5C32.639 14.5 33.7415 14.9569 34.5542 15.7695L38.231 19.4463C39.0435 20.2589 39.5005 21.3616 39.5005 22.5107V36.167C39.5 38.5598 37.5594 40.5 35.1665 40.5H13.5005V37.0332H36.0337C36.0005 37 36.0332 36.6452 36.0337 36.167V22.5107C36.0337 22.281 35.9422 22.06 35.7798 21.8975L32.103 18.2207C31.9405 18.0582 31.7196 17.9668 31.4897 17.9668H29.1001V14.5H31.4897ZM23.5571 14.5H27.3667V17.9668H23.5571V22.2998H23.8999C25.4121 22.2998 26.8745 22.8788 27.9624 23.9277C29.0525 24.9789 29.6772 26.4188 29.6772 27.9336C29.6771 29.4482 29.0524 30.8874 27.9624 31.9385C26.8745 32.9873 25.4121 33.5664 23.8999 33.5664H23.5571V35.9805H20.0903V33.5664H15.7817V30.0996H20.0903V25.7666H19.5659C18.0537 25.7664 16.5912 25.1876 15.5034 24.1387C14.4135 23.0877 13.7888 21.6483 13.7886 20.1338C13.7886 18.6191 14.4135 17.1791 15.5034 16.1279C16.5912 15.079 18.0538 14.5002 19.5659 14.5H20.0903V12.7666H23.5571V14.5ZM23.5571 30.0996H23.8999C24.5325 30.0996 25.1273 29.8567 25.5562 29.4434C25.9828 29.0319 26.2103 28.4873 26.2104 27.9336C26.2104 27.3798 25.9829 26.8354 25.5562 26.4238C25.1272 26.0102 24.5327 25.7666 23.8999 25.7666H23.5571V30.0996ZM19.5659 17.9668C18.9334 17.967 18.3385 18.2097 17.9097 18.623C17.4831 19.0346 17.2554 19.5801 17.2554 20.1338C17.2556 20.6874 17.483 21.2322 17.9097 21.6436C18.3385 22.057 18.9334 22.2996 19.5659 22.2998H20.0903V17.9668H19.5659Z" fill="white"/>
<path d="M32.5522 11.5714C33.8307 11.5716 35.057 12.0799 35.9611 12.9839L40.0518 17.0746C40.9558 17.9787 41.4641 19.205 41.4643 20.4835V35.6786C41.4643 38.3413 39.3056 40.4999 36.6429 40.5H17.941C14.0902 40.4999 11.7934 36.2082 13.9294 33.0042L16.7677 28.7478C18.4246 26.2629 22.4502 26.4437 24.1071 28.9286L28.2393 22.1503L31.5465 24.1354L27.597 30.7196C26.0159 33.3546 21.9546 33.4143 20.25 30.8574C20.5231 30.8574 19.9769 30.8574 20.25 30.8574L17.1387 35.1437C16.7115 35.7845 17.1709 36.6428 17.941 36.6428H37.6071C37.6071 36.6428 37.6071 36.2111 37.6071 35.6786V20.4835C37.607 20.228 37.5053 19.9824 37.3246 19.8017L33.234 15.7111C33.0533 15.5304 32.8077 15.4287 32.5522 15.4286H16.3929C16.3929 15.4286 16.393 15.8604 16.3929 16.3928V23.1428H12.5357V16.3928C12.5358 13.7301 14.6944 11.5714 17.3571 11.5714H32.5522Z" fill="white"/>
</svg>

Before

Width:  |  Height:  |  Size: 1.7 KiB

After

Width:  |  Height:  |  Size: 1.1 KiB

View File

@@ -1,4 +1,4 @@
<svg width="54" height="54" viewBox="0 0 54 54" fill="none" xmlns="http://www.w3.org/2000/svg">
<path d="M38.5714 0H15.4286C6.90761 0 0 6.90761 0 15.4286V38.5714C0 47.0924 6.90761 54 15.4286 54H38.5714C47.0924 54 54 47.0924 54 38.5714V15.4286C54 6.90761 47.0924 0 38.5714 0Z" fill="#0289F7"/>
<path d="M36.8127 12C39.4016 12 41.5002 14.0987 41.5002 16.6875V37.3125C41.5002 39.9013 39.4016 42 36.8127 42H16.1877C13.5989 42 11.5002 39.9013 11.5002 37.3125V16.6875C11.5002 14.0987 13.5989 12 16.1877 12H36.8127ZM15.2502 38.25H37.7502V25.125H15.2502V38.25ZM22.7502 36.375H19.0002V32.625H22.7502V36.375ZM28.3752 36.375H24.6252V32.625H28.3752V36.375ZM34.0002 36.375H30.2502V32.625H34.0002V36.375ZM22.7502 30.75H19.0002V27H22.7502V30.75ZM28.3752 30.75H24.6252V27H28.3752V30.75ZM34.0002 30.75H30.2502V27H34.0002V30.75ZM15.2502 21.375H37.7502V15.75H15.2502V21.375ZM35.8752 20.4375H30.2502V16.6875H35.8752V20.4375Z" fill="white"/>
<path d="M34.6209 11.5228C35.9515 11.5229 37.21 12.0858 38.0412 13.0518L40.9712 16.4569C41.5924 17.1789 41.9316 18.0762 41.9317 19.0008V36.2889C41.9317 40.1042 36.7946 41.8256 34.1769 38.8872L32.7866 37.3263L30.1714 39.465C28.6326 40.7243 26.3336 40.7459 24.7695 39.5138L21.9828 37.3169L20.5602 38.966C18.051 41.8744 13.0031 40.2255 13.0031 36.4971H17.1358L18.7582 34.9137C20.2135 33.2268 22.8799 32.9886 24.6545 34.3865L27.4412 36.5815L30.0564 34.4428C31.8061 33.0108 34.4819 33.207 35.9688 34.8762L37.799 36.5065V19.0008C37.7989 18.9489 37.7794 18.8989 37.7446 18.8582L34.8146 15.4513C34.7676 15.3967 34.696 15.3651 34.6209 15.365H17.1358V15.5957V30.7338H13.0031V15.5957C13.0031 13.3464 14.9646 11.5228 17.384 11.5228H34.6209Z" fill="white"/>
</svg>

Before

Width:  |  Height:  |  Size: 928 B

After

Width:  |  Height:  |  Size: 1.0 KiB

View File

@@ -0,0 +1,4 @@
<svg width="54" height="54" viewBox="0 0 54 54" fill="none" xmlns="http://www.w3.org/2000/svg">
<path d="M38.5714 0H15.4286C6.90761 0 0 6.90761 0 15.4286V38.5714C0 47.0924 6.90761 54 15.4286 54H38.5714C47.0924 54 54 47.0924 54 38.5714V15.4286C54 6.90761 47.0924 0 38.5714 0Z" fill="#0289F7"/>
<path d="M19.2857 18.3214H15.4286C15.4286 18.3214 15.4286 18.7532 15.4286 19.2857V36.6429C15.4286 37.1754 15.4286 37.6072 15.4286 37.6072H38.5714C38.5714 37.6072 38.5714 37.1754 38.5714 36.6429V19.2857C38.5714 18.7532 38.5714 18.3214 38.5714 18.3214H34.7143V14.4643H37.6072C40.27 14.4643 42.4286 16.6229 42.4286 19.2857V36.6429C42.4286 39.3057 40.27 41.4643 37.6072 41.4643H16.3929C13.7301 41.4643 11.5714 39.3057 11.5714 36.6429V19.2857C11.5714 16.6229 13.7301 14.4643 16.3929 14.4643H19.2857V18.3214ZM36.6429 35.6786H28.9286V31.8214H36.6429V35.6786ZM33.9214 12.9067L32.7857 14.4643L31.6501 16.0237L28.9286 14.0387V24.1072H25.0714V14.0387L22.35 16.0237L21.2143 14.4643L20.0786 12.9067L25.8643 8.68988H28.1357L33.9214 12.9067Z" fill="white"/>
</svg>

After

Width:  |  Height:  |  Size: 1.0 KiB

View File

@@ -0,0 +1,4 @@
<svg width="54" height="54" viewBox="0 0 54 54" fill="none" xmlns="http://www.w3.org/2000/svg">
<path d="M38.5714 0H15.4286C6.90761 0 0 6.90761 0 15.4286V38.5714C0 47.0924 6.90761 54 15.4286 54H38.5714C47.0924 54 54 47.0924 54 38.5714V15.4286C54 6.90761 47.0924 0 38.5714 0Z" fill="#0289F7" fill-opacity="0.1"/>
<path d="M19.2857 15.4286H22.1786C23.7762 15.4286 25.0714 16.7238 25.0714 18.3215V24.1072C25.0714 25.7049 23.7762 27 22.1786 27H19.2857V38.5715H15.4286V27H11.5714V23.1429H21.2143V19.2858H11.5714V15.4286H15.4286V11.5715H19.2857V15.4286ZM38.5714 38.5715H34.7143V34.7143H31.8214C30.2237 34.7143 28.9286 33.4192 28.9286 31.8215V26.0358C28.9286 24.4381 30.2237 23.1429 31.8214 23.1429H34.7143V11.5715H38.5714V23.1429H42.4286V27H32.7857V30.8572H42.4286V34.7143H38.5714V38.5715Z" fill="#0981E3"/>
</svg>

After

Width:  |  Height:  |  Size: 809 B

View File

@@ -1,4 +1,4 @@
<svg width="54" height="54" viewBox="0 0 54 54" fill="none" xmlns="http://www.w3.org/2000/svg">
<path d="M38.5714 0H15.4286C6.90761 0 0 6.90761 0 15.4286V38.5714C0 47.0924 6.90761 54 15.4286 54H38.5714C47.0924 54 54 47.0924 54 38.5714V15.4286C54 6.90761 47.0924 0 38.5714 0Z" fill="#0289F7" fill-opacity="0.09"/>
<path d="M31.4897 14.5C32.639 14.5 33.7415 14.9569 34.5542 15.7695L38.231 19.4463C39.0435 20.2589 39.5005 21.3616 39.5005 22.5107V36.167C39.5 38.5598 37.5594 40.5 35.1665 40.5H13.5005V37.0332H36.0337C36.0005 37 36.0332 36.6452 36.0337 36.167V22.5107C36.0337 22.281 35.9422 22.06 35.7798 21.8975L32.103 18.2207C31.9405 18.0582 31.7196 17.9668 31.4897 17.9668H29.1001V14.5H31.4897ZM23.5571 14.5H27.3667V17.9668H23.5571V22.2998H23.8999C25.4121 22.2998 26.8745 22.8788 27.9624 23.9277C29.0525 24.9789 29.6772 26.4188 29.6772 27.9336C29.6771 29.4482 29.0524 30.8874 27.9624 31.9385C26.8745 32.9873 25.4121 33.5664 23.8999 33.5664H23.5571V35.9805H20.0903V33.5664H15.7817V30.0996H20.0903V25.7666H19.5659C18.0537 25.7664 16.5912 25.1876 15.5034 24.1387C14.4135 23.0877 13.7888 21.6483 13.7886 20.1338C13.7886 18.6191 14.4135 17.1791 15.5034 16.1279C16.5912 15.079 18.0538 14.5002 19.5659 14.5H20.0903V12.7666H23.5571V14.5ZM23.5571 30.0996H23.8999C24.5325 30.0996 25.1273 29.8567 25.5562 29.4434C25.9828 29.0319 26.2103 28.4873 26.2104 27.9336C26.2104 27.3798 25.9829 26.8354 25.5562 26.4238C25.1272 26.0102 24.5327 25.7666 23.8999 25.7666H23.5571V30.0996ZM19.5659 17.9668C18.9334 17.967 18.3385 18.2097 17.9097 18.623C17.4831 19.0346 17.2554 19.5801 17.2554 20.1338C17.2556 20.6874 17.483 21.2322 17.9097 21.6436C18.3385 22.057 18.9334 22.2996 19.5659 22.2998H20.0903V17.9668H19.5659Z" fill="#0981E3"/>
<path d="M38.5714 0H15.4286C6.90761 0 0 6.90761 0 15.4286V38.5714C0 47.0924 6.90761 54 15.4286 54H38.5714C47.0924 54 54 47.0924 54 38.5714V15.4286C54 6.90761 47.0924 0 38.5714 0Z" fill="#0289F7" fill-opacity="0.1"/>
<path d="M32.5522 11.5715C33.8306 11.5716 35.057 12.08 35.9611 12.984L40.0517 17.0747C40.9558 17.9787 41.4641 19.2051 41.4643 20.4836V35.6786C41.4643 38.3414 39.3056 40.5 36.6428 40.5H17.941C14.0902 40.5 11.7934 36.2083 13.9294 33.0042L16.7676 28.7478C18.4246 26.2629 22.4502 26.4437 24.1071 28.9286L28.2392 22.1504L31.5464 24.1354L27.597 30.7197C26.0159 33.3547 21.9546 33.4143 20.25 30.8574C20.5231 30.8574 19.9769 30.8574 20.25 30.8574L17.1387 35.1437C16.7115 35.7845 17.1709 36.6428 17.941 36.6429H37.6071C37.6071 36.6429 37.6071 36.2111 37.6071 35.6786V20.4836C37.607 20.2281 37.5053 19.9825 37.3246 19.8018L33.2339 15.7111C33.0533 15.5305 32.8077 15.4288 32.5522 15.4286H16.3928C16.3928 15.4286 16.3929 15.8604 16.3928 16.3929V23.1429H12.5357V16.3929C12.5358 13.7302 14.6944 11.5715 17.3571 11.5715H32.5522Z" fill="#0981E3"/>
</svg>

Before

Width:  |  Height:  |  Size: 1.7 KiB

After

Width:  |  Height:  |  Size: 1.1 KiB

View File

@@ -0,0 +1,4 @@
<svg width="54" height="54" viewBox="0 0 54 54" fill="none" xmlns="http://www.w3.org/2000/svg">
<path d="M38.5714 0H15.4286C6.90761 0 0 6.90761 0 15.4286V38.5714C0 47.0924 6.90761 54 15.4286 54H38.5714C47.0924 54 54 47.0924 54 38.5714V15.4286C54 6.90761 47.0924 0 38.5714 0Z" fill="#0289F7" fill-opacity="0.1"/>
<path d="M34.6208 11.5228C35.9514 11.5229 37.2099 12.0858 38.0412 13.0518L40.9712 16.4569C41.5924 17.1789 41.9315 18.0762 41.9317 19.0008V36.2888C41.9317 40.1042 36.7945 41.8256 34.1769 38.8872L32.7866 37.3263L30.1714 39.465C28.6326 40.7243 26.3335 40.7459 24.7695 39.5138L21.9827 37.3169L20.5601 38.966C18.051 41.8744 13.0031 40.2255 13.0031 36.4971H17.1358L18.7581 34.9137C20.2135 33.2268 22.8798 32.9886 24.6544 34.3865L27.4412 36.5815L30.0563 34.4428C31.8061 33.0108 34.4819 33.207 35.9688 34.8762L37.799 36.5065V19.0008C37.7989 18.9489 37.7793 18.8989 37.7445 18.8582L34.8145 15.4513C34.7676 15.3967 34.696 15.3651 34.6208 15.365H17.1358V15.5957V30.7338H13.0031V15.5957C13.0031 13.3464 14.9646 11.5228 17.384 11.5228H34.6208Z" fill="#0981E3"/>
</svg>

After

Width:  |  Height:  |  Size: 1.0 KiB

View File

@@ -0,0 +1,4 @@
<svg width="54" height="54" viewBox="0 0 54 54" fill="none" xmlns="http://www.w3.org/2000/svg">
<path d="M38.5714 0H15.4286C6.90761 0 0 6.90761 0 15.4286V38.5714C0 47.0924 6.90761 54 15.4286 54H38.5714C47.0924 54 54 47.0924 54 38.5714V15.4286C54 6.90761 47.0924 0 38.5714 0Z" fill="#0289F7" fill-opacity="0.1"/>
<path d="M19.2858 18.3214H15.4286C15.4286 18.3214 15.4286 18.7532 15.4286 19.2857V36.6429C15.4286 37.1754 15.4286 37.6072 15.4286 37.6072H38.5715C38.5715 37.6072 38.5715 37.1754 38.5715 36.6429V19.2857C38.5715 18.7532 38.5715 18.3214 38.5715 18.3214H34.7143V14.4643H37.6072C40.27 14.4643 42.4286 16.6229 42.4286 19.2857V36.6429C42.4286 39.3057 40.27 41.4643 37.6072 41.4643H16.3929C13.7301 41.4643 11.5715 39.3057 11.5715 36.6429V19.2857C11.5715 16.6229 13.7301 14.4643 16.3929 14.4643H19.2858V18.3214ZM36.6429 35.6786H28.9286V31.8214H36.6429V35.6786ZM33.9214 12.9067L32.7858 14.4643L31.6501 16.0237L28.9286 14.0387V24.1072H25.0715V14.0387L22.35 16.0237L21.2143 14.4643L20.0787 12.9067L25.8644 8.68988H28.1357L33.9214 12.9067Z" fill="#0981E3"/>
</svg>

After

Width:  |  Height:  |  Size: 1.0 KiB

View File

@@ -625,6 +625,7 @@ erpnext.TransactionController = class TransactionController extends erpnext.taxe
callback: function (r) {
if (!r.exc) {
me.frm.refresh_fields();
me.show_batch_dialog_if_required(item);
}
},
});
@@ -635,26 +636,13 @@ erpnext.TransactionController = class TransactionController extends erpnext.taxe
process_item_selection(doc, cdt, cdn) {
var item = frappe.get_doc(cdt, cdn);
let update_stock = 0;
var me = this;
var update_stock = 0,
show_batch_dialog = 0;
item.weight_per_unit = 0;
item.weight_uom = "";
item.uom = null; // make UOM blank to update the existing UOM when item changes
item.conversion_factor = 0;
if (["Sales Invoice", "Purchase Invoice"].includes(this.frm.doc.doctype)) {
update_stock = cint(me.frm.doc.update_stock);
show_batch_dialog = update_stock;
} else if (this.frm.doc.doctype === "Purchase Receipt" || this.frm.doc.doctype === "Delivery Note") {
show_batch_dialog = 1;
}
if (show_batch_dialog && item.use_serial_batch_fields === 1) {
show_batch_dialog = 0;
}
item.barcode = null;
if (item.item_code || item.serial_no) {
@@ -765,74 +753,7 @@ erpnext.TransactionController = class TransactionController extends erpnext.taxe
}
},
() => me.toggle_conversion_factor(item),
() => {
if (show_batch_dialog && !frappe.flags.trigger_from_barcode_scanner)
return frappe.db
.get_value("Item", item.item_code, [
"has_batch_no",
"has_serial_no",
])
.then((r) => {
if (
r.message &&
(r.message.has_batch_no || r.message.has_serial_no)
) {
frappe.flags.hide_serial_batch_dialog = false;
} else {
show_batch_dialog = false;
}
});
},
() => {
// check if batch serial selector is disabled or not
if (show_batch_dialog && !frappe.flags.hide_serial_batch_dialog)
return frappe.db
.get_single_value(
"Stock Settings",
"disable_serial_no_and_batch_selector"
)
.then((value) => {
if (value) {
frappe.flags.hide_serial_batch_dialog = true;
}
});
},
() => {
if (
show_batch_dialog &&
!frappe.flags.hide_serial_batch_dialog &&
!frappe.flags.dialog_set
) {
var d = locals[cdt][cdn];
$.each(r.message, function (k, v) {
if (!d[k]) d[k] = v;
});
if (d.has_batch_no && d.has_serial_no) {
d.batch_no = undefined;
}
frappe.flags.dialog_set = true;
erpnext.show_serial_batch_selector(
me.frm,
d,
(item) => {
me.frm.script_manager.trigger("qty", item.doctype, item.name);
if (!me.frm.doc.set_warehouse)
me.frm.script_manager.trigger(
"warehouse",
item.doctype,
item.name
);
me.apply_price_list(item, true);
},
undefined,
!frappe.flags.hide_serial_batch_dialog
);
} else {
frappe.flags.dialog_set = false;
}
},
() => me.show_batch_dialog_if_required(item),
() => me.conversion_factor(doc, cdt, cdn, true),
() => me.remove_pricing_rule(item),
() => {
@@ -853,6 +774,78 @@ erpnext.TransactionController = class TransactionController extends erpnext.taxe
}
}
show_batch_dialog_if_required(item) {
	// Decide whether the serial/batch selector should open for this child row,
	// then run the (async) checks and open it. Shared state lives in
	// frappe.flags: hide_serial_batch_dialog suppresses the dialog,
	// dialog_set guards against opening it twice for the same selection.
	let show_batch_dialog = 0;
	let update_stock = 0;
	let me = this;

	// Nothing to do for a blank row.
	if (!item.item_code) {
		return;
	}

	// Invoices only move stock when update_stock is checked; receipts and
	// delivery notes always do, so they always get the dialog check.
	if (["Sales Invoice", "Purchase Invoice"].includes(this.frm.doc.doctype)) {
		update_stock = cint(me.frm.doc.update_stock);
		show_batch_dialog = update_stock;
	} else if (this.frm.doc.doctype === "Purchase Receipt" || this.frm.doc.doctype === "Delivery Note") {
		show_batch_dialog = 1;
	}

	// Rows using the legacy serial/batch fields bypass the selector entirely.
	if (show_batch_dialog && item.use_serial_batch_fields === 1) {
		show_batch_dialog = 0;
	}

	// The three steps below must run in order: each later step reads the
	// flags the earlier ones set.
	frappe.run_serially([
		() => {
			// Step 1: confirm the item actually tracks batches or serial nos;
			// skipped when the row came from the barcode scanner (it handles
			// serial/batch itself).
			if (show_batch_dialog && !frappe.flags.trigger_from_barcode_scanner)
				return frappe.db
					.get_value("Item", item.item_code, ["has_batch_no", "has_serial_no"])
					.then((r) => {
						if (r.message && (r.message.has_batch_no || r.message.has_serial_no)) {
							item.has_serial_no = r.message.has_serial_no;
							item.has_batch_no = r.message.has_batch_no;
							frappe.flags.hide_serial_batch_dialog = false;
						} else {
							show_batch_dialog = false;
						}
					});
		},
		() => {
			// Step 2: honour the global Stock Settings switch that disables
			// the serial/batch selector.
			if (show_batch_dialog && !frappe.flags.hide_serial_batch_dialog)
				return frappe.db
					.get_single_value("Stock Settings", "disable_serial_no_and_batch_selector")
					.then((value) => {
						if (value) {
							frappe.flags.hide_serial_batch_dialog = true;
						}
					});
		},
		() => {
			// Step 3: open the selector once (dialog_set prevents re-entry).
			if (show_batch_dialog && !frappe.flags.hide_serial_batch_dialog && !frappe.flags.dialog_set) {
				// When the item tracks both, let the selector pick the batch
				// rather than keeping a stale batch_no on the row.
				if (item.has_batch_no && item.has_serial_no) {
					item.batch_no = undefined;
				}
				frappe.flags.dialog_set = true;
				erpnext.show_serial_batch_selector(
					me.frm,
					item,
					(item) => {
						// After selection: recompute qty, warehouse (unless a
						// header-level warehouse is set) and price list rate.
						me.frm.script_manager.trigger("qty", item.doctype, item.name);
						if (!me.frm.doc.set_warehouse)
							me.frm.script_manager.trigger("warehouse", item.doctype, item.name);
						me.apply_price_list(item, true);
					},
					undefined,
					!frappe.flags.hide_serial_batch_dialog
				);
			} else {
				// Reset the guard so the next selection can open the dialog.
				frappe.flags.dialog_set = false;
			}
		},
	]);
}
price_list_rate(doc, cdt, cdn) {
var item = frappe.get_doc(cdt, cdn);
frappe.model.round_floats_in(item, ["price_list_rate", "discount_percentage"]);

View File

@@ -994,7 +994,7 @@ erpnext.utils.map_current_doc = function (opts) {
if (opts.source_doctype) {
let data_fields = [];
if (["Purchase Receipt", "Delivery Note"].includes(opts.source_doctype)) {
if (["Purchase Receipt", "Delivery Note", "Purchase Invoice"].includes(opts.source_doctype)) {
let target_meta = frappe.get_meta(cur_frm.doc.doctype);
if (target_meta.fields.find((f) => f.fieldname === "taxes")) {
data_fields.push({
@@ -1069,17 +1069,9 @@ frappe.form.link_formatters["Project"] = function (value, doc, df) {
* @returns {string} - The link value with the added title.
*/
function add_link_title(value, doc, df, title_field) {
if (doc.doctype != df.parent) {
return "";
} else if (
doc &&
value &&
doc[title_field] &&
doc[title_field] !== value &&
doc[df.fieldname] === value
) {
if (doc && value && doc[title_field] && doc[title_field] !== value && doc[df.fieldname] === value) {
return value + ": " + doc[title_field];
} else if (!value && doc.doctype && doc[title_field]) {
} else if (!value && doc.doctype && doc[title_field] && doc.doctype == df.parent) {
return doc[title_field];
} else {
return value;

View File

@@ -138,14 +138,14 @@ erpnext.utils.BarcodeScanner = class BarcodeScanner {
frappe.run_serially([
() => this.set_selector_trigger_flag(data),
() => this.set_serial_no(row, serial_no),
() => this.set_batch_no(row, batch_no),
() => this.set_barcode(row, barcode),
() => this.set_warehouse(row),
() =>
this.set_item(row, item_code, barcode, batch_no, serial_no).then((qty) => {
this.show_scan_message(row.idx, !is_new_row, qty);
}),
() => this.set_serial_no(row, serial_no),
() => this.set_batch_no(row, batch_no),
() => this.clean_up(),
() => this.set_barcode_uom(row, uom),
() => this.revert_selector_flag(),

View File

@@ -1,11 +1,5 @@
frappe.provide("erpnext.demo");
$(document).on("toolbar_setup", function () {
if (frappe.boot.sysdefaults.demo_company) {
render_clear_demo_action();
}
});
$(document).on("desktop_screen", function (event, data) {
data.desktop.add_menu_item({
label: __("Clear Demo Data"),
@@ -19,16 +13,6 @@ $(document).on("desktop_screen", function (event, data) {
});
});
function render_clear_demo_action() {
let demo_action = $(
`<a class="dropdown-item" onclick="return erpnext.demo.clear_demo()">
${__("Clear Demo Data")}
</a>`
);
demo_action.appendTo($("#toolbar-user"));
}
erpnext.demo.clear_demo = function () {
frappe.confirm(__("Are you sure you want to clear all demo data?"), () => {
frappe.call({
@@ -44,4 +28,4 @@ erpnext.demo.clear_demo = function () {
},
});
});
};
};

View File

@@ -142,7 +142,14 @@ def download_zip(files, output_filename):
def get_invoice_summary(items, taxes, item_wise_tax_details):
summary_data = frappe._dict()
taxes_wise_tax_details = {d.tax_row: d for d in item_wise_tax_details}
taxes_wise_tax_details = {}
for d in item_wise_tax_details:
if d.tax_row not in taxes_wise_tax_details:
taxes_wise_tax_details[d.tax_row] = []
taxes_wise_tax_details[d.tax_row].append(d)
for tax in taxes:
# Include only VAT charges.
if tax.charge_type == "Actual":

View File

@@ -512,6 +512,9 @@ def _set_missing_values(source, target):
if contact:
target.contact_person = contact[0].parent
target.contact_display, target.contact_email, target.contact_mobile = frappe.get_value(
"Contact", contact[0].parent, ["full_name", "email_id", "mobile_no"]
)
@frappe.whitelist()

View File

@@ -3,7 +3,7 @@
"allow_auto_repeat": 1,
"allow_import": 1,
"autoname": "naming_series:",
"creation": "2013-05-24 19:29:08",
"creation": "2026-01-29 21:18:32.391385",
"doctype": "DocType",
"document_type": "Document",
"editable_grid": 1,
@@ -1115,14 +1115,15 @@
"hidden": 1,
"label": "Item Wise Tax Details",
"no_copy": 1,
"options": "Item Wise Tax Detail"
"options": "Item Wise Tax Detail",
"print_hide": 1
}
],
"icon": "fa fa-shopping-cart",
"idx": 82,
"is_submittable": 1,
"links": [],
"modified": "2025-07-31 17:23:48.875382",
"modified": "2026-01-29 21:18:48.836168",
"modified_by": "Administrator",
"module": "Selling",
"name": "Quotation",

View File

@@ -450,7 +450,10 @@ def _make_sales_order(source_name, target_doc=None, ignore_permissions=False, ar
"Quotation",
source_name,
{
"Quotation": {"doctype": "Sales Order", "validation": {"docstatus": ["=", 1]}},
"Quotation": {
"doctype": "Sales Order",
"validation": {"docstatus": ["=", 1]},
},
"Quotation Item": {
"doctype": "Sales Order Item",
"field_map": {"parent": "prevdoc_docname", "name": "quotation_item"},
@@ -553,6 +556,8 @@ def _make_customer(source_name, ignore_permissions=False):
if quotation.quotation_to == "Customer":
return frappe.get_doc("Customer", quotation.party_name)
elif quotation.quotation_to == "CRM Deal":
return frappe.get_doc("Customer", {"crm_deal": quotation.party_name})
# Check if a Customer already exists for the Lead or Prospect.
existing_customer = None
@@ -613,27 +618,9 @@ def handle_mandatory_error(e, customer, lead_name):
frappe.throw(message, title=_("Mandatory Missing"))
@frappe.whitelist()
def get_ordered_items(quotation: str):
"""
Returns a dict of ordered items with their total qty based on quotation row name.
In `Sales Order Item`, `quotation_item` is the row name of `Quotation Item`.
Example:
```
{
"refsdjhd2": 10,
"ygdhdshrt": 5,
}
```
"""
return frappe._dict(
frappe.get_all(
"Sales Order Item",
filters={"prevdoc_docname": quotation, "docstatus": 1},
fields=["quotation_item", {"SUM": "qty"}],
group_by="quotation_item",
as_list=1,
"Quotation Item", {"docstatus": 1, "parent": quotation}, ["name", "ordered_qty"], as_list=True
)
)

View File

@@ -934,7 +934,7 @@ class TestQuotation(IntegrationTestCase):
# item code same but description different
make_item("_Test Item 2", {"is_stock_item": 1})
quotation = make_quotation(qty=1, rate=100, do_not_submit=1)
quotation = make_quotation(qty=10, rate=100, do_not_submit=1)
# duplicate items
for qty in [1, 1, 2, 3]:
@@ -948,7 +948,7 @@ class TestQuotation(IntegrationTestCase):
sales_order.delivery_date = nowdate()
self.assertEqual(len(sales_order.items), 6)
self.assertEqual(sales_order.items[0].qty, 1)
self.assertEqual(sales_order.items[0].qty, 10)
self.assertEqual(sales_order.items[-1].qty, 5)
# Row 1: 10, Row 4: 1, Row 5: 1
@@ -991,6 +991,16 @@ class TestQuotation(IntegrationTestCase):
f"Expected conversion rate {expected_rate}, got {quotation.conversion_rate}",
)
def test_over_order_limit(self):
quotation = make_quotation(qty=5)
so1 = make_sales_order(quotation.name)
so2 = make_sales_order(quotation.name)
so1.delivery_date = nowdate()
so2.delivery_date = nowdate()
so1.submit()
self.assertRaises(frappe.ValidationError, so2.submit)
def enable_calculate_bundle_price(enable=1):
selling_settings = frappe.get_doc("Selling Settings")

View File

@@ -27,6 +27,7 @@
"uom",
"conversion_factor",
"stock_qty",
"ordered_qty",
"available_quantity_section",
"actual_qty",
"column_break_ylrv",
@@ -694,19 +695,31 @@
"print_hide": 1,
"read_only": 1,
"report_hide": 1
},
{
"default": "0",
"fieldname": "ordered_qty",
"fieldtype": "Float",
"hidden": 1,
"label": "Ordered Qty",
"no_copy": 1,
"non_negative": 1,
"read_only": 1,
"reqd": 1
}
],
"idx": 1,
"istable": 1,
"links": [],
"modified": "2025-08-26 20:31:47.775890",
"modified": "2026-01-30 12:56:08.320190",
"modified_by": "Administrator",
"module": "Selling",
"name": "Quotation Item",
"owner": "Administrator",
"permissions": [],
"row_format": "Dynamic",
"sort_field": "creation",
"sort_order": "DESC",
"states": [],
"track_changes": 1
}
}

View File

@@ -48,6 +48,7 @@ class QuotationItem(Document):
margin_type: DF.Literal["", "Percentage", "Amount"]
net_amount: DF.Currency
net_rate: DF.Currency
ordered_qty: DF.Float
page_break: DF.Check
parent: DF.Data
parentfield: DF.Data

View File

@@ -1704,7 +1704,8 @@
"hidden": 1,
"label": "Item Wise Tax Details",
"no_copy": 1,
"options": "Item Wise Tax Detail"
"options": "Item Wise Tax Detail",
"print_hide": 1
}
],
"grid_page_length": 50,
@@ -1712,7 +1713,7 @@
"idx": 105,
"is_submittable": 1,
"links": [],
"modified": "2025-10-12 12:14:29.760988",
"modified": "2026-01-29 21:23:48.362401",
"modified_by": "Administrator",
"module": "Selling",
"name": "Sales Order",

View File

@@ -194,6 +194,16 @@ class SalesOrder(SellingController):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.status_updater = [
{
"source_dt": "Sales Order Item",
"target_dt": "Quotation Item",
"join_field": "quotation_item",
"target_field": "ordered_qty",
"target_ref_field": "stock_qty",
"source_field": "stock_qty",
}
]
def onload(self) -> None:
super().onload()
@@ -481,6 +491,7 @@ class SalesOrder(SellingController):
frappe.throw(_("Row #{0}: Set Supplier for item {1}").format(d.idx, d.item_code))
def on_submit(self):
super().update_prevdoc_status()
self.check_credit_limit()
self.update_reserved_qty()
self.delete_removed_delivery_schedule_items()

View File

@@ -57,6 +57,7 @@ class TestSalesOrder(AccountsTestMixin, IntegrationTestCase):
frappe.db.rollback()
frappe.set_user("Administrator")
@IntegrationTestCase.change_settings("Selling Settings", {"allow_negative_rates_for_items": 1})
def test_sales_order_with_negative_rate(self):
"""
Test if negative rate is allowed in Sales Order via doc submission and update items

View File

@@ -39,6 +39,7 @@
"enable_cutoff_date_on_bulk_delivery_note_creation",
"allow_zero_qty_in_quotation",
"allow_zero_qty_in_sales_order",
"set_zero_rate_for_expired_batch",
"experimental_section",
"use_legacy_js_reactivity",
"subcontracting_inward_tab",
@@ -289,6 +290,13 @@
"fieldname": "use_legacy_js_reactivity",
"fieldtype": "Check",
"label": "Use Legacy (Client side) Reactivity"
},
{
"default": "0",
"description": "If enabled, system will set incoming rate as zero for stand-alone credit notes with expired batch item.",
"fieldname": "set_zero_rate_for_expired_batch",
"fieldtype": "Check",
"label": "Set Incoming Rate as Zero for Expired Batch"
}
],
"grid_page_length": 50,
@@ -298,7 +306,7 @@
"index_web_pages_for_search": 1,
"issingle": 1,
"links": [],
"modified": "2026-01-21 17:28:37.027837",
"modified": "2026-01-23 00:04:33.105916",
"modified_by": "Administrator",
"module": "Selling",
"name": "Selling Settings",

View File

@@ -44,6 +44,7 @@ class SellingSettings(Document):
role_to_override_stop_action: DF.Link | None
sales_update_frequency: DF.Literal["Monthly", "Each Transaction", "Daily"]
selling_price_list: DF.Link | None
set_zero_rate_for_expired_batch: DF.Check
so_required: DF.Literal["No", "Yes"]
territory: DF.Link | None
use_legacy_js_reactivity: DF.Check

View File

@@ -182,6 +182,10 @@ def create_transaction_deletion_record(company):
transaction_deletion_record.company = company
transaction_deletion_record.process_in_single_transaction = True
transaction_deletion_record.save(ignore_permissions=True)
transaction_deletion_record.generate_to_delete_list()
transaction_deletion_record.reload()
transaction_deletion_record.submit()
transaction_deletion_record.start_deletion_tasks()

View File

@@ -1083,6 +1083,8 @@ def get_billing_shipping_address(name, billing_address=None, shipping_address=No
@frappe.whitelist()
def create_transaction_deletion_request(company):
frappe.only_for("System Manager")
from erpnext.setup.doctype.transaction_deletion_record.transaction_deletion_record import (
is_deletion_doc_running,
)
@@ -1090,12 +1092,16 @@ def create_transaction_deletion_request(company):
is_deletion_doc_running(company)
tdr = frappe.get_doc({"doctype": "Transaction Deletion Record", "company": company})
tdr.insert()
tdr.generate_to_delete_list()
tdr.reload()
tdr.submit()
tdr.start_deletion_tasks()
frappe.msgprint(
_("A Transaction Deletion Document: {0} is triggered for {0}").format(
get_link_to_form("Transaction Deletion Record", tdr.name)
),
frappe.bold(company),
_("Transaction Deletion Document {0} has been triggered for company {1}").format(
get_link_to_form("Transaction Deletion Record", tdr.name), frappe.bold(company)
)
)

View File

@@ -13,6 +13,7 @@
"hide_currency_symbol",
"disable_rounded_total",
"disable_in_words",
"use_posting_datetime_for_naming_documents",
"demo_company"
],
"fields": [
@@ -80,6 +81,13 @@
"label": "Demo Company",
"options": "Company",
"read_only": 1
},
{
"default": "0",
"description": "When checked, the system will use the posting datetime of the document for naming the document instead of the creation datetime of the document.",
"fieldname": "use_posting_datetime_for_naming_documents",
"fieldtype": "Check",
"label": "Use Posting Datetime for Naming Documents"
}
],
"grid_page_length": 50,
@@ -89,7 +97,7 @@
"in_create": 1,
"issingle": 1,
"links": [],
"modified": "2026-01-02 18:13:13.421866",
"modified": "2026-01-12 09:45:59.819161",
"modified_by": "Administrator",
"module": "Setup",
"name": "Global Defaults",

View File

@@ -39,6 +39,7 @@ class GlobalDefaults(Document):
disable_in_words: DF.Check
disable_rounded_total: DF.Check
hide_currency_symbol: DF.Literal["", "No", "Yes"]
use_posting_datetime_for_naming_documents: DF.Check
# end: auto-generated types
def on_update(self):

View File

@@ -8,38 +8,77 @@ from frappe.tests import IntegrationTestCase
class TestTransactionDeletionRecord(IntegrationTestCase):
def setUp(self):
# Clear all deletion cache flags from previous tests
self._clear_all_deletion_cache_flags()
create_company("Dunder Mifflin Paper Co")
def tearDown(self):
# Clean up all deletion cache flags after each test
self._clear_all_deletion_cache_flags()
frappe.db.rollback()
def _clear_all_deletion_cache_flags(self):
"""Clear all deletion_running_doctype:* cache keys"""
# Get all keys matching the pattern
cache_keys = frappe.cache.get_keys("deletion_running_doctype:*")
if cache_keys:
for key in cache_keys:
# Decode bytes to string if needed
key_str = key.decode() if isinstance(key, bytes) else key
# Extract just the key name (remove site prefix if present)
# Keys are in format: site_prefix|deletion_running_doctype:DocType
if "|" in key_str:
key_name = key_str.split("|")[1]
else:
key_name = key_str
frappe.cache.delete_value(key_name)
def test_doctypes_contain_company_field(self):
tdr = create_transaction_deletion_doc("Dunder Mifflin Paper Co")
for doctype in tdr.doctypes:
contains_company = False
doctype_fields = frappe.get_meta(doctype.doctype_name).as_dict()["fields"]
for doctype_field in doctype_fields:
if doctype_field["fieldtype"] == "Link" and doctype_field["options"] == "Company":
contains_company = True
break
self.assertTrue(contains_company)
"""Test that all DocTypes in To Delete list have a valid company link field"""
tdr = create_and_submit_transaction_deletion_doc("Dunder Mifflin Paper Co")
for doctype_row in tdr.doctypes_to_delete:
# If company_field is specified, verify it's a valid Company link field
if doctype_row.company_field:
field_found = False
doctype_fields = frappe.get_meta(doctype_row.doctype_name).as_dict()["fields"]
for doctype_field in doctype_fields:
if (
doctype_field["fieldname"] == doctype_row.company_field
and doctype_field["fieldtype"] == "Link"
and doctype_field["options"] == "Company"
):
field_found = True
break
self.assertTrue(
field_found,
f"DocType {doctype_row.doctype_name} should have company field '{doctype_row.company_field}'",
)
def test_no_of_docs_is_correct(self):
for _i in range(5):
"""Test that document counts are calculated correctly in To Delete list"""
for _ in range(5):
create_task("Dunder Mifflin Paper Co")
tdr = create_transaction_deletion_doc("Dunder Mifflin Paper Co")
tdr = create_and_submit_transaction_deletion_doc("Dunder Mifflin Paper Co")
tdr.reload()
for doctype in tdr.doctypes:
# Check To Delete list has correct count
task_found = False
for doctype in tdr.doctypes_to_delete:
if doctype.doctype_name == "Task":
self.assertEqual(doctype.no_of_docs, 5)
self.assertEqual(doctype.document_count, 5)
task_found = True
break
self.assertTrue(task_found, "Task should be in To Delete list")
def test_deletion_is_successful(self):
"""Test that deletion actually removes documents"""
create_task("Dunder Mifflin Paper Co")
create_transaction_deletion_doc("Dunder Mifflin Paper Co")
create_and_submit_transaction_deletion_doc("Dunder Mifflin Paper Co")
tasks_containing_company = frappe.get_all("Task", filters={"company": "Dunder Mifflin Paper Co"})
self.assertEqual(tasks_containing_company, [])
def test_company_transaction_deletion_request(self):
"""Test creation via company deletion request method"""
from erpnext.setup.doctype.company.company import create_transaction_deletion_request
# don't reuse below company for other test cases
@@ -49,15 +88,314 @@ class TestTransactionDeletionRecord(IntegrationTestCase):
# below call should not raise any exceptions or throw errors
create_transaction_deletion_request(company)
def test_generate_to_delete_list(self):
"""Test automatic generation of To Delete list"""
company = "Dunder Mifflin Paper Co"
create_task(company)
tdr = frappe.new_doc("Transaction Deletion Record")
tdr.company = company
tdr.insert()
# Generate To Delete list
tdr.generate_to_delete_list()
tdr.reload()
# Should have at least Task in the list
self.assertGreater(len(tdr.doctypes_to_delete), 0)
task_in_list = any(d.doctype_name == "Task" for d in tdr.doctypes_to_delete)
self.assertTrue(task_in_list, "Task should be in To Delete list")
def test_validation_prevents_child_tables(self):
"""Test that child tables cannot be added to To Delete list"""
company = "Dunder Mifflin Paper Co"
tdr = frappe.new_doc("Transaction Deletion Record")
tdr.company = company
tdr.append("doctypes_to_delete", {"doctype_name": "Sales Invoice Item"}) # Child table
# Should throw validation error
with self.assertRaises(frappe.ValidationError):
tdr.insert()
def test_validation_prevents_protected_doctypes(self):
"""Test that protected DocTypes cannot be added to To Delete list"""
company = "Dunder Mifflin Paper Co"
tdr = frappe.new_doc("Transaction Deletion Record")
tdr.company = company
tdr.append("doctypes_to_delete", {"doctype_name": "DocType"}) # Protected
# Should throw validation error
with self.assertRaises(frappe.ValidationError):
tdr.insert()
def test_csv_export_import(self):
"""Test CSV export and import functionality with company_field column"""
company = "Dunder Mifflin Paper Co"
create_task(company)
# Create and generate To Delete list
tdr = frappe.new_doc("Transaction Deletion Record")
tdr.company = company
tdr.insert()
tdr.generate_to_delete_list()
tdr.reload()
original_count = len(tdr.doctypes_to_delete)
self.assertGreater(original_count, 0)
# Export as CSV
tdr.export_to_delete_template_method()
csv_content = frappe.response.get("result")
self.assertIsNotNone(csv_content)
self.assertIn("doctype_name", csv_content)
self.assertIn("company_field", csv_content) # New: verify company_field column exists
# Create new record and import
tdr2 = frappe.new_doc("Transaction Deletion Record")
tdr2.company = company
tdr2.insert()
result = tdr2.import_to_delete_template_method(csv_content)
tdr2.reload()
# Should have same entries (counts may differ due to new task)
self.assertEqual(len(tdr2.doctypes_to_delete), original_count)
self.assertGreaterEqual(result["imported"], 1)
# Verify company_field values are preserved
for row in tdr2.doctypes_to_delete:
if row.doctype_name == "Task":
# Task should have company field set
self.assertIsNotNone(row.company_field, "Task should have company_field set after import")
def test_progress_tracking(self):
"""Test that deleted checkbox is marked when DocType deletion completes"""
company = "Dunder Mifflin Paper Co"
create_task(company)
tdr = create_and_submit_transaction_deletion_doc(company)
tdr.reload()
# After deletion, Task should be marked as deleted in To Delete list
# Note: Must match using composite key (doctype_name + company_field)
task_row = None
for doctype in tdr.doctypes_to_delete:
if doctype.doctype_name == "Task":
task_row = doctype
break
if task_row:
self.assertEqual(task_row.deleted, 1, "Task should be marked as deleted")
def test_composite_key_validation(self):
"""Test that duplicate (doctype_name + company_field) combinations are prevented"""
company = "Dunder Mifflin Paper Co"
tdr = frappe.new_doc("Transaction Deletion Record")
tdr.company = company
tdr.append("doctypes_to_delete", {"doctype_name": "Task", "company_field": "company"})
tdr.append("doctypes_to_delete", {"doctype_name": "Task", "company_field": "company"}) # Duplicate!
# Should throw validation error for duplicate composite key
with self.assertRaises(frappe.ValidationError):
tdr.insert()
def test_same_doctype_different_company_field_allowed(self):
"""Test that same DocType can be added with different company_field values"""
company = "Dunder Mifflin Paper Co"
tdr = frappe.new_doc("Transaction Deletion Record")
tdr.company = company
# Same DocType but one with company field, one without (None)
tdr.append("doctypes_to_delete", {"doctype_name": "Task", "company_field": "company"})
tdr.append("doctypes_to_delete", {"doctype_name": "Task", "company_field": None})
# Should NOT throw error - different company_field values are allowed
try:
tdr.insert()
self.assertEqual(
len(tdr.doctypes_to_delete),
2,
"Should allow 2 Task entries with different company_field values",
)
except frappe.ValidationError as e:
self.fail(f"Should allow same DocType with different company_field values, but got error: {e}")
def test_company_field_validation(self):
"""Test that invalid company_field values are rejected"""
company = "Dunder Mifflin Paper Co"
tdr = frappe.new_doc("Transaction Deletion Record")
tdr.company = company
# Add Task with invalid company field
tdr.append("doctypes_to_delete", {"doctype_name": "Task", "company_field": "nonexistent_field"})
# Should throw validation error for invalid company field
with self.assertRaises(frappe.ValidationError):
tdr.insert()
def test_get_naming_series_prefix_with_dot(self):
"""Test prefix extraction for standard dot-separated naming series"""
from erpnext.setup.doctype.transaction_deletion_record.transaction_deletion_record import (
TransactionDeletionRecord,
)
# Standard patterns with dot separator
self.assertEqual(TransactionDeletionRecord.get_naming_series_prefix("TDL.####", "Task"), "TDL")
self.assertEqual(TransactionDeletionRecord.get_naming_series_prefix("PREFIX.#####", "Task"), "PREFIX")
self.assertEqual(
TransactionDeletionRecord.get_naming_series_prefix("TASK-.YYYY.-.#####", "Task"), "TASK-.YYYY.-"
)
def test_get_naming_series_prefix_with_brace(self):
"""Test prefix extraction for format patterns with brace separators"""
from erpnext.setup.doctype.transaction_deletion_record.transaction_deletion_record import (
TransactionDeletionRecord,
)
# Format patterns with brace separator
self.assertEqual(
TransactionDeletionRecord.get_naming_series_prefix("QA-ACT-{#####}", "Quality Action"), "QA-ACT-"
)
self.assertEqual(
TransactionDeletionRecord.get_naming_series_prefix("PREFIX-{####}", "Task"), "PREFIX-"
)
self.assertEqual(TransactionDeletionRecord.get_naming_series_prefix("{####}", "Task"), "")
def test_get_naming_series_prefix_fallback(self):
"""Test prefix extraction fallback for patterns without standard separators"""
from erpnext.setup.doctype.transaction_deletion_record.transaction_deletion_record import (
TransactionDeletionRecord,
)
# Edge case: pattern with # but no dot or brace (shouldn't happen in practice)
self.assertEqual(TransactionDeletionRecord.get_naming_series_prefix("PREFIX####", "Task"), "PREFIX")
# Edge case: pattern with no # at all
self.assertEqual(
TransactionDeletionRecord.get_naming_series_prefix("JUSTPREFIX", "Task"), "JUSTPREFIX"
)
def test_cache_flag_management(self):
"""Test that cache flags can be set and cleared correctly"""
company = "Dunder Mifflin Paper Co"
create_task(company)
tdr = frappe.new_doc("Transaction Deletion Record")
tdr.company = company
tdr.insert()
tdr.generate_to_delete_list()
tdr.reload()
# Test _set_deletion_cache
tdr._set_deletion_cache()
# Verify flag is set for Task specifically
cached_value = frappe.cache.get_value("deletion_running_doctype:Task")
self.assertEqual(cached_value, tdr.name, "Cache flag should be set for Task")
# Test _clear_deletion_cache
tdr._clear_deletion_cache()
# Verify flag is cleared
cached_value = frappe.cache.get_value("deletion_running_doctype:Task")
self.assertIsNone(cached_value, "Cache flag should be cleared for Task")
def test_check_for_running_deletion_blocks_save(self):
"""Test that check_for_running_deletion_job blocks saves when cache flag exists"""
from erpnext.setup.doctype.transaction_deletion_record.transaction_deletion_record import (
check_for_running_deletion_job,
)
company = "Dunder Mifflin Paper Co"
# Manually set cache flag to simulate running deletion
frappe.cache.set_value("deletion_running_doctype:Task", "TDR-00001", expires_in_sec=60)
try:
# Try to validate a new Task
new_task = frappe.new_doc("Task")
new_task.company = company
new_task.subject = "Should be blocked"
# Should throw error when cache flag exists
with self.assertRaises(frappe.ValidationError) as context:
check_for_running_deletion_job(new_task)
error_message = str(context.exception)
self.assertIn("currently deleting", error_message)
self.assertIn("TDR-00001", error_message)
finally:
# Cleanup: clear the manually set flag
frappe.cache.delete_value("deletion_running_doctype:Task")
def test_check_for_running_deletion_allows_save_when_no_flag(self):
"""Test that documents can be saved when no deletion is running"""
company = "Dunder Mifflin Paper Co"
# Ensure no cache flag exists
frappe.cache.delete_value("deletion_running_doctype:Task")
# Try to create and save a new Task
new_task = frappe.new_doc("Task")
new_task.company = company
new_task.subject = "Should be allowed"
# Should NOT throw error when no cache flag - actually save it
try:
new_task.insert()
# Cleanup
frappe.delete_doc("Task", new_task.name)
except frappe.ValidationError as e:
self.fail(f"Should allow save when no deletion is running, but got: {e}")
def test_only_one_deletion_allowed_globally(self):
"""Test that only one deletion can be submitted at a time (global enforcement)"""
company1 = "Dunder Mifflin Paper Co"
company2 = "Sabre Corporation"
create_company(company2)
# Create and submit first deletion (but don't start it)
tdr1 = frappe.new_doc("Transaction Deletion Record")
tdr1.company = company1
tdr1.insert()
tdr1.append("doctypes_to_delete", {"doctype_name": "Task", "company_field": "company"})
tdr1.save()
tdr1.submit() # Status becomes "Queued"
try:
# Try to submit second deletion for different company
tdr2 = frappe.new_doc("Transaction Deletion Record")
tdr2.company = company2 # Different company!
tdr2.insert()
tdr2.append("doctypes_to_delete", {"doctype_name": "Lead", "company_field": "company"})
tdr2.save()
# Should throw error - only one deletion allowed globally
with self.assertRaises(frappe.ValidationError) as context:
tdr2.submit()
self.assertIn("already", str(context.exception).lower())
self.assertIn(tdr1.name, str(context.exception))
finally:
# Cleanup
tdr1.cancel()
def create_company(company_name):
company = frappe.get_doc({"doctype": "Company", "company_name": company_name, "default_currency": "INR"})
company.insert(ignore_if_duplicate=True)
def create_transaction_deletion_doc(company):
def create_and_submit_transaction_deletion_doc(company):
"""Create and execute a transaction deletion record"""
tdr = frappe.get_doc({"doctype": "Transaction Deletion Record", "company": company})
tdr.insert()
tdr.generate_to_delete_list()
tdr.reload()
tdr.process_in_single_transaction = True
tdr.submit()
tdr.start_deletion_tasks()

View File

@@ -2,13 +2,58 @@
// For license information, please see license.txt
frappe.ui.form.on("Transaction Deletion Record", {
setup: function (frm) {
// Set up query for DocTypes to exclude child tables and virtual doctypes
// Note: Same DocType can be added multiple times with different company_field values
frm.set_query("doctype_name", "doctypes_to_delete", function () {
// Build exclusion list from protected and ignored doctypes
let excluded_doctypes = ["Transaction Deletion Record"]; // Always exclude self
// Add protected doctypes (fetched in onload)
if (frm.protected_doctypes_list && frm.protected_doctypes_list.length > 0) {
excluded_doctypes = excluded_doctypes.concat(frm.protected_doctypes_list);
}
// Add doctypes from the ignore list
if (frm.doc.doctypes_to_be_ignored && frm.doc.doctypes_to_be_ignored.length > 0) {
frm.doc.doctypes_to_be_ignored.forEach((row) => {
if (row.doctype_name) {
excluded_doctypes.push(row.doctype_name);
}
});
}
let filters = [
["DocType", "istable", "=", 0], // Exclude child tables
["DocType", "is_virtual", "=", 0], // Exclude virtual doctypes
];
// Only add "not in" filter if we have items to exclude
if (excluded_doctypes.length > 0) {
filters.push(["DocType", "name", "not in", excluded_doctypes]);
}
return { filters: filters };
});
},
onload: function (frm) {
if (frm.doc.docstatus == 0) {
let doctypes_to_be_ignored_array;
// Fetch protected doctypes list for filtering
frappe.call({
method: "erpnext.setup.doctype.transaction_deletion_record.transaction_deletion_record.get_protected_doctypes",
callback: function (r) {
if (r.message) {
frm.protected_doctypes_list = r.message;
}
},
});
// Fetch ignored doctypes and populate table
frappe.call({
method: "erpnext.setup.doctype.transaction_deletion_record.transaction_deletion_record.get_doctypes_to_be_ignored",
callback: function (r) {
doctypes_to_be_ignored_array = r.message;
let doctypes_to_be_ignored_array = r.message;
populate_doctypes_to_be_ignored(doctypes_to_be_ignored_array, frm);
frm.refresh_field("doctypes_to_be_ignored");
},
@@ -17,20 +62,264 @@ frappe.ui.form.on("Transaction Deletion Record", {
},
refresh: function (frm) {
if (frm.doc.docstatus == 1 && ["Queued", "Failed"].find((x) => x == frm.doc.status)) {
let execute_btn = frm.doc.status == "Queued" ? __("Start Deletion") : __("Retry");
// Override submit button to show custom confirmation
if (frm.doc.docstatus === 0 && !frm.is_new()) {
frm.page.clear_primary_action();
frm.page.set_primary_action(__("Submit"), () => {
if (!frm.doc.doctypes_to_delete || frm.doc.doctypes_to_delete.length === 0) {
frappe.msgprint(__("Please generate the To Delete list before submitting"));
return;
}
frm.add_custom_button(execute_btn, () => {
// Entry point for chain of events
let message =
`<div style='margin-bottom: 15px;'><b style='color: #d73939;'>⚠ ${__(
"Warning: This action cannot be undone!"
)}</b></div>` +
`<div style='margin-bottom: 10px;'>${__(
"You are about to permanently delete data for {0} entries for company {1}.",
[`<b>${frm.doc.doctypes_to_delete.length}</b>`, `<b>${frm.doc.company}</b>`]
)}</div>` +
`<div style='margin-bottom: 10px;'><b>${__("What will be deleted:")}</b></div>` +
`<ul style='margin-left: 20px; margin-bottom: 10px;'>` +
`<li><b>${__("DocTypes with a company field:")}</b> ${__(
"Only records belonging to {0} will be deleted",
[`<b>${frm.doc.company}</b>`]
)}</li>` +
`<li><b>${__("DocTypes without a company field:")}</b> ${__(
"ALL records will be deleted (entire DocType cleared)"
)}</li>` +
`</ul>` +
`<div style='margin-bottom: 10px; padding: 10px; background-color: #fff3cd; border: 1px solid #ffc107; border-radius: 4px;'>` +
`<b style='color: #856404;'>📦 ${__(
"IMPORTANT: Create a backup before proceeding!"
)}</b>` +
`</div>` +
`<div style='margin-top: 10px;'>${__(
"Deletion will start automatically after submission."
)}</div>`;
frappe.confirm(
message,
() => {
frm.save("Submit");
},
() => {}
);
});
}
if (frm.doc.docstatus == 0) {
frm.add_custom_button(__("Generate To Delete List"), () => {
frm.call({
method: "generate_to_delete_list",
doc: frm.doc,
callback: (r) => {
frappe.show_alert({
message: __("To Delete list generated with {0} DocTypes", [r.message.count]),
indicator: "green",
});
frm.refresh();
},
});
});
if (frm.doc.doctypes_to_delete && frm.doc.doctypes_to_delete.length > 0) {
frm.add_custom_button(
__("Export"),
() => {
open_url_post(
"/api/method/erpnext.setup.doctype.transaction_deletion_record.transaction_deletion_record.export_to_delete_template",
{
name: frm.doc.name,
}
);
},
__("Template")
);
frm.add_custom_button(__("Remove Zero Counts"), () => {
let removed_count = 0;
let rows_to_keep = [];
frm.doc.doctypes_to_delete.forEach((row) => {
if (row.document_count && row.document_count > 0) {
rows_to_keep.push(row);
} else {
removed_count++;
}
});
if (removed_count === 0) {
frappe.msgprint(__("No rows with zero document count found"));
return;
}
frm.doc.doctypes_to_delete = rows_to_keep;
frm.refresh_field("doctypes_to_delete");
frm.dirty();
frappe.show_alert({
message: __(
"Removed {0} rows with zero document count. Please save to persist changes.",
[removed_count]
),
indicator: "orange",
});
});
}
frm.add_custom_button(
__("Import"),
() => {
new frappe.ui.FileUploader({
doctype: "Transaction Deletion Record",
docname: frm.doc.name,
folder: "Home/Attachments",
restrictions: {
allowed_file_types: [".csv"],
},
on_success: (file_doc) => {
frappe.call({
method: "erpnext.setup.doctype.transaction_deletion_record.transaction_deletion_record.process_import_template",
args: {
transaction_deletion_record_name: frm.doc.name,
file_url: file_doc.file_url,
},
freeze: true,
freeze_message: __("Processing import..."),
callback: (r) => {
if (r.message) {
frappe.show_alert({
message: __("Imported {0} DocTypes", [r.message.imported]),
indicator: "green",
});
frappe.model.clear_doc(frm.doctype, frm.docname);
frm.reload_doc();
}
},
});
},
});
},
__("Template")
);
}
// Only show Retry button for Failed status (deletion starts automatically on submit)
if (frm.doc.docstatus == 1 && frm.doc.status == "Failed") {
frm.add_custom_button(__("Retry"), () => {
frm.call({
method: "start_deletion_tasks",
doc: frm.doc,
callback: () => {
frappe.show_alert({
message: __("Deletion process restarted"),
indicator: "blue",
});
frm.reload_doc();
},
});
});
}
},
});
frappe.ui.form.on("Transaction Deletion Record To Delete", {
doctype_name: function (frm, cdt, cdn) {
let row = locals[cdt][cdn];
if (row.doctype_name) {
// Fetch company fields for auto-selection (only if exactly 1 field exists)
frappe.call({
method: "erpnext.setup.doctype.transaction_deletion_record.transaction_deletion_record.get_company_link_fields",
args: {
doctype_name: row.doctype_name,
},
callback: function (r) {
if (r.message && r.message.length === 1 && !row.company_field) {
frappe.model.set_value(cdt, cdn, "company_field", r.message[0]);
} else if (r.message && r.message.length > 1) {
// Show message with available options when multiple company fields exist
frappe.show_alert({
message: __("Multiple company fields available: {0}. Please select manually.", [
r.message.join(", "),
]),
indicator: "blue",
});
}
},
});
// Auto-populate child DocTypes and document count
frm.call({
method: "populate_doctype_details",
doc: frm.doc,
args: {
doctype_name: row.doctype_name,
company: frm.doc.company,
company_field: row.company_field,
},
callback: function (r) {
if (r.message) {
if (r.message.error) {
frappe.msgprint({
title: __("Error"),
indicator: "red",
message: __("Error getting details for {0}: {1}", [
row.doctype_name,
r.message.error,
]),
});
}
frappe.model.set_value(cdt, cdn, "child_doctypes", r.message.child_doctypes || "");
frappe.model.set_value(cdt, cdn, "document_count", r.message.document_count || 0);
}
},
});
}
},
company_field: function (frm, cdt, cdn) {
let row = locals[cdt][cdn];
if (row.doctype_name && row.company_field !== undefined) {
// Check for duplicates using composite key (doctype_name + company_field)
let duplicates = frm.doc.doctypes_to_delete.filter(
(r) =>
r.doctype_name === row.doctype_name &&
r.company_field === row.company_field &&
r.name !== row.name
);
if (duplicates.length > 0) {
frappe.msgprint(
__("DocType {0} with company field '{1}' is already in the list", [
row.doctype_name,
row.company_field || __("(none)"),
])
);
frappe.model.set_value(cdt, cdn, "company_field", "");
return;
}
// Recalculate document count if company_field changes
if (row.doctype_name) {
frm.call({
method: "populate_doctype_details",
doc: frm.doc,
args: {
doctype_name: row.doctype_name,
company: frm.doc.company,
company_field: row.company_field,
},
callback: function (r) {
if (r.message && r.message.document_count !== undefined) {
frappe.model.set_value(cdt, cdn, "document_count", r.message.document_count || 0);
}
},
});
}
}
},
});
function populate_doctypes_to_be_ignored(doctypes_to_be_ignored_array, frm) {
if (frm.doc.doctypes_to_be_ignored.length === 0) {
var i;

View File

@@ -11,14 +11,17 @@
"status",
"error_log",
"tasks_section",
"delete_bin_data",
"delete_leads_and_addresses",
"reset_company_default_values",
"clear_notifications",
"initialize_doctypes_table",
"delete_transactions",
"delete_bin_data_status",
"delete_leads_and_addresses_status",
"column_break_tasks_1",
"reset_company_default_values_status",
"clear_notifications_status",
"column_break_tasks_2",
"initialize_doctypes_table_status",
"delete_transactions_status",
"section_break_tbej",
"doctypes",
"doctypes_to_delete",
"doctypes_to_be_ignored",
"amended_from",
"process_in_single_transaction"
@@ -33,6 +36,7 @@
"reqd": 1
},
{
"depends_on": "eval:doc.docstatus > 0 && (!doc.doctypes_to_delete || doc.doctypes_to_delete.length == 0)",
"fieldname": "doctypes",
"fieldtype": "Table",
"label": "Summary",
@@ -41,11 +45,17 @@
"read_only": 1
},
{
"fieldname": "doctypes_to_delete",
"fieldtype": "Table",
"label": "DocTypes To Delete",
"options": "Transaction Deletion Record To Delete"
},
{
"description": "DocTypes that will NOT be deleted.",
"fieldname": "doctypes_to_be_ignored",
"fieldtype": "Table",
"label": "Excluded DocTypes",
"options": "Transaction Deletion Record Item",
"read_only": 1
"options": "Transaction Deletion Record Item"
},
{
"fieldname": "amended_from",
@@ -69,56 +79,71 @@
"fieldtype": "Section Break"
},
{
"depends_on": "eval:doc.docstatus==1",
"fieldname": "tasks_section",
"fieldtype": "Section Break",
"label": "Tasks"
},
{
"default": "0",
"fieldname": "delete_bin_data",
"fieldtype": "Check",
"default": "Pending",
"fieldname": "delete_bin_data_status",
"fieldtype": "Select",
"label": "Delete Bins",
"no_copy": 1,
"options": "Pending\nCompleted\nSkipped",
"read_only": 1
},
{
"default": "0",
"fieldname": "delete_leads_and_addresses",
"fieldtype": "Check",
"default": "Pending",
"fieldname": "delete_leads_and_addresses_status",
"fieldtype": "Select",
"label": "Delete Leads and Addresses",
"no_copy": 1,
"options": "Pending\nCompleted\nSkipped",
"read_only": 1
},
{
"default": "0",
"fieldname": "clear_notifications",
"fieldtype": "Check",
"label": "Clear Notifications",
"no_copy": 1,
"read_only": 1
"fieldname": "column_break_tasks_1",
"fieldtype": "Column Break"
},
{
"default": "0",
"fieldname": "reset_company_default_values",
"fieldtype": "Check",
"default": "Pending",
"fieldname": "reset_company_default_values_status",
"fieldtype": "Select",
"label": "Reset Company Default Values",
"no_copy": 1,
"options": "Pending\nCompleted\nSkipped",
"read_only": 1
},
{
"default": "0",
"fieldname": "delete_transactions",
"fieldtype": "Check",
"label": "Delete Transactions",
"default": "Pending",
"fieldname": "clear_notifications_status",
"fieldtype": "Select",
"label": "Clear Notifications",
"no_copy": 1,
"options": "Pending\nCompleted\nSkipped",
"read_only": 1
},
{
"default": "0",
"fieldname": "initialize_doctypes_table",
"fieldtype": "Check",
"fieldname": "column_break_tasks_2",
"fieldtype": "Column Break"
},
{
"default": "Pending",
"fieldname": "initialize_doctypes_table_status",
"fieldtype": "Select",
"label": "Initialize Summary Table",
"no_copy": 1,
"options": "Pending\nCompleted\nSkipped",
"read_only": 1
},
{
"default": "Pending",
"fieldname": "delete_transactions_status",
"fieldtype": "Select",
"label": "Delete Transactions",
"no_copy": 1,
"options": "Pending\nCompleted\nSkipped",
"read_only": 1
},
{
@@ -144,7 +169,7 @@
"index_web_pages_for_search": 1,
"is_submittable": 1,
"links": [],
"modified": "2024-03-27 13:10:54.828051",
"modified": "2025-11-18 15:02:46.427695",
"modified_by": "Administrator",
"module": "Setup",
"name": "Transaction Deletion Record",
@@ -165,8 +190,9 @@
"write": 1
}
],
"row_format": "Dynamic",
"sort_field": "creation",
"sort_order": "DESC",
"states": [],
"track_changes": 1
}
}

View File

@@ -7,6 +7,7 @@ import frappe
from frappe import _, qb
from frappe.desk.notifications import clear_notifications
from frappe.model.document import Document
from frappe.query_builder.functions import Max
from frappe.utils import cint, comma_and, create_batch, get_link_to_form
from frappe.utils.background_jobs import get_job, is_job_enqueued
from frappe.utils.caching import request_cache
@@ -19,6 +20,95 @@ LEDGER_ENTRY_DOCTYPES = frozenset(
)
)
DELETION_CACHE_TTL = 4 * 60 * 60 # 4 hours in seconds
PROTECTED_CORE_DOCTYPES = frozenset(
(
# Core Meta
"DocType",
"DocField",
"Custom Field",
"Property Setter",
"DocPerm",
"Custom DocPerm",
# User & Permissions
"User",
"Role",
"Has Role",
"User Permission",
"User Type",
# System Configuration
"Module Def",
"Workflow",
"Workflow State",
"System Settings",
# Critical System DocTypes
"File",
"Version",
"Activity Log",
"Error Log",
"Scheduled Job Type",
"Scheduled Job Log",
"Server Script",
"Client Script",
"Data Import",
"Data Export",
"Report",
"Print Format",
"Email Template",
"Assignment Rule",
"Workspace",
"Dashboard",
"Access Log",
# Transaction Deletion
"Transaction Deletion Record",
"Company",
)
)
@frappe.whitelist()
def get_protected_doctypes():
"""Get list of protected DocTypes that cannot be deleted (whitelisted for frontend)"""
frappe.only_for("System Manager")
return _get_protected_doctypes_internal()
@frappe.whitelist()
def get_company_link_fields(doctype_name):
"""Get all Company Link field names for a DocType (whitelisted for frontend autocomplete)
Args:
doctype_name: The DocType to check
Returns:
list: List of field names that link to Company DocType, ordered by field index
"""
frappe.only_for("System Manager")
if not doctype_name or not frappe.db.exists("DocType", doctype_name):
return []
return frappe.get_all(
"DocField",
filters={"parent": doctype_name, "fieldtype": "Link", "options": "Company"},
pluck="fieldname",
order_by="idx",
)
def _get_protected_doctypes_internal():
    """Build the effective protected list: existing core DocTypes plus all Single DocTypes."""
    existing_core = [dt for dt in PROTECTED_CORE_DOCTYPES if frappe.db.exists("DocType", dt)]
    single_doctypes = frappe.get_all("DocType", filters={"issingle": 1}, pluck="name")
    return existing_core + single_doctypes
class TransactionDeletionRecord(Document):
# begin: auto-generated types
@@ -35,19 +125,23 @@ class TransactionDeletionRecord(Document):
from erpnext.setup.doctype.transaction_deletion_record_item.transaction_deletion_record_item import (
TransactionDeletionRecordItem,
)
from erpnext.setup.doctype.transaction_deletion_record_to_delete.transaction_deletion_record_to_delete import (
TransactionDeletionRecordToDelete,
)
amended_from: DF.Link | None
clear_notifications: DF.Check
clear_notifications_status: DF.Literal["Pending", "Completed", "Skipped"]
company: DF.Link
delete_bin_data: DF.Check
delete_leads_and_addresses: DF.Check
delete_transactions: DF.Check
delete_bin_data_status: DF.Literal["Pending", "Completed", "Skipped"]
delete_leads_and_addresses_status: DF.Literal["Pending", "Completed", "Skipped"]
delete_transactions_status: DF.Literal["Pending", "Completed", "Skipped"]
doctypes: DF.Table[TransactionDeletionRecordDetails]
doctypes_to_be_ignored: DF.Table[TransactionDeletionRecordItem]
doctypes_to_delete: DF.Table[TransactionDeletionRecordToDelete]
error_log: DF.LongText | None
initialize_doctypes_table: DF.Check
initialize_doctypes_table_status: DF.Literal["Pending", "Completed", "Skipped"]
process_in_single_transaction: DF.Check
reset_company_default_values: DF.Check
reset_company_default_values_status: DF.Literal["Pending", "Completed", "Skipped"]
status: DF.Literal["Queued", "Running", "Failed", "Completed", "Cancelled"]
# end: auto-generated types
@@ -71,33 +165,90 @@ class TransactionDeletionRecord(Document):
def validate(self):
    # Only System Managers may create or modify deletion records.
    frappe.only_for("System Manager")
    self.validate_doctypes_to_be_ignored()
    self.validate_to_delete_list()
def validate_doctypes_to_be_ignored(self):
doctypes_to_be_ignored_list = get_doctypes_to_be_ignored()
for doctype in self.doctypes_to_be_ignored:
if doctype.doctype_name not in doctypes_to_be_ignored_list:
def validate_to_delete_list(self):
"""Validate To Delete list: existence, protection status, child table exclusion, duplicates"""
if not self.doctypes_to_delete:
return
protected = _get_protected_doctypes_internal()
seen_combinations = set()
for item in self.doctypes_to_delete:
if not frappe.db.exists("DocType", item.doctype_name):
frappe.throw(_("DocType {0} does not exist").format(item.doctype_name))
# Check for duplicates using composite key
composite_key = (item.doctype_name, item.company_field or None)
if composite_key in seen_combinations:
field_desc = f" with company field '{item.company_field}'" if item.company_field else ""
frappe.throw(
_(
"DocTypes should not be added manually to the 'Excluded DocTypes' table. You are only allowed to remove entries from it."
),
title=_("Not Allowed"),
_("Duplicate entry: {0}{1}").format(item.doctype_name, field_desc),
title=_("Duplicate DocType"),
)
seen_combinations.add(composite_key)
# Validate protected DocTypes
if item.doctype_name in protected:
frappe.throw(
_("Cannot delete protected core DocType: {0}").format(item.doctype_name),
title=_("Protected DocType"),
)
is_child_table = frappe.db.get_value("DocType", item.doctype_name, "istable")
if is_child_table:
frappe.throw(
_(
"Cannot add child table {0} to deletion list. Child tables are automatically deleted with their parent DocTypes."
).format(item.doctype_name),
title=_("Child Table Not Allowed"),
)
is_virtual = frappe.db.get_value("DocType", item.doctype_name, "is_virtual")
if is_virtual:
frappe.throw(
_(
"Cannot delete virtual DocType: {0}. Virtual DocTypes do not have database tables."
).format(item.doctype_name),
title=_("Virtual DocType"),
)
# Validate company_field if specified
if item.company_field:
valid_company_fields = self._get_company_link_fields(item.doctype_name)
if item.company_field not in valid_company_fields:
frappe.throw(
_("Field '{0}' is not a valid Company link field for DocType {1}").format(
item.company_field, item.doctype_name
),
title=_("Invalid Company Field"),
)
def _is_any_doctype_in_deletion_list(self, doctypes_list):
    """Return True when at least one DocType from *doctypes_list* is queued for deletion."""
    if not self.doctypes_to_delete:
        return False
    targeted = {row.doctype_name for row in self.doctypes_to_delete}
    return not targeted.isdisjoint(doctypes_list)
def generate_job_name_for_task(self, task=None):
    """Build the unique background-job id for *task* on this document."""
    internal_method = self.task_to_internal_method_map[task]
    return f"{self.name}_{internal_method}"
def generate_job_name_for_next_tasks(self, task=None):
    """Return job names for every task that follows *task* in the workflow.

    Args:
        task: The current task label; must be a key of
            ``task_to_internal_method_map``.

    Returns:
        list[str]: Job names for the tasks after *task*, in workflow order.
    """
    # dict preserves insertion order, so the map's key order defines the
    # workflow sequence.
    ordered_tasks = list(self.task_to_internal_method_map)
    current_task_idx = ordered_tasks.index(task)
    # Slice instead of enumerate-and-compare; this also avoids the previous
    # implementation's bug-prone shadowing of the `task` parameter by the
    # loop variable.
    return [self.generate_job_name_for_task(t) for t in ordered_tasks[current_task_idx + 1 :]]
def generate_job_name_for_all_tasks(self):
"""Generate job names for all tasks in the deletion workflow"""
job_names = []
for task in self.task_to_internal_method_map.keys():
job_names.append(self.generate_job_name_for_task(task))
@@ -106,28 +257,28 @@ class TransactionDeletionRecord(Document):
def before_submit(self):
if queued_docs := frappe.db.get_all(
"Transaction Deletion Record",
filters={"company": self.company, "status": ("in", ["Running", "Queued"]), "docstatus": 1},
filters={"status": ("in", ["Running", "Queued"]), "docstatus": 1},
pluck="name",
):
frappe.throw(
_(
"Cannot enqueue multi docs for one company. {0} is already queued/running for company: {1}"
).format(
comma_and([get_link_to_form("Transaction Deletion Record", x) for x in queued_docs]),
frappe.bold(self.company),
)
"Cannot start deletion. Another deletion {0} is already queued/running. Please wait for it to complete."
).format(comma_and([get_link_to_form("Transaction Deletion Record", x) for x in queued_docs]))
)
if not self.doctypes_to_delete and not self.doctypes_to_be_ignored:
frappe.throw(_("Please generate To Delete list before submitting"))
if not self.doctypes_to_be_ignored:
self.populate_doctypes_to_be_ignored_table()
def reset_task_flags(self):
self.clear_notifications = 0
self.delete_bin_data = 0
self.delete_leads_and_addresses = 0
self.delete_transactions = 0
self.initialize_doctypes_table = 0
self.reset_company_default_values = 0
self.clear_notifications_status = "Pending"
self.delete_bin_data_status = "Pending"
self.delete_leads_and_addresses_status = "Pending"
self.delete_transactions_status = "Pending"
self.initialize_doctypes_table_status = "Pending"
self.reset_company_default_values_status = "Pending"
def before_save(self):
self.status = ""
@@ -136,17 +287,288 @@ class TransactionDeletionRecord(Document):
def on_submit(self):
self.db_set("status", "Queued")
self.start_deletion_tasks()
def on_cancel(self):
self.db_set("status", "Cancelled")
self._clear_deletion_cache()
def _set_deletion_cache(self):
    """Flag every targeted DocType in Redis so saves can be blocked while deletion runs."""
    for row in self.doctypes_to_delete:
        cache_key = f"deletion_running_doctype:{row.doctype_name}"
        frappe.cache.set_value(cache_key, self.name, expires_in_sec=DELETION_CACHE_TTL)
def _clear_deletion_cache(self):
    """Remove the per-DocType Redis flags set by ``_set_deletion_cache``."""
    for row in self.doctypes_to_delete:
        frappe.cache.delete_value(f"deletion_running_doctype:{row.doctype_name}")
def _get_child_tables(self, doctype_name):
    """Return the child-table DocType names of *doctype_name*.

    Args:
        doctype_name: The parent DocType to check.

    Returns:
        list: Options of every Table field on the DocType.
    """
    table_field_filters = {"parent": doctype_name, "fieldtype": "Table"}
    return frappe.get_all("DocField", filters=table_field_filters, pluck="options")
def _get_to_delete_row_infos(self, doctype_name, company_field=None, company=None):
    """Collect child tables and document count for a To Delete list row.

    Args:
        doctype_name: The DocType to get information for.
        company_field: Optional Company link fieldname used to filter the count.
        company: Optional company value; falls back to ``self.company``.

    Returns:
        dict: ``{"child_doctypes": str, "document_count": int}``.
    """
    target_company = company or self.company
    children = self._get_child_tables(doctype_name)

    if company_field and target_company:
        count = frappe.db.count(doctype_name, filters={company_field: target_company})
    else:
        # Without a company filter the whole table is counted.
        count = frappe.db.count(doctype_name)

    return {
        "child_doctypes": ", ".join(children) if children else "",
        "document_count": count,
    }
def _has_company_field(self, doctype_name):
    """Return truthy when the DocType has a Link field literally named 'company' pointing at Company."""
    company_field_lookup = {
        "parent": doctype_name,
        "fieldname": "company",
        "fieldtype": "Link",
        "options": "Company",
    }
    return frappe.db.exists("DocField", company_field_lookup)
def _get_company_link_fields(self, doctype_name):
    """Return every Company Link fieldname of *doctype_name*.

    Args:
        doctype_name: The DocType to check.

    Returns:
        list: Fieldnames linking to Company, ordered by field index
        (possibly empty).
    """
    company_link_filters = {"parent": doctype_name, "fieldtype": "Link", "options": "Company"}
    fields = frappe.get_all(
        "DocField",
        filters=company_link_filters,
        pluck="fieldname",
        order_by="idx",
    )
    return fields if fields else []
@frappe.whitelist()
def generate_to_delete_list(self):
    """Rebuild the To Delete child table: one row per (DocType, Company link field).

    For every non-child, non-virtual DocType with at least one Company Link
    field — excluding ignored and protected DocTypes and this record's own
    DocType — a row is appended per Company field with the matching document
    count for this company. The document is saved afterwards.

    Returns:
        dict: ``{"count": <number of rows generated>}``.
    """
    self.doctypes_to_delete = []
    excluded = [d.doctype_name for d in self.doctypes_to_be_ignored]
    excluded.extend(_get_protected_doctypes_internal())
    excluded.append(self.doctype)  # Exclude self
    # Get all DocTypes that have Company link fields
    doctypes_with_company_field = frappe.get_all(
        "DocField",
        filters={"fieldtype": "Link", "options": "Company"},
        pluck="parent",
        distinct=True,
    )
    # Filter to get only valid DocTypes (not child tables, not virtual, not excluded)
    doctypes_with_company = []
    for doctype_name in doctypes_with_company_field:
        if doctype_name in excluded:
            continue
        # Check if doctype exists and is not a child table or virtual
        if frappe.db.exists("DocType", doctype_name):
            meta = frappe.get_meta(doctype_name)
            if not meta.istable and not meta.is_virtual:
                doctypes_with_company.append(doctype_name)
    for doctype_name in doctypes_with_company:
        # Get ALL company fields for this DocType — a DocType may link to
        # Company through several fields; each gets its own row.
        company_fields = self._get_company_link_fields(doctype_name)
        # Get child tables once (same for all company fields of this DocType)
        child_tables = self._get_child_tables(doctype_name)
        child_doctypes_str = ", ".join(child_tables) if child_tables else ""
        for company_field in company_fields:
            doc_count = frappe.db.count(doctype_name, {company_field: self.company})
            self.append(
                "doctypes_to_delete",
                {
                    "doctype_name": doctype_name,
                    "company_field": company_field,
                    "document_count": doc_count,
                    "child_doctypes": child_doctypes_str,
                },
            )
    self.save()
    return {"count": len(self.doctypes_to_delete)}
@frappe.whitelist()
def populate_doctype_details(self, doctype_name, company=None, company_field=None):
    """Get child DocTypes and document count for specified DocType.

    Args:
        doctype_name: The DocType to get details for.
        company: Optional company value for filtering (defaults to self.company).
        company_field: Optional company field name to use for filtering.

    Returns:
        dict: ``child_doctypes`` and ``document_count`` keys; an ``error``
        key is added for child tables or on lookup failure.
    """
    frappe.only_for("System Manager")
    if not doctype_name:
        return {}
    if not frappe.db.exists("DocType", doctype_name):
        frappe.throw(_("DocType {0} does not exist").format(doctype_name))
    is_child_table = frappe.db.get_value("DocType", doctype_name, "istable")
    if is_child_table:
        # Child tables are removed along with their parent; report zero
        # instead of counting rows.
        return {
            "child_doctypes": "",
            "document_count": 0,
            "error": _("{0} is a child table and will be deleted automatically with its parent").format(
                doctype_name
            ),
        }
    try:
        return self._get_to_delete_row_infos(doctype_name, company_field=company_field, company=company)
    except Exception as e:
        # Log the real error server-side; return a generic message so no
        # internals leak to the client.
        frappe.log_error(
            f"Error in populate_doctype_details for {doctype_name}: {e!s}", "Transaction Deletion Record"
        )
        return {
            "child_doctypes": "",
            "document_count": 0,
            "error": _("Unable to fetch DocType details. Please contact system administrator."),
        }
def export_to_delete_template_method(self):
    """Stream the To Delete list to the client as a CSV download."""
    if not self.doctypes_to_delete:
        frappe.throw(_("Generate To Delete list first"))

    import csv
    from io import StringIO

    buffer = StringIO()
    csv_writer = csv.writer(buffer)
    csv_writer.writerow(["doctype_name", "company_field", "child_doctypes"])
    for row in self.doctypes_to_delete:
        csv_writer.writerow([row.doctype_name, row.company_field or "", row.child_doctypes or ""])

    date_stamp = frappe.utils.now_datetime().strftime("%Y%m%d")
    frappe.response["result"] = buffer.getvalue()
    frappe.response["type"] = "csv"
    frappe.response["doctype"] = f"deletion_template_{self.company}_{date_stamp}"
def import_to_delete_template_method(self, csv_content):
    """Import CSV template and regenerate counts.

    Args:
        csv_content: Raw CSV text; must contain a ``doctype_name`` column,
            ``company_field`` is optional.

    Returns:
        dict: ``{"imported": <rows added>, "skipped": <rows rejected>}``.
    """
    import csv
    from io import StringIO

    reader = csv.DictReader(StringIO(csv_content))
    if "doctype_name" not in (reader.fieldnames or []):
        frappe.throw(_("Invalid CSV format. Expected column: doctype_name"))
    self.doctypes_to_delete = []
    protected = _get_protected_doctypes_internal()
    imported_count = 0
    skipped = []
    for row in reader:
        doctype_name = row.get("doctype_name", "").strip()
        company_field = row.get("company_field", "").strip() or None
        if not doctype_name:
            continue
        # Reject rows that would be invalid or dangerous to delete; each
        # rejection is collected for the summary message below.
        if doctype_name in protected:
            skipped.append(_("{0}: Protected DocType").format(doctype_name))
            continue
        if not frappe.db.exists("DocType", doctype_name):
            skipped.append(_("{0}: Not found").format(doctype_name))
            continue
        is_child = frappe.db.get_value("DocType", doctype_name, "istable")
        if is_child:
            skipped.append(_("{0}: Child table (auto-deleted with parent)").format(doctype_name))
            continue
        is_virtual = frappe.db.get_value("DocType", doctype_name, "is_virtual")
        if is_virtual:
            skipped.append(_("{0}: Virtual DocType (no database table)").format(doctype_name))
            continue
        db_company_fields = self._get_company_link_fields(doctype_name)
        import_company_field = ""
        if not db_company_fields:  # Case no company field exists
            details = self._get_to_delete_row_infos(doctype_name)
        elif (
            company_field and company_field in db_company_fields
        ):  # Case it is provided by export and valid
            details = self._get_to_delete_row_infos(doctype_name, company_field)
            import_company_field = company_field
        else:  # Company field exists but not provided by export or invalid
            if "company" in db_company_fields:  # Check if 'company' is a valid field
                details = self._get_to_delete_row_infos(doctype_name, "company")
                import_company_field = "company"
            else:  # Fallback to first valid company field
                details = self._get_to_delete_row_infos(doctype_name, db_company_fields[0])
                import_company_field = db_company_fields[0]
        self.append(
            "doctypes_to_delete",
            {
                "doctype_name": doctype_name,
                "company_field": import_company_field,
                "document_count": details["document_count"],
                "child_doctypes": details["child_doctypes"],
            },
        )
        imported_count += 1
    self.save()
    if skipped:
        frappe.msgprint(
            _("Skipped {0} DocType(s):<br>{1}").format(len(skipped), "<br>".join(skipped)),
            title=_("Import Summary"),
            indicator="orange",
        )
    return {"imported": imported_count, "skipped": len(skipped)}
def enqueue_task(self, task: str | None = None):
"""Enqueue a deletion task for background execution"""
if task and task in self.task_to_internal_method_map:
# make sure that none of next tasks are already running
job_names = self.generate_job_name_for_next_tasks(task=task)
self.validate_running_task_for_doc(job_names=job_names)
# Generate Job Id to uniquely identify each task for this document
job_id = self.generate_job_name_for_task(task)
if self.process_in_single_transaction:
@@ -176,12 +598,13 @@ class TransactionDeletionRecord(Document):
message = "Traceback: <br>" + traceback
frappe.db.set_value(self.doctype, self.name, "error_log", message)
frappe.db.set_value(self.doctype, self.name, "status", "Failed")
self._clear_deletion_cache()
def delete_notifications(self):
self.validate_doc_status()
if not self.clear_notifications:
if self.clear_notifications_status == "Pending":
clear_notifications()
self.db_set("clear_notifications", 1)
self.db_set("clear_notifications_status", "Completed")
self.enqueue_task(task="Initialize Summary Table")
def populate_doctypes_to_be_ignored_table(self):
@@ -215,23 +638,46 @@ class TransactionDeletionRecord(Document):
def start_deletion_tasks(self):
# This method is the entry point for the chain of events that follow
self.db_set("status", "Running")
self._set_deletion_cache()
self.enqueue_task(task="Delete Bins")
def delete_bins(self):
self.validate_doc_status()
if not self.delete_bin_data:
if self.delete_bin_data_status == "Pending":
stock_related_doctypes = [
"Item",
"Warehouse",
"Stock Entry",
"Delivery Note",
"Purchase Receipt",
"Stock Reconciliation",
"Material Request",
"Purchase Invoice",
"Sales Invoice",
]
if not self._is_any_doctype_in_deletion_list(stock_related_doctypes):
self.db_set("delete_bin_data_status", "Skipped")
self.enqueue_task(task="Delete Leads and Addresses")
return
frappe.db.sql(
"""delete from `tabBin` where warehouse in
(select name from tabWarehouse where company=%s)""",
self.company,
)
self.db_set("delete_bin_data", 1)
self.db_set("delete_bin_data_status", "Completed")
self.enqueue_task(task="Delete Leads and Addresses")
def delete_lead_addresses(self):
"""Delete addresses to which leads are linked"""
self.validate_doc_status()
if not self.delete_leads_and_addresses:
if self.delete_leads_and_addresses_status == "Pending":
if not self._is_any_doctype_in_deletion_list(["Lead"]):
self.db_set("delete_leads_and_addresses_status", "Skipped")
self.enqueue_task(task="Reset Company Values")
return
leads = frappe.db.get_all("Lead", filters={"company": self.company}, pluck="name")
addresses = []
if leads:
@@ -268,54 +714,94 @@ class TransactionDeletionRecord(Document):
customer = qb.DocType("Customer")
qb.update(customer).set(customer.lead_name, None).where(customer.lead_name.isin(leads)).run()
self.db_set("delete_leads_and_addresses", 1)
self.db_set("delete_leads_and_addresses_status", "Completed")
self.enqueue_task(task="Reset Company Values")
def reset_company_values(self):
self.validate_doc_status()
if not self.reset_company_default_values:
if self.reset_company_default_values_status == "Pending":
sales_related_doctypes = [
"Sales Order",
"Sales Invoice",
"Quotation",
"Delivery Note",
]
if not self._is_any_doctype_in_deletion_list(sales_related_doctypes):
self.db_set("reset_company_default_values_status", "Skipped")
self.enqueue_task(task="Clear Notifications")
return
company_obj = frappe.get_doc("Company", self.company)
company_obj.total_monthly_sales = 0
company_obj.sales_monthly_history = None
company_obj.save()
self.db_set("reset_company_default_values", 1)
self.db_set("reset_company_default_values_status", "Completed")
self.enqueue_task(task="Clear Notifications")
def initialize_doctypes_to_be_deleted_table(self):
"""Initialize deletion table from To Delete list or fall back to original logic"""
self.validate_doc_status()
if not self.initialize_doctypes_table:
doctypes_to_be_ignored_list = self.get_doctypes_to_be_ignored_list()
docfields = self.get_doctypes_with_company_field(doctypes_to_be_ignored_list)
if self.initialize_doctypes_table_status == "Pending":
# Use To Delete list if available (new behavior)
if not self.doctypes_to_delete:
frappe.throw(
_("No DocTypes in To Delete list. Please generate or import the list before submitting."),
title=_("Empty To Delete List"),
)
tables = self.get_all_child_doctypes()
for docfield in docfields:
if docfield["parent"] != self.doctype:
no_of_docs = self.get_number_of_docs_linked_with_specified_company(
docfield["parent"], docfield["fieldname"]
for to_delete_item in self.doctypes_to_delete:
if to_delete_item.document_count > 0:
# Add parent DocType only - child tables are handled automatically
# by delete_child_tables() when the parent is deleted
# Use company_field directly from To Delete item
self.populate_doctypes_table(
tables, to_delete_item.doctype_name, to_delete_item.company_field, 0
)
if no_of_docs > 0:
# Initialize
self.populate_doctypes_table(tables, docfield["parent"], docfield["fieldname"], 0)
self.db_set("initialize_doctypes_table", 1)
self.db_set("initialize_doctypes_table_status", "Completed")
self.enqueue_task(task="Delete Transactions")
def delete_company_transactions(self):
self.validate_doc_status()
if not self.delete_transactions:
doctypes_to_be_ignored_list = self.get_doctypes_to_be_ignored_list()
self.get_doctypes_with_company_field(doctypes_to_be_ignored_list)
if self.delete_transactions_status == "Pending":
protected_doctypes = _get_protected_doctypes_internal()
self.get_all_child_doctypes()
for docfield in self.doctypes:
if docfield.doctype_name != self.doctype and not docfield.done:
no_of_docs = self.get_number_of_docs_linked_with_specified_company(
docfield.doctype_name, docfield.docfield_name
)
if no_of_docs > 0:
reference_docs = frappe.get_all(
docfield.doctype_name,
filters={docfield.docfield_name: self.company},
limit=self.batch_size,
if docfield.doctype_name in protected_doctypes:
error_msg = (
f"CRITICAL: Attempted to delete protected DocType: {docfield.doctype_name}"
)
frappe.log_error(error_msg, "Transaction Deletion Security")
frappe.throw(
_("Cannot delete protected core DocType: {0}").format(docfield.doctype_name),
title=_("Protected DocType"),
)
# Get company_field from stored value (could be any Company link field)
company_field = docfield.docfield_name
if company_field:
no_of_docs = self.get_number_of_docs_linked_with_specified_company(
docfield.doctype_name, company_field
)
else:
no_of_docs = frappe.db.count(docfield.doctype_name)
if no_of_docs > 0:
if company_field:
reference_docs = frappe.get_all(
docfield.doctype_name,
filters={company_field: self.company},
fields=["name"],
limit=self.batch_size,
)
else:
reference_docs = frappe.get_all(
docfield.doctype_name, fields=["name"], limit=self.batch_size
)
reference_doc_names = [r.name for r in reference_docs]
self.delete_version_log(docfield.doctype_name, reference_doc_names)
@@ -329,26 +815,38 @@ class TransactionDeletionRecord(Document):
processed = int(docfield.no_of_docs) + len(reference_doc_names)
frappe.db.set_value(docfield.doctype, docfield.name, "no_of_docs", processed)
else:
# reset naming series
naming_series = frappe.db.get_value("DocType", docfield.doctype_name, "autoname")
if naming_series:
if "#" in naming_series:
self.update_naming_series(naming_series, docfield.doctype_name)
frappe.db.set_value(docfield.doctype, docfield.name, "done", 1)
to_delete_row = frappe.db.get_value(
"Transaction Deletion Record To Delete",
{
"parent": self.name,
"doctype_name": docfield.doctype_name,
"company_field": company_field,
},
"name",
)
if to_delete_row:
frappe.db.set_value(
"Transaction Deletion Record To Delete", to_delete_row, "deleted", 1
)
pending_doctypes = frappe.db.get_all(
"Transaction Deletion Record Details",
filters={"parent": self.name, "done": 0},
pluck="doctype_name",
)
if pending_doctypes:
# as method is enqueued after commit, calling itself will not make validate_doc_status to throw
# recursively call this task to delete all transactions
self.enqueue_task(task="Delete Transactions")
else:
self.db_set("status", "Completed")
self.db_set("delete_transactions", 1)
self.db_set("delete_transactions_status", "Completed")
self.db_set("error_log", None)
self._clear_deletion_cache()
def get_doctypes_to_be_ignored_list(self):
doctypes_to_be_ignored_list = frappe.get_all(
@@ -378,18 +876,33 @@ class TransactionDeletionRecord(Document):
def get_number_of_docs_linked_with_specified_company(self, doctype, company_fieldname):
return frappe.db.count(doctype, {company_fieldname: self.company})
def populate_doctypes_table(self, tables, doctype, fieldname, no_of_docs):
def get_company_field(self, doctype_name):
    """Return the first Company Link fieldname on *doctype_name*, if any."""
    company_link_filter = {"parent": doctype_name, "fieldtype": "Link", "options": "Company"}
    return frappe.db.get_value("DocField", company_link_filter, "fieldname")
def populate_doctypes_table(self, tables, doctype, company_field, no_of_docs):
"""Add doctype to processing tracker
Args:
tables: List of child table DocType names (to exclude)
doctype: DocType name to track
company_field: Company link field name (or None)
no_of_docs: Initial count
"""
self.flags.ignore_validate_update_after_submit = True
if doctype not in tables:
self.append(
"doctypes", {"doctype_name": doctype, "docfield_name": fieldname, "no_of_docs": no_of_docs}
"doctypes",
{"doctype_name": doctype, "docfield_name": company_field, "no_of_docs": no_of_docs},
)
self.save(ignore_permissions=True)
def delete_child_tables(self, doctype, reference_doc_names):
child_tables = frappe.get_all(
"DocField", filters={"fieldtype": "Table", "parent": doctype}, pluck="options"
)
child_tables = self._get_child_tables(doctype)
for table in child_tables:
frappe.db.delete(table, {"parent": ["in", reference_doc_names]})
@@ -397,22 +910,52 @@ class TransactionDeletionRecord(Document):
def delete_docs_linked_with_specified_company(self, doctype, reference_doc_names):
frappe.db.delete(doctype, {"name": ("in", reference_doc_names)})
def update_naming_series(self, naming_series, doctype_name):
@staticmethod
def get_naming_series_prefix(naming_series: str, doctype_name: str) -> str:
"""Extract the static prefix from an autoname pattern.
Args:
naming_series: The autoname pattern (e.g., "PREFIX.####", "format:PRE-{####}")
doctype_name: DocType name for error logging
Returns:
The static prefix before the counter placeholders
"""
if "." in naming_series:
prefix, hashes = naming_series.rsplit(".", 1)
prefix = naming_series.rsplit(".", 1)[0]
elif "{" in naming_series:
prefix = naming_series.rsplit("{", 1)[0]
else:
prefix, hashes = naming_series.rsplit("{", 1)
last = frappe.db.sql(
f"""select max(name) from `tab{doctype_name}`
where name like %s""",
prefix + "%",
# Fallback for unexpected patterns (shouldn't happen with valid Frappe naming series)
frappe.log_error(
title=_("Unexpected Naming Series Pattern"),
message=_(
"Naming series '{0}' for DocType '{1}' does not contain standard '.' or '{{' separator. Using fallback extraction."
).format(naming_series, doctype_name),
)
prefix = naming_series.split("#", 1)[0] if "#" in naming_series else naming_series
return prefix
def update_naming_series(self, naming_series, doctype_name):
# Derive a static prefix from the autoname pattern
prefix = self.get_naming_series_prefix(naming_series, doctype_name)
# Find the highest number used in the naming series to reset the counter
doctype_table = qb.DocType(doctype_name)
result = (
qb.from_(doctype_table)
.select(Max(doctype_table.name))
.where(doctype_table.name.like(prefix + "%"))
.run()
)
if last and last[0][0]:
last = cint(last[0][0].replace(prefix, ""))
if result and result[0][0]:
last = cint(result[0][0].replace(prefix, ""))
else:
last = 0
frappe.db.sql("""update `tabSeries` set current = %s where name=%s""", (last, prefix))
frappe.db.set_value("Series", prefix, "current", last, update_modified=False)
def delete_version_log(self, doctype, docnames):
versions = qb.DocType("Version")
@@ -487,15 +1030,61 @@ def get_doctypes_to_be_ignored():
return doctypes_to_be_ignored
@frappe.whitelist()
def export_to_delete_template(name):
    """Export the To Delete list of a Transaction Deletion Record as CSV (URL endpoint)."""
    frappe.only_for("System Manager")
    record = frappe.get_doc("Transaction Deletion Record", name)
    record.check_permission("read")
    return record.export_to_delete_template_method()
@frappe.whitelist()
def process_import_template(transaction_deletion_record_name, file_url):
    """Import CSV template and populate To Delete list.

    Args:
        transaction_deletion_record_name: Name of the Transaction Deletion Record.
        file_url: URL of a CSV File document attached to that record.

    Raises:
        On invalid/missing files, files not attached to this record, or
        non-CSV uploads (via ``frappe.throw``).
    """
    import os

    doc = frappe.get_doc("Transaction Deletion Record", transaction_deletion_record_name)
    doc.check_permission("write")
    # Basic path-traversal guard; the real path is resolved through the
    # File document below, not from the raw URL.
    if not file_url or ".." in file_url:
        frappe.throw(_("Invalid file URL"))
    try:
        file_doc = frappe.get_doc("File", {"file_url": file_url})
    except frappe.DoesNotExistError:
        frappe.throw(_("File not found"))
    # The file must be attached to this very record, preventing reads of
    # other users' uploads.
    if (
        file_doc.attached_to_doctype != "Transaction Deletion Record"
        or file_doc.attached_to_name != transaction_deletion_record_name
    ):
        frappe.throw(_("File does not belong to this Transaction Deletion Record"))
    if not file_doc.file_name or not file_doc.file_name.lower().endswith(".csv"):
        frappe.throw(_("Only CSV files are allowed"))
    file_path = file_doc.get_full_path()
    if not os.path.isfile(file_path):
        frappe.throw(_("File not found on server"))
    with open(file_path, encoding="utf-8") as f:
        csv_content = f.read()
    return doc.import_to_delete_template_method(csv_content)
@frappe.whitelist()
@request_cache
def is_deletion_doc_running(company: str | None = None, err_msg: str | None = None):
if not company:
return
"""Check if any deletion is running globally
The company parameter is kept for backwards compatibility but is now ignored.
"""
running_deletion_job = frappe.db.get_value(
"Transaction Deletion Record",
{"docstatus": 1, "company": company, "status": "Running"},
{"docstatus": 1, "status": ("in", ["Running", "Queued"])},
"name",
)
@@ -504,17 +1093,28 @@ def is_deletion_doc_running(company: str | None = None, err_msg: str | None = No
frappe.throw(
title=_("Deletion in Progress!"),
msg=_("Transaction Deletion Document: {0} is running for this Company. {1}").format(
msg=_("Transaction Deletion Record {0} is already running. {1}").format(
get_link_to_form("Transaction Deletion Record", running_deletion_job), err_msg or ""
),
)
def check_for_running_deletion_job(doc, method=None):
# Check if DocType has 'company' field
if doc.doctype in LEDGER_ENTRY_DOCTYPES or not doc.meta.has_field("company"):
"""Hook function called on document validate - checks Redis cache for running deletions"""
if doc.doctype in LEDGER_ENTRY_DOCTYPES:
return
is_deletion_doc_running(
doc.company, _("Cannot make any transactions until the deletion job is completed")
)
if doc.doctype in PROTECTED_CORE_DOCTYPES:
return
deletion_name = frappe.cache.get_value(f"deletion_running_doctype:{doc.doctype}")
if deletion_name:
frappe.throw(
title=_("Deletion in Progress!"),
msg=_(
"Transaction Deletion Record {0} is currently deleting {1}. Cannot save documents until deletion completes."
).format(
get_link_to_form("Transaction Deletion Record", deletion_name), frappe.bold(doc.doctype)
),
)

View File

@@ -17,17 +17,19 @@
"reqd": 1
}
],
"grid_page_length": 50,
"index_web_pages_for_search": 1,
"istable": 1,
"links": [],
"modified": "2024-03-27 13:10:55.128861",
"modified": "2025-11-14 16:17:47.755531",
"modified_by": "Administrator",
"module": "Setup",
"name": "Transaction Deletion Record Item",
"owner": "Administrator",
"permissions": [],
"row_format": "Dynamic",
"sort_field": "creation",
"sort_order": "DESC",
"states": [],
"track_changes": 1
}
}

View File

@@ -0,0 +1,67 @@
{
"actions": [],
"creation": "2025-11-14 00:00:00",
"doctype": "DocType",
"editable_grid": 1,
"engine": "InnoDB",
"field_order": [
"doctype_name",
"company_field",
"document_count",
"child_doctypes",
"deleted"
],
"fields": [
{
"fieldname": "doctype_name",
"fieldtype": "Link",
"in_list_view": 1,
"label": "DocType",
"options": "DocType"
},
{
"description": "Company link field name used for filtering (optional - leave empty to delete all records)",
"fieldname": "company_field",
"fieldtype": "Data",
"in_list_view": 1,
"label": "Company Field"
},
{
"fieldname": "document_count",
"fieldtype": "Int",
"in_list_view": 1,
"label": "Document Count",
"read_only": 1
},
{
"description": "Child tables that will also be deleted",
"fieldname": "child_doctypes",
"fieldtype": "Small Text",
"in_list_view": 1,
"label": "Child DocTypes",
"read_only": 1
},
{
"default": "0",
"fieldname": "deleted",
"fieldtype": "Check",
"in_list_view": 1,
"label": "Deleted",
"read_only": 1
}
],
"index_web_pages_for_search": 1,
"istable": 1,
"links": [],
"modified": "2025-11-14 16:17:04.494126",
"modified_by": "Administrator",
"module": "Setup",
"name": "Transaction Deletion Record To Delete",
"owner": "Administrator",
"permissions": [],
"row_format": "Dynamic",
"sort_field": "creation",
"sort_order": "DESC",
"states": [],
"track_changes": 1
}

View File

@@ -0,0 +1,27 @@
# Copyright (c) 2025, Frappe Technologies Pvt. Ltd. and contributors
# For license information, please see license.txt

# import frappe
from frappe.model.document import Document


class TransactionDeletionRecordToDelete(Document):
	"""Child-table row of a Transaction Deletion Record.

	Describes one DocType queued for deletion: the Company link field used
	for filtering (empty means delete all records), the number of documents
	found, the child tables that will be removed alongside, and whether the
	deletion has already been performed.
	"""

	# begin: auto-generated types
	# This code is auto-generated. Do not modify anything in this block.

	from typing import TYPE_CHECKING

	if TYPE_CHECKING:
		from frappe.types import DF

		child_doctypes: DF.SmallText | None
		company_field: DF.Data | None
		deleted: DF.Check
		doctype_name: DF.Link | None
		document_count: DF.Int
		parent: DF.Data
		parentfield: DF.Data
		parenttype: DF.Data
	# end: auto-generated types

	pass

View File

@@ -202,7 +202,7 @@ def enable_all_roles_and_domains():
def _enable_all_roles_for_admin():
from frappe.desk.page.setup_wizard.setup_wizard import add_all_roles_to
all_roles = set(frappe.db.get_values("Role", pluck="name"))
all_roles = set(frappe.get_all("Role", pluck="name"))
admin_roles = set(
frappe.db.get_values("Has Role", {"parent": "Administrator"}, fieldname="role", pluck="role")
)

View File

@@ -22,6 +22,7 @@
"reserved_stock",
"section_break_pmrs",
"stock_uom",
"company",
"column_break_0slj",
"valuation_rate",
"stock_value"
@@ -132,6 +133,14 @@
"options": "UOM",
"read_only": 1
},
{
"fetch_from": "warehouse.company",
"fieldname": "company",
"fieldtype": "Link",
"label": "Company",
"options": "Company",
"read_only": 1
},
{
"fieldname": "valuation_rate",
"fieldtype": "Float",
@@ -186,7 +195,7 @@
"idx": 1,
"in_create": 1,
"links": [],
"modified": "2024-03-27 13:06:39.414036",
"modified": "2026-02-01 08:11:46.824913",
"modified_by": "Administrator",
"module": "Stock",
"name": "Bin",
@@ -231,8 +240,9 @@
}
],
"quick_entry": 1,
"row_format": "Dynamic",
"search_fields": "item_code,warehouse",
"sort_field": "creation",
"sort_order": "ASC",
"states": []
}
}

View File

@@ -19,6 +19,7 @@ class Bin(Document):
from frappe.types import DF
actual_qty: DF.Float
company: DF.Link | None
indented_qty: DF.Float
item_code: DF.Link
ordered_qty: DF.Float

View File

@@ -1070,7 +1070,7 @@
"no_copy": 1,
"oldfieldname": "status",
"oldfieldtype": "Select",
"options": "\nDraft\nTo Bill\nCompleted\nReturn\nReturn Issued\nCancelled\nClosed",
"options": "\nDraft\nTo Bill\nPartially Billed\nCompleted\nReturn\nReturn Issued\nCancelled\nClosed",
"print_hide": 1,
"print_width": "150px",
"read_only": 1,
@@ -1426,14 +1426,15 @@
"hidden": 1,
"label": "Item Wise Tax Details",
"no_copy": 1,
"options": "Item Wise Tax Detail"
"options": "Item Wise Tax Detail",
"print_hide": 1
}
],
"icon": "fa fa-truck",
"idx": 146,
"is_submittable": 1,
"links": [],
"modified": "2025-12-02 23:55:25.415443",
"modified": "2026-02-03 12:27:19.055918",
"modified_by": "Administrator",
"module": "Stock",
"name": "Delivery Note",

View File

@@ -127,7 +127,15 @@ class DeliveryNote(SellingController):
shipping_address_name: DF.Link | None
shipping_rule: DF.Link | None
status: DF.Literal[
"", "Draft", "To Bill", "Completed", "Return", "Return Issued", "Cancelled", "Closed"
"",
"Draft",
"To Bill",
"Partially Billed",
"Completed",
"Return",
"Return Issued",
"Cancelled",
"Closed",
]
tax_category: DF.Link | None
tax_id: DF.Data | None

View File

@@ -18,8 +18,10 @@ frappe.listview_settings["Delivery Note"] = {
return [__("Closed"), "green", "status,=,Closed"];
} else if (doc.status === "Return Issued") {
return [__("Return Issued"), "grey", "status,=,Return Issued"];
} else if (flt(doc.per_billed, 2) < 100) {
return [__("To Bill"), "orange", "per_billed,<,100|docstatus,=,1"];
} else if (flt(doc.per_billed) == 0) {
return [__("To Bill"), "orange", "per_billed,=,0|docstatus,=,1"];
} else if (flt(doc.per_billed, 2) > 0 && flt(doc.per_billed, 2) < 100) {
return [__("Partially Billed"), "yellow", "per_billed,<,100|docstatus,=,1"];
} else if (flt(doc.per_billed, 2) === 100) {
return [__("Completed"), "green", "per_billed,=,100|docstatus,=,1"];
}

View File

@@ -1101,7 +1101,8 @@ class TestDeliveryNote(IntegrationTestCase):
self.assertEqual(dn2.get("items")[0].billed_amt, 400)
self.assertEqual(dn2.per_billed, 80)
self.assertEqual(dn2.status, "To Bill")
# Since 20% of DN2 is yet to be billed, it should be classified as partially billed.
self.assertEqual(dn2.status, "Partially Billed")
def test_dn_billing_status_case4(self):
# SO -> SI -> DN
@@ -2864,6 +2865,23 @@ class TestDeliveryNote(IntegrationTestCase):
for entry in sabb.entries:
self.assertEqual(entry.incoming_rate, 200)
@IntegrationTestCase.change_settings("Selling Settings", {"validate_selling_price": 1})
def test_validate_selling_price(self):
item_code = make_item("VSP Item", properties={"is_stock_item": 1}).name
make_stock_entry(item_code=item_code, target="_Test Warehouse - _TC", qty=1, basic_rate=10)
make_stock_entry(item_code=item_code, target="_Test Warehouse - _TC", qty=1, basic_rate=1)
dn = create_delivery_note(
item_code=item_code,
qty=1,
rate=9,
do_not_save=True,
)
self.assertRaises(frappe.ValidationError, dn.save)
dn.items[0].incoming_rate = 0
dn.items[0].stock_qty = 2
dn.save()
def create_delivery_note(**args):
dn = frappe.new_doc("Delivery Note")

View File

@@ -1282,7 +1282,8 @@
"hidden": 1,
"label": "Item Wise Tax Details",
"no_copy": 1,
"options": "Item Wise Tax Detail"
"options": "Item Wise Tax Detail",
"print_hide": 1
}
],
"grid_page_length": 50,
@@ -1290,7 +1291,7 @@
"idx": 261,
"is_submittable": 1,
"links": [],
"modified": "2025-11-27 16:46:30.210628",
"modified": "2026-01-29 21:24:30.652933",
"modified_by": "Administrator",
"module": "Stock",
"name": "Purchase Receipt",

View File

@@ -5036,6 +5036,206 @@ class TestPurchaseReceipt(IntegrationTestCase):
return_pr = make_return_doc("Purchase Receipt", pr.name)
self.assertRaises(frappe.ValidationError, return_pr.submit)
def test_internal_purchase_receipt_incoming_rate_with_lcv(self):
"""
To test inter branch transaction incoming rate calculation with lcv after item reposting
"""
from erpnext.stock.doctype.delivery_note.delivery_note import make_inter_company_purchase_receipt
from erpnext.stock.doctype.delivery_note.test_delivery_note import create_delivery_note
prepare_data_for_internal_transfer()
customer = "_Test Internal Customer 2"
company = "_Test Company with perpetual inventory"
item_doc = create_item("_Test Internal PR LCV Item")
lcv_expense_account = "Expenses Included In Valuation - TCP1"
from_warehouse = create_warehouse("_Test Internal From Warehouse LCV", company=company)
to_warehouse = create_warehouse("_Test Internal To Warehouse LCV", company=company)
# inward qty for internal transactions
make_purchase_receipt(
item_code=item_doc.item_code,
qty=5,
rate=100,
company="_Test Company with perpetual inventory",
warehouse=from_warehouse,
)
idn = create_delivery_note(
item_code=item_doc.name,
company=company,
customer=customer,
cost_center="Main - TCP1",
expense_account="Cost of Goods Sold - TCP1",
qty=5,
rate=100,
warehouse=from_warehouse,
target_warehouse=to_warehouse,
)
self.assertEqual(idn.items[0].rate, 100)
ipr = make_inter_company_purchase_receipt(idn.name)
ipr.items[0].warehouse = from_warehouse
self.assertEqual(ipr.items[0].rate, 100)
ipr.submit()
self.create_lcv(ipr.doctype, ipr.name, company, lcv_expense_account, charges=100)
ipr.reload()
self.assertEqual(ipr.items[0].landed_cost_voucher_amount, 100)
self.assertEqual(ipr.items[0].valuation_rate, 120)
# repost the receipt and check the stock ledger values
repost_doc = frappe.new_doc("Repost Item Valuation")
repost_doc.update(
{
"based_on": "Transaction",
"voucher_type": ipr.doctype,
"voucher_no": ipr.name,
"posting_date": ipr.posting_date,
"posting_time": ipr.posting_time,
"company": ipr.company,
"allow_negative_stock": 1,
"via_landed_cost_voucher": 0,
"allow_zero_rate": 0,
}
)
repost_doc.save()
repost_doc.submit()
stk_ledger = frappe.db.get_value(
"Stock Ledger Entry",
{"voucher_type": "Purchase Receipt", "voucher_no": ipr.name, "warehouse": from_warehouse},
["incoming_rate", "stock_value_difference"],
as_dict=True,
)
# check the incoming rate and stock value change
self.assertEqual(stk_ledger.incoming_rate, 120)
self.assertEqual(stk_ledger.stock_value_difference, 600)
def test_negative_stock_error_for_purchase_return_when_stock_exists_in_future_date(self):
from erpnext.controllers.sales_and_purchase_return import make_return_doc
from erpnext.stock.doctype.stock_entry.test_stock_entry import make_stock_entry
from erpnext.stock.stock_ledger import NegativeStockError
item_code = make_item(
"Test Negative Stock for Purchase Return with Future Stock Item",
{
"is_stock_item": 1,
"has_batch_no": 1,
"create_new_batch": 1,
"batch_number_series": "TNSPFPRI.#####",
},
).name
make_purchase_receipt(
item_code=item_code,
posting_date=add_days(today(), -4),
qty=100,
rate=100,
warehouse="_Test Warehouse - _TC",
)
pr1 = make_purchase_receipt(
item_code=item_code,
posting_date=add_days(today(), -3),
qty=100,
rate=100,
warehouse="_Test Warehouse - _TC",
)
batch1 = get_batch_from_bundle(pr1.items[0].serial_and_batch_bundle)
pr2 = make_purchase_receipt(
item_code=item_code,
posting_date=add_days(today(), -2),
qty=100,
rate=100,
warehouse="_Test Warehouse - _TC",
)
batch2 = get_batch_from_bundle(pr2.items[0].serial_and_batch_bundle)
make_stock_entry(
item_code=item_code,
qty=100,
posting_date=add_days(today(), -1),
source="_Test Warehouse - _TC",
target="_Test Warehouse 1 - _TC",
batch_no=batch1,
use_serial_batch_fields=1,
)
make_stock_entry(
item_code=item_code,
qty=100,
posting_date=add_days(today(), -1),
source="_Test Warehouse - _TC",
target="_Test Warehouse 1 - _TC",
batch_no=batch2,
use_serial_batch_fields=1,
)
make_stock_entry(
item_code=item_code,
qty=100,
posting_date=today(),
source="_Test Warehouse 1 - _TC",
target="_Test Warehouse - _TC",
batch_no=batch1,
use_serial_batch_fields=1,
)
make_purchase_entry = make_return_doc("Purchase Receipt", pr1.name)
make_purchase_entry.set_posting_time = 1
make_purchase_entry.posting_date = pr1.posting_date
self.assertRaises(NegativeStockError, make_purchase_entry.submit)
def test_purchase_return_from_different_warehouse(self):
from erpnext.controllers.sales_and_purchase_return import make_return_doc
from erpnext.stock.doctype.stock_entry.test_stock_entry import make_stock_entry
item_code = make_item(
"Test Purchase Return From Different Warehouse Item",
{
"is_stock_item": 1,
"has_batch_no": 1,
"create_new_batch": 1,
"batch_number_series": "TPRFDWU.#####",
},
).name
pr1 = make_purchase_receipt(
item_code=item_code,
posting_date=add_days(today(), -4),
qty=100,
rate=100,
warehouse="_Test Warehouse - _TC",
)
batch1 = get_batch_from_bundle(pr1.items[0].serial_and_batch_bundle)
make_stock_entry(
item_code=item_code,
qty=100,
posting_date=add_days(today(), -1),
source="_Test Warehouse - _TC",
target="_Test Warehouse 1 - _TC",
batch_no=batch1,
use_serial_batch_fields=1,
)
make_purchase_entry = make_return_doc("Purchase Receipt", pr1.name)
make_purchase_entry.items[0].warehouse = "_Test Warehouse 1 - _TC"
make_purchase_entry.submit()
make_purchase_entry.reload()
sabb = frappe.get_doc("Serial and Batch Bundle", make_purchase_entry.items[0].serial_and_batch_bundle)
for row in sabb.entries:
self.assertEqual(row.warehouse, "_Test Warehouse 1 - _TC")
self.assertEqual(row.incoming_rate, 100)
def prepare_data_for_internal_transfer():
from erpnext.accounts.doctype.sales_invoice.test_sales_invoice import create_internal_supplier

View File

@@ -17,6 +17,7 @@ from frappe.utils import (
cint,
cstr,
flt,
get_datetime,
get_link_to_form,
getdate,
now,
@@ -439,6 +440,8 @@ class SerialandBatchBundle(Document):
)
def get_valuation_rate_for_return_entry(self, return_against):
from erpnext.controllers.sales_and_purchase_return import get_warehouses_for_return
if not self.voucher_detail_no:
return {}
@@ -468,9 +471,11 @@ class SerialandBatchBundle(Document):
["Serial and Batch Bundle", "voucher_detail_no", "=", return_against_voucher_detail_no],
]
# Added to handle rejected warehouse case
if self.voucher_type in ["Purchase Receipt", "Purchase Invoice"]:
# Added to handle rejected warehouse case
filters.append(["Serial and Batch Entry", "warehouse", "=", self.warehouse])
warehouses = get_warehouses_for_return(self.voucher_type, return_against_voucher_detail_no)
if self.warehouse in warehouses:
filters.append(["Serial and Batch Entry", "warehouse", "=", self.warehouse])
bundle_data = frappe.get_all(
"Serial and Batch Bundle",
@@ -1452,31 +1457,44 @@ class SerialandBatchBundle(Document):
for d in self.entries:
available_qty = batch_wise_available_qty.get(d.batch_no, 0)
if flt(available_qty, precision) < 0:
frappe.throw(
_(
"""
The Batch {0} of an item {1} has negative stock in the warehouse {2}. Please add a stock quantity of {3} to proceed with this entry."""
).format(
bold(d.batch_no),
bold(self.item_code),
bold(self.warehouse),
bold(abs(flt(available_qty, precision))),
),
title=_("Negative Stock Error"),
)
self.throw_negative_batch(d.batch_no, available_qty, precision)
def throw_negative_batch(self, batch_no, available_qty, precision):
# Raise a user-facing "Negative Stock" error for `batch_no` of this bundle's
# item in `self.warehouse`. `available_qty` is the (negative) balance and
# `precision` is the float precision used when displaying the shortfall.
# NOTE(review): imported locally — presumably to avoid a circular import
# between stock_ledger and this doctype; confirm.
from erpnext.stock.stock_ledger import NegativeStockError
# exc=NegativeStockError lets callers (and tests) catch this specific
# failure class rather than a generic ValidationError.
frappe.throw(
_(
"""
The Batch {0} of an item {1} has negative stock in the warehouse {2}. Please add a stock quantity of {3} to proceed with this entry."""
).format(
bold(batch_no),
bold(self.item_code),
bold(self.warehouse),
# abs() so the user sees the positive quantity they need to add.
bold(abs(flt(available_qty, precision))),
),
title=_("Negative Stock Error"),
exc=NegativeStockError,
)
def get_batchwise_available_qty(self):
available_qty = self.get_available_qty_from_sabb()
available_qty_from_ledger = self.get_available_qty_from_stock_ledger()
batchwise_entries = self.get_available_qty_from_sabb()
batchwise_entries.extend(self.get_available_qty_from_stock_ledger())
if not available_qty_from_ledger:
return available_qty
available_qty = frappe._dict({})
batchwise_entries = sorted(
batchwise_entries,
key=lambda x: (get_datetime(x.get("posting_datetime")), get_datetime(x.get("creation"))),
)
for batch_no, qty in available_qty_from_ledger.items():
if batch_no in available_qty:
available_qty[batch_no] += qty
precision = frappe.get_precision("Serial and Batch Entry", "qty")
for row in batchwise_entries:
if row.batch_no in available_qty:
available_qty[row.batch_no] += flt(row.qty)
else:
available_qty[batch_no] = qty
available_qty[row.batch_no] = flt(row.qty)
if flt(available_qty[row.batch_no], precision) < 0:
self.throw_negative_batch(row.batch_no, available_qty[row.batch_no], precision)
return available_qty
@@ -1489,7 +1507,9 @@ class SerialandBatchBundle(Document):
frappe.qb.from_(sle)
.select(
sle.batch_no,
Sum(sle.actual_qty).as_("available_qty"),
sle.actual_qty.as_("qty"),
sle.posting_datetime,
sle.creation,
)
.where(
(sle.item_code == self.item_code)
@@ -1501,12 +1521,9 @@ class SerialandBatchBundle(Document):
& (sle.batch_no.isnotnull())
)
.for_update()
.groupby(sle.batch_no)
)
res = query.run(as_list=True)
return frappe._dict(res) if res else frappe._dict()
return query.run(as_dict=True)
def get_available_qty_from_sabb(self):
batches = [d.batch_no for d in self.entries if d.batch_no]
@@ -1517,7 +1534,9 @@ class SerialandBatchBundle(Document):
frappe.qb.from_(child)
.select(
child.batch_no,
Sum(child.qty).as_("available_qty"),
child.qty,
child.posting_datetime,
child.creation,
)
.where(
(child.item_code == self.item_code)
@@ -1528,13 +1547,10 @@ class SerialandBatchBundle(Document):
& (child.type_of_transaction.isin(["Inward", "Outward"]))
)
.for_update()
.groupby(child.batch_no)
)
query = query.where(child.voucher_type != "Pick List")
res = query.run(as_list=True)
return frappe._dict(res) if res else frappe._dict()
return query.run(as_dict=True)
def validate_voucher_no_docstatus(self):
if self.voucher_type == "POS Invoice":
@@ -2597,11 +2613,11 @@ def get_reserved_batches_for_pos(kwargs) -> dict:
key = (row.batch_no, row.warehouse)
if key in pos_batches:
pos_batches[key]["qty"] -= row.qty * -1 if row.is_return else row.qty
pos_batches[key]["qty"] += row.qty * -1
else:
pos_batches[key] = frappe._dict(
{
"qty": (row.qty * -1 if not row.is_return else row.qty),
"qty": row.qty * -1,
"warehouse": row.warehouse,
}
)

View File

@@ -258,6 +258,7 @@ class StockEntry(StockController, SubcontractingInwardController):
self.validate_job_card_item()
self.set_purpose_for_stock_entry()
self.clean_serial_nos()
self.validate_repack_entry()
if not self.from_bom:
self.fg_completed_qty = 0.0
@@ -282,6 +283,20 @@ class StockEntry(StockController, SubcontractingInwardController):
super().validate_subcontracting_inward()
def validate_repack_entry(self):
	"""Validate manual basic rates for Repack entries with multiple finished goods.

	When a Repack stock entry produces more than one distinct finished good,
	the basic rate for every finished-good row must be set manually (the
	'Set Basic Rate Manually' checkbox); otherwise a validation error is raised.
	No-op for any other stock entry purpose.
	"""
	if self.purpose != "Repack":
		return

	# Keyed by item_code, so multiple rows of the same finished good count once.
	fg_items = {row.item_code: row for row in self.items if row.is_finished_item}
	if len(fg_items) > 1 and not all(row.set_basic_rate_manually for row in fg_items.values()):
		frappe.throw(
			_(
				"When there are multiple finished goods ({0}) in a Repack stock entry, the basic rate for all finished goods must be set manually. To set rate manually, enable the checkbox 'Set Basic Rate Manually' in the respective finished good row."
			).format(", ".join(fg_items)),
			title=_("Set Basic Rate Manually"),
		)
def validate_raw_materials_exists(self):
if self.purpose not in ["Manufacture", "Repack", "Disassemble"]:
return

View File

@@ -413,6 +413,10 @@ class TestStockEntry(IntegrationTestCase):
},
)
repack.set_stock_entry_type()
for row in repack.items:
if row.t_warehouse:
row.set_basic_rate_manually = 1
repack.insert()
self.assertEqual(repack.items[1].is_finished_item, 1)

View File

@@ -1272,15 +1272,11 @@ def get_items(warehouse, posting_date, posting_time, company, item_code=None, ig
for d in items:
if (d.item_code, d.warehouse) in itemwise_batch_data:
valuation_rate = get_stock_balance(
d.item_code, d.warehouse, posting_date, posting_time, with_valuation_rate=True
)[1]
for row in itemwise_batch_data.get((d.item_code, d.warehouse)):
if ignore_empty_stock and not row.qty:
continue
args = get_item_data(row, row.qty, valuation_rate)
args = get_item_data(row, row.qty, row.valuation_rate)
res.append(args)
else:
stock_bal = get_stock_balance(
@@ -1414,6 +1410,7 @@ def get_itemwise_batch(warehouse, posting_date, company, item_code=None):
"item_code": row[0],
"warehouse": row[3],
"qty": row[8],
"valuation_rate": row[9],
"item_name": row[1],
"batch_no": row[4],
}

View File

@@ -282,7 +282,11 @@ class StockBalanceReport:
for field in self.inventory_dimensions:
qty_dict[field] = entry.get(field)
if entry.voucher_type == "Stock Reconciliation" and (not entry.batch_no or entry.serial_no):
if (
entry.voucher_type == "Stock Reconciliation"
and frappe.get_cached_value(entry.voucher_type, entry.voucher_no, "purpose") != "Opening Stock"
and (not entry.batch_no or entry.serial_no)
):
qty_diff = flt(entry.qty_after_transaction) - flt(qty_dict.bal_qty)
else:
qty_diff = flt(entry.actual_qty)

View File

@@ -3,6 +3,7 @@
import frappe
from frappe.query_builder.functions import Coalesce, Sum
from frappe.utils import cstr, flt, now, nowdate, nowtime
from erpnext.controllers.stock_controller import create_repost_item_valuation_entry
@@ -182,18 +183,67 @@ def get_indented_qty(item_code, warehouse):
def get_ordered_qty(item_code, warehouse):
ordered_qty = frappe.db.sql(
"""
select sum((po_item.qty - po_item.received_qty)*po_item.conversion_factor)
from `tabPurchase Order Item` po_item, `tabPurchase Order` po
where po_item.item_code=%s and po_item.warehouse=%s
and po_item.qty > po_item.received_qty and po_item.parent=po.name
and po.status not in ('Closed', 'Delivered') and po.docstatus=1
and po_item.delivered_by_supplier = 0""",
(item_code, warehouse),
"""Return total pending ordered quantity for an item in a warehouse.
Includes outstanding quantities from Purchase Orders and Subcontracting Orders"""
purchase_order_qty = get_purchase_order_qty(item_code, warehouse)
subcontracting_order_qty = get_subcontracting_order_qty(item_code, warehouse)
return flt(purchase_order_qty) + flt(subcontracting_order_qty)
def get_purchase_order_qty(item_code, warehouse):
	"""Return the pending (ordered but not yet received) quantity of
	``item_code`` in ``warehouse`` across submitted, open Purchase Orders.

	Quantities are converted to stock UOM via each row's conversion factor.
	Closed/Delivered orders and rows delivered directly by the supplier
	(drop-ship) are excluded. May return ``None`` when no rows match
	(SQL ``SUM`` over an empty set); callers wrap the result in ``flt``.
	"""
	PurchaseOrder = frappe.qb.DocType("Purchase Order")
	PurchaseOrderItem = frappe.qb.DocType("Purchase Order Item")

	purchase_order_qty = (
		frappe.qb.from_(PurchaseOrderItem)
		.join(PurchaseOrder)
		.on(PurchaseOrderItem.parent == PurchaseOrder.name)
		.select(
			Sum(
				(PurchaseOrderItem.qty - PurchaseOrderItem.received_qty) * PurchaseOrderItem.conversion_factor
			)
		)
		.where(
			(PurchaseOrderItem.item_code == item_code)
			& (PurchaseOrderItem.warehouse == warehouse)
			& (PurchaseOrderItem.qty > PurchaseOrderItem.received_qty)
			& (PurchaseOrder.status.notin(["Closed", "Delivered"]))
			& (PurchaseOrder.docstatus == 1)
			# Coalesce: rows with NULL delivered_by_supplier count as not drop-shipped.
			& (Coalesce(PurchaseOrderItem.delivered_by_supplier, 0) == 0)
		)
		.run()
	)

	return purchase_order_qty[0][0] if purchase_order_qty else 0
def get_subcontracting_order_qty(item_code, warehouse):
	"""Return the pending (ordered but not yet received) quantity of
	``item_code`` in ``warehouse`` across submitted, open Subcontracting Orders.

	Quantities are converted to stock UOM via each row's conversion factor.
	Closed/Completed orders are excluded. May return ``None`` when no rows
	match (SQL ``SUM`` over an empty set); callers wrap the result in ``flt``.
	"""
	SubcontractingOrder = frappe.qb.DocType("Subcontracting Order")
	SubcontractingOrderItem = frappe.qb.DocType("Subcontracting Order Item")

	subcontracting_order_qty = (
		frappe.qb.from_(SubcontractingOrderItem)
		.join(SubcontractingOrder)
		.on(SubcontractingOrderItem.parent == SubcontractingOrder.name)
		.select(
			Sum(
				(SubcontractingOrderItem.qty - SubcontractingOrderItem.received_qty)
				* SubcontractingOrderItem.conversion_factor
			)
		)
		.where(
			(SubcontractingOrderItem.item_code == item_code)
			& (SubcontractingOrderItem.warehouse == warehouse)
			& (SubcontractingOrderItem.qty > SubcontractingOrderItem.received_qty)
			& (SubcontractingOrder.status.notin(["Closed", "Completed"]))
			& (SubcontractingOrder.docstatus == 1)
		)
		.run()
	)

	return subcontracting_order_qty[0][0] if subcontracting_order_qty else 0
def get_planned_qty(item_code, warehouse):

View File

@@ -827,7 +827,6 @@ class update_entries_after:
if not self.validate_negative_stock(sle):
self.wh_data.qty_after_transaction += flt(sle.actual_qty)
return
# Get dynamic incoming/outgoing rate
if not self.args.get("sle_id"):
self.get_dynamic_incoming_outgoing_rate(sle)
@@ -2328,6 +2327,7 @@ def get_incoming_rate_for_inter_company_transfer(sle) -> float:
For inter company transfer, incoming rate is the average of the outgoing rate
"""
rate = 0.0
lcv_rate = 0.0
field = "delivery_note_item" if sle.voucher_type == "Purchase Receipt" else "sales_invoice_item"
@@ -2342,7 +2342,15 @@ def get_incoming_rate_for_inter_company_transfer(sle) -> float:
"incoming_rate",
)
return rate
# add lcv amount in incoming_rate
lcv_amount = frappe.db.get_value(
f"{sle.voucher_type} Item", sle.voucher_detail_no, "landed_cost_voucher_amount"
)
if lcv_amount:
lcv_rate = flt(lcv_amount / abs(sle.actual_qty))
return rate + lcv_rate
def is_internal_transfer(sle):

View File

@@ -12,7 +12,7 @@ from erpnext.stock.doctype.stock_reservation_entry.stock_reservation_entry impor
StockReservation,
has_reserved_stock,
)
from erpnext.stock.stock_balance import update_bin_qty
from erpnext.stock.stock_balance import get_ordered_qty, update_bin_qty
from erpnext.stock.utils import get_bin
@@ -234,30 +234,7 @@ class SubcontractingOrder(SubcontractingController):
):
item_wh_list.append([item.item_code, item.warehouse])
for item_code, warehouse in item_wh_list:
update_bin_qty(item_code, warehouse, {"ordered_qty": self.get_ordered_qty(item_code, warehouse)})
@staticmethod
def get_ordered_qty(item_code, warehouse):
table = frappe.qb.DocType("Subcontracting Order")
child = frappe.qb.DocType("Subcontracting Order Item")
query = (
frappe.qb.from_(table)
.inner_join(child)
.on(table.name == child.parent)
.select((child.qty - child.received_qty) * child.conversion_factor)
.where(
(table.docstatus == 1)
& (child.item_code == item_code)
& (child.warehouse == warehouse)
& (child.qty > child.received_qty)
& (table.status != "Completed")
)
)
query = query.run()
return flt(query[0][0]) if query else 0
update_bin_qty(item_code, warehouse, {"ordered_qty": get_ordered_qty(item_code, warehouse)})
def update_reserved_qty_for_subcontracting(self, sco_item_rows=None):
for item in self.supplied_items:

View File

@@ -617,6 +617,117 @@ class TestSubcontractingReceipt(IntegrationTestCase):
for item in scr.supplied_items:
self.assertFalse(item.available_qty_for_consumption)
def test_supplied_items_consumed_qty_for_similar_finished_goods(self):
"""
Test that supplied raw material consumption is calculated correctly
when multiple subcontracted service items use the same finished good
but different BOMs.
"""
from erpnext.controllers.subcontracting_controller import (
make_rm_stock_entry as make_subcontract_transfer_entry,
)
from erpnext.manufacturing.doctype.production_plan.test_production_plan import make_bom
# Configuration: Backflush based on subcontract material transfer
set_backflush_based_on("Material Transferred for Subcontract")
# Create Raw Materials
raw_material_1 = make_item("_RM Item 1", properties={"is_stock_item": 1}).name
raw_material_2 = make_item("_RM Item 2", properties={"is_stock_item": 1}).name
# Create Subcontracted Finished Good
finished_good = make_item("_Finished Good Item", properties={"is_stock_item": 1})
finished_good.is_sub_contracted_item = 1
finished_good.save()
# Receive Raw Materials into Warehouse
for raw_material in (raw_material_1, raw_material_2):
make_stock_entry(
item_code=raw_material,
qty=10,
target="_Test Warehouse - _TC",
basic_rate=100,
)
# Create BOMs for the same Finished Good with different RMs
bom_rm_1 = make_bom(
item=finished_good.name,
quantity=1,
raw_materials=[raw_material_1],
).name
_bom_rm_2 = make_bom(
item=finished_good.name,
quantity=1,
raw_materials=[raw_material_2],
).name
# Define Subcontracted Service Items
service_items = [
{
"warehouse": "_Test Warehouse - _TC",
"item_code": "Subcontracted Service Item 1",
"qty": 1,
"rate": 100,
"fg_item": finished_good.name,
"fg_item_qty": 10,
},
{
"warehouse": "_Test Warehouse - _TC",
"item_code": "Subcontracted Service Item 1",
"qty": 1,
"rate": 150,
"fg_item": finished_good.name,
"fg_item_qty": 10,
},
]
# Create Subcontracting Order
subcontracting_order = get_subcontracting_order(
service_items=service_items,
do_not_save=True,
)
# Assign BOM only to the first service item
subcontracting_order.items[0].bom = bom_rm_1
subcontracting_order.save()
subcontracting_order.submit()
# Prepare Raw Material Transfer Items
raw_material_transfer_items = []
for supplied_item in subcontracting_order.supplied_items:
raw_material_transfer_items.append(
{
"item_code": supplied_item.main_item_code,
"rm_item_code": supplied_item.rm_item_code,
"qty": supplied_item.required_qty,
"warehouse": "_Test Warehouse - _TC",
"stock_uom": "Nos",
}
)
# Transfer Raw Materials to Subcontractor Warehouse
stock_entry = frappe.get_doc(
make_subcontract_transfer_entry(
subcontracting_order.name,
raw_material_transfer_items,
)
)
stock_entry.to_warehouse = "_Test Warehouse 1 - _TC"
stock_entry.save()
stock_entry.submit()
# Create Subcontracting Receipt
subcontracting_receipt = make_subcontracting_receipt(subcontracting_order.name)
subcontracting_receipt.save()
# Check consumed_qty for each supplied item
self.assertEqual(len(subcontracting_receipt.supplied_items), 2)
self.assertEqual(subcontracting_receipt.supplied_items[0].consumed_qty, 10)
self.assertEqual(subcontracting_receipt.supplied_items[1].consumed_qty, 10)
def test_supplied_items_cost_after_reposting(self):
# Set Backflush Based On as "BOM"
set_backflush_based_on("BOM")

View File

@@ -219,7 +219,7 @@
"collapsible": 1,
"indent": 0,
"keep_closed": 0,
"label": "Payment Reconciliaition",
"label": "Payment Reconciliation",
"link_to": "Payment Reconciliation",
"link_type": "DocType",
"show_arrow": 0,

View File

@@ -89,6 +89,18 @@
"show_arrow": 0,
"type": "Section Break"
},
{
"child": 1,
"collapsible": 1,
"icon": "",
"indent": 0,
"keep_closed": 0,
"label": "Item Lead Time",
"link_to": "Item Lead Time",
"link_type": "DocType",
"show_arrow": 0,
"type": "Link"
},
{
"child": 1,
"collapsible": 1,
@@ -425,7 +437,7 @@
"type": "Link"
}
],
"modified": "2026-01-10 00:06:13.058137",
"modified": "2026-01-29 16:41:40.416652",
"modified_by": "Administrator",
"module": "Manufacturing",
"name": "Manufacturing",

View File

@@ -13,7 +13,7 @@
"indent": 0,
"keep_closed": 0,
"label": "Sales Tax Template",
"link_to": "Item Tax Template",
"link_to": "Sales Taxes and Charges Template",
"link_type": "DocType",
"navigate_to_tab": "",
"show_arrow": 0,
@@ -148,7 +148,7 @@
"type": "Link"
}
],
"modified": "2026-01-10 00:06:13.005238",
"modified": "2026-02-01 00:00:00.000000",
"modified_by": "Administrator",
"module": "Accounts",
"name": "Taxes",

View File

@@ -0,0 +1,230 @@
# Transaction Deletion CSV Import Logic - Updated Behavior
## Auto-Detection of Company Field
When importing a CSV without a `company_field` column or with empty values, the system uses smart auto-detection:
### Priority Order:
1. **"company" field** (most common convention)
- Check if a field named `company` exists that links to Company DocType
- ✅ Use "company" if found
2. **First Company link field** (custom fields)
- If no "company" field, get all fields linking to Company DocType
- ✅ Use the first one (sorted by field index)
3. **No company field** (DocTypes without company filtering)
- If no Company link fields exist at all
- ✅ Leave `company_field` as None/empty
- ✅ Delete ALL records (no company filtering)
## Import CSV Format
### Minimal Format (Auto-Detection)
```csv
doctype_name,child_doctypes
Sales Order,Sales Order Item
Note,
Task,
```
**Result:**
- `Sales Order`: Auto-detects "company" field → Filters by company
- `Note`: No company field → Deletes all Note records
- `Task`: Has "company" field → Filters by company
### Explicit Format (Recommended)
```csv
doctype_name,company_field,child_doctypes
Sales Order,company,Sales Order Item
Sales Contract,primary_company,Sales Contract Item
Sales Contract,billing_company,Sales Contract Item
Note,,
```
**Result:**
- `Sales Order`: Uses "company" field explicitly
- `Sales Contract` (row 1): Uses "primary_company" field
- `Sales Contract` (row 2): Uses "billing_company" field (separate row!)
- `Note`: No company field, deletes all records
### Multiple Company Fields Example
```csv
doctype_name,company_field,child_doctypes
Customer Invoice,head_office,Customer Invoice Item
Customer Invoice,billing_company,Customer Invoice Item
```
**Deletion Process:**
1. Row 1 deletes: `WHERE head_office = 'ABC Company'`
2. Row 2 deletes: `WHERE billing_company = 'ABC Company'`
3. Documents with both fields = ABC get deleted in first pass
4. Documents with only billing_company = ABC get deleted in second pass
## Validation Rules
### ✅ Accepted Cases
1. **DocType with "company" field** - Auto-detected
2. **DocType with custom Company link field** - Auto-detected (first field used)
3. **DocType with multiple Company fields** - Auto-detected (first field used), but user can add multiple rows
4. **DocType with NO Company fields** - Accepted! Deletes ALL records
5. **Explicit company_field provided** - Validated and used
### ❌ Rejected Cases
1. **Protected DocTypes** - User, Role, DocType, etc.
2. **Child tables** - Auto-deleted with parent
3. **Virtual DocTypes** - No database table
4. **Invalid company_field** - Field doesn't exist or isn't a Company link
5. **DocType doesn't exist** - Not found in system
## Code Flow
```python
# 1. Read company_field from CSV (may be empty)
company_field = row.get("company_field", "").strip()
# 2. Auto-detect if not provided
if not company_field:
# Try "company" first
if exists("company" field linking to Company):
company_field = "company"
else:
# Check for other Company link fields
company_fields = get_all_company_link_fields()
if company_fields:
company_field = company_fields[0] # Use first
# else: company_field stays empty
# 3. Validate if company_field was provided/detected
if company_field:
if not is_valid_company_link_field(company_field):
skip_with_error()
# 4. Count documents
if company_field:
count = count(WHERE company_field = self.company)
else:
count = count(all records)
# 5. Store in To Delete list
append({
"doctype_name": doctype_name,
"company_field": company_field or None, # Store None if empty
"document_count": count
})
```
## Examples
### Example 1: Standard DocType with "company" Field
**CSV:**
```csv
doctype_name,company_field,child_doctypes
Sales Order,,
```
**Auto-Detection:**
- Finds "company" field linking to Company
- Sets `company_field = "company"`
- Counts: `WHERE company = 'Test Company'`
- Result: Deletes only Test Company's Sales Orders
### Example 2: Custom Company Field
**CSV:**
```csv
doctype_name,company_field,child_doctypes
Project Contract,,
```
**Auto-Detection:**
- No "company" field found
- Finds "contracting_company" field linking to Company
- Sets `company_field = "contracting_company"`
- Counts: `WHERE contracting_company = 'Test Company'`
- Result: Deletes only Test Company's Project Contracts
### Example 3: No Company Field (Global DocType)
**CSV:**
```csv
doctype_name,company_field,child_doctypes
Note,,
Global Settings,,
```
**Auto-Detection:**
- No Company link fields found
- Sets `company_field = None`
- Counts: All records
- Result: Deletes ALL Note and Global Settings records
### Example 4: Multiple Company Fields (Explicit)
**CSV:**
```csv
doctype_name,company_field,child_doctypes
Sales Contract,primary_company,Sales Contract Item
Sales Contract,billing_company,Sales Contract Item
```
**No Auto-Detection:**
- Row 1: Uses "primary_company" explicitly
- Row 2: Uses "billing_company" explicitly
- Both rows validated as valid Company link fields
- Result: Two separate deletion passes
### Example 5: Mixed Approaches
**CSV:**
```csv
doctype_name,company_field,child_doctypes
Sales Order,,Sales Order Item
Sales Contract,billing_company,Sales Contract Item
Note,,
```
**Result:**
- Row 1: Auto-detects "company" field
- Row 2: Uses "billing_company" explicitly
- Row 3: No company field (deletes all)
## User Benefits
- **Flexible**: Supports auto-detection and explicit specification
- **Safe**: Validates all fields before processing
- **Clear**: Empty company_field means "delete all"
- **Powerful**: Can target specific company fields in multi-company setups
- **Backward Compatible**: Old CSVs (without company_field column) still work
## Migration from Old Format
**Old CSV (without company_field):**
```csv
doctype_name,child_doctypes
Sales Order,Sales Order Item
```
**New System Behavior:**
- Auto-detects "company" field
- Works identically to before
- ✅ Backward compatible
**New CSV (with company_field):**
```csv
doctype_name,company_field,child_doctypes
Sales Order,company,Sales Order Item
```
**Benefits:**
- Explicit and clear
- Supports multiple rows per DocType
- Can specify custom company fields
---
*Generated for Transaction Deletion Record enhancement*