Merge pull request #42928 from Axentorllc/Fix-BOM-Scrap
fix: get scrap items from sub-assemblies by also fetching the scrap items of the parent BOM
rohitwaghchaure authored Jan 16, 2025
2 parents b6ff79f + 8e33d93 commit a11c15a
Showing 2 changed files with 16 additions and 0 deletions.
3 changes: 3 additions & 0 deletions erpnext/manufacturing/doctype/bom/bom.py
@@ -1549,6 +1549,9 @@ def get_scrap_items_from_sub_assemblies(bom_no, company, qty, scrap_items=None):
fields=["bom_no", "qty"],
order_by="idx asc",
)
# fetch scrap items defined on the parent BOM itself
items = get_bom_items_as_dict(bom_no, company, qty=qty, fetch_exploded=0, fetch_scrap_items=1)
scrap_items.update(items)

for row in bom_items:
if not row.bom_no:
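For context, a minimal sketch of how the patched helper might read in full. Only the three added lines above are from this commit; the initialization of scrap_items, the exact frappe.get_all filters, and the recursion over child BOM rows are assumptions reconstructed from the surrounding hunk context.

def get_scrap_items_from_sub_assemblies(bom_no, company, qty, scrap_items=None):
	# assumed initialization; not visible in the diff context
	if not scrap_items:
		scrap_items = {}

	# child BOM rows of the current BOM (partially visible as context lines above);
	# the doctype name and filters are assumptions
	bom_items = frappe.get_all(
		"BOM Item",
		filters={"parent": bom_no, "docstatus": 1},
		fields=["bom_no", "qty"],
		order_by="idx asc",
	)

	# added by this commit: also fetch the scrap items of the parent BOM
	items = get_bom_items_as_dict(bom_no, company, qty=qty, fetch_exploded=0, fetch_scrap_items=1)
	scrap_items.update(items)

	# recurse into each sub-assembly BOM so its scrap items are collected as well
	for row in bom_items:
		if not row.bom_no:
			continue
		get_scrap_items_from_sub_assemblies(row.bom_no, company, row.qty * qty, scrap_items)

	return scrap_items

Because scrap_items is threaded through the recursion, updating it with the parent BOM's own scrap rows at each level means every BOM in the tree contributes its scrap items, including the top-level BOM, which was previously skipped.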
13 changes: 13 additions & 0 deletions erpnext/manufacturing/doctype/bom/test_bom.py
@@ -755,6 +755,19 @@ def test_do_not_include_manufacturing_and_fixed_items(self):
self.assertTrue("_Test RM Item 2 Fixed Asset Item" not in items)
self.assertTrue("_Test RM Item 3 Manufacture Item" in items)

def test_get_scrap_items_from_sub_assemblies(self):
from erpnext.manufacturing.doctype.bom.bom import get_scrap_items_from_sub_assemblies

bom = frappe.copy_doc(test_records[1])
bom.insert(ignore_mandatory=True)

bom_scraped_items = [i.get("item_code") for i in bom.get("scrap_items", [])]

# get scrap items for the parent BOM
scraped_items = get_scrap_items_from_sub_assemblies(bom.name, bom.company, 2, None)
for item_code in scraped_items.keys():
self.assertIn(item_code, bom_scraped_items, f"Item {item_code} not found in BOM scrap items")

def test_bom_raw_materials_stock_uom(self):
rm_item = make_item(
properties={"is_stock_item": 1, "valuation_rate": 1000.0, "stock_uom": "Nos"}
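As a usage illustration (not part of this commit), the helper can be called directly, as the test above does. The returned value is a dict keyed by item code; the BOM name, company, and the "qty" field on each row shown here are assumptions for the sketch.

# hypothetical caller: collect scrap items across an entire BOM tree
scrap_items = get_scrap_items_from_sub_assemblies("BOM-ITEM-0001", "_Test Company", 2, None)
for item_code, row in scrap_items.items():
	# rows come from get_bom_items_as_dict, so a "qty" key is assumed to be present
	print(item_code, row.get("qty"))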
