Saturday, May 30, 2015

Filtering the Opportunity Product lookup (the "Existing Product" lookup on the Opportunity Product entity)
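The script below registers a filtered custom view on the "Existing Product" (productid) lookup so that only products flagged as sales products are offered. AddNewView2 defines the view parameters and locks the view picker, while AdvancedFilteredLookup2 builds the FetchXML and LayoutXML and attaches the view to the lookup control with addCustomView.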

function AddNewView2() {

    try {
        // Unlock the view picker before registering the custom view.
        // Note: changing "disableViewPicker" through the DOM with jQuery is an unsupported customization.
        $("#productid").attr("disableViewPicker", "0");

        // Parameters
        var customViewId = "FD140AAF-4DF4-11DD-BD17-0019B9312238"; // id for the new custom view
        var customViewName = "Sales Products";
        var lookupFieldName = "productid";
        var entityName = "product";
        var primaryKeyName = "productid";
        var primaryFieldName = "productid";

        // Create the custom Dynamics view and set it as the default view on the lookup
        AdvancedFilteredLookup2(lookupFieldName, customViewId, entityName, primaryKeyName, primaryFieldName, customViewName);

        // Lock the view picker again so only the custom view can be selected
        $("#productid").attr("disableViewPicker", "1");
    }
    catch (err) {
        // Ignore errors so the form still loads if the lookup control is not present
    }
}
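This kind of function is typically registered on the Opportunity Product form's OnLoad event (through the form properties dialog), so the custom view is already in place before the user opens the lookup.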

// Advanced Filtered Lookup: registers a custom view that only returns sales products
function AdvancedFilteredLookup2(lookupSchemaName, viewId, entityName, primaryKeyName, primaryFieldName, viewDisplayName) {
    // FetchXML: only products where the custom flag ti_salesproduct equals 1
    var fetchXml = "<fetch version='1.0' output-format='xml-platform' mapping='logical'>" +
        "<entity name='" + entityName + "'>" +
        "<attribute name='name' />" +
        "<attribute name='producttypecode' />" +
        "<attribute name='productnumber' />" +
        "<attribute name='" + primaryFieldName + "' />" +
        "<order attribute='name' descending='false' />" +
        "<filter type='and'>" +
        "<condition attribute='ti_salesproduct' operator='eq' value='1' />" +
        "</filter>" +
        "</entity>" +
        "</fetch>";

    // LayoutXML: columns shown in the lookup dialog (1024 is the object type code of the product entity)
    var layoutXml = "<grid name='resultset' " +
        "object='1024' " +
        "jump='name' " +
        "select='1' " +
        "icon='1' " +
        "preview='1'>" +
        "<row name='result' " +
        "id='" + primaryKeyName + "'>";

    layoutXml += "<cell name='name' width='300' />";
    layoutXml += "<cell name='productnumber' width='100' />";
    layoutXml += "<cell name='producttypecode' width='150' />";
    layoutXml += "</row></grid>";

    try {
        // Add the custom view to the lookup control and make it the default view
        var lookupControl = Xrm.Page.ui.controls.get(lookupSchemaName);
        lookupControl.addCustomView(viewId, entityName, viewDisplayName, fetchXml, layoutXml, true);
    }
    catch (err) {
        // Ignore errors if the control is not available on the form
    }
}
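If you only need to restrict the search results rather than present a separately named view, the same ti_salesproduct condition can also be applied through the supported addPreSearch/addCustomFilter methods, which avoids the unsupported DOM call on disableViewPicker. A minimal sketch, assuming the script runs on the Opportunity Product form and that the hypothetical filterSalesProducts function is registered on the form OnLoad event:

function filterSalesProducts() {
    var productControl = Xrm.Page.getControl("productid");
    if (productControl === null) {
        return;
    }
    // Each time the lookup is about to search, append the custom filter.
    productControl.addPreSearch(function () {
        // Same condition as the FetchXML above: only products flagged as sales products.
        var filterXml = "<filter type='and'>" +
            "<condition attribute='ti_salesproduct' operator='eq' value='1' />" +
            "</filter>";
        productControl.addCustomFilter(filterXml, "product");
    });
}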

