From 7b7775bec211e3470d4baf5fb65bc8f34ea08906 Mon Sep 17 00:00:00 2001
From: Tiehang Tim Duan
Date: Mon, 8 Mar 2021 12:33:07 -0800
Subject: [PATCH] feature_segmented_histogram_binning_calibration

Summary:
We implement a hierarchical, fine-grained binning structure, with the top
level corresponding to different feature segments and the bottom level
corresponding to different ranges of ECTR. The model is designed to be
general enough to perform segmented calibration on any useful feature.

Test Plan:
buck test dper3/dper3/modules/calibration/tests:calibration_test -- test_histogram_binning_calibration_by_feature
buck test dper3/dper3_models/ads_ranking/model_impl/mtml/tests:mtml_lib_test -- test_multi_label_dependent_task_with_histogram_binning_calibration_by_feature

e2e test:
buck test dper3/dper3_models/ads_ranking/tests:model_paradigm_e2e_tests -- test_sparse_nn_histogram_binning_calibration_by_feature
buck test dper3/dper3_models/ads_ranking/tests:model_paradigm_e2e_tests -- test_mtml_with_dependent_task_histogram_binning_calibration_by_feature

All tests passed.

Canary packages:
Backend -> aml.dper2.canary:e0cd05ac9b9e4797a94e930426d76d18
Frontend -> ads_dper3.canary:55819413dd0f4aa1a47362e7869f6b1f

Test FBL jobs:
**SparseNN**
ctr mbl feed f255676727
inline cvr f255677216
**MTML regular task**
offsite cvr f255676719
**MTML dependent task**
mobile cvr f255677551
**DSNN for AI models**
ai oc f255730905
**MIMO for both AI DSNN part and AF SNN part**
mimo ig f255683062

Reviewed By: zhongyx12

Differential Revision: D25043060

fbshipit-source-id: 8237cad41db66a09412beb301bc45231e1444d6b
---
 caffe2/operators/batch_sparse_to_dense_op.cc | 26 ++++++++++++++++++++
 1 file changed, 26 insertions(+)

diff --git a/caffe2/operators/batch_sparse_to_dense_op.cc b/caffe2/operators/batch_sparse_to_dense_op.cc
index 355a21ccf8f..e4b3642b35c 100644
--- a/caffe2/operators/batch_sparse_to_dense_op.cc
+++ b/caffe2/operators/batch_sparse_to_dense_op.cc
@@ -113,6 +113,32 @@ after running this operator.
         "2-D dense tensor, with 1st dim = len(lengths), 2nd dim = dense_last_dim"
         "in the arg list, the tensor is of the same data type as `values`."
         "Missing values are filled with default_value")
+    .TensorInferenceFunction([](const OperatorDef& def,
+                                const vector<TensorShape>& in) {
+      ArgumentHelper helper(def);
+      vector<int64_t> output_dims;
+      if (in.size() == 4) {
+        const auto& inference_dims = GetDimsVector(in[3]);
+        output_dims.insert(output_dims.end(), inference_dims.begin(), inference_dims.end());
+        const int dense_last_dim = helper.GetSingleArgument<int>("dense_last_dim", 0);
+        if (dense_last_dim > 0) {
+          CAFFE_ENFORCE(
+              output_dims.back() == dense_last_dim,
+              "The last dim of output_shape_inference should be consistent with dense_last_dim");
+        }
+      } else {
+        const int dense_last_dim = helper.GetSingleArgument<int>("dense_last_dim", 0);
+        CAFFE_ENFORCE(
+            dense_last_dim > 0,
+            "dense_last_dim must be set when output shape inference is unavailable");
+        const auto& lens_dims = GetDimsVector(in[0]);
+        output_dims.insert(output_dims.end(), lens_dims[0]);
+        output_dims.insert(output_dims.end(), dense_last_dim);
+      }
+      vector<TensorShape> out(1);
+      out[0] = CreateTensorShape(output_dims, in[2].data_type());
+      return out;
+    })
     .Arg(
         "dense_last_dim",
         "Optional, output dense last dimension. "
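
Note: for readers unfamiliar with Caffe2 shape inference, the following is a minimal standalone sketch of the rule the hunk above adds. It uses plain std::vector<int64_t> shapes instead of the Caffe2 TensorShape/ArgumentHelper types, and the helper name InferBatchSparseToDenseShape is hypothetical, not part of the operator's API.

```cpp
// Standalone sketch (assumptions: plain shape vectors stand in for TensorShape;
// InferBatchSparseToDenseShape is a hypothetical illustration, not Caffe2 code).
#include <cassert>
#include <cstdint>
#include <iostream>
#include <vector>

// inputs: shapes of (lengths, indices, values[, output_shape_inference])
// dense_last_dim: value of the "dense_last_dim" argument (0 if unset)
std::vector<int64_t> InferBatchSparseToDenseShape(
    const std::vector<std::vector<int64_t>>& inputs,
    int dense_last_dim) {
  std::vector<int64_t> output_dims;
  if (inputs.size() == 4) {
    // The optional fourth input supplies the output shape directly;
    // dense_last_dim, if given, must agree with its last dimension.
    output_dims = inputs[3];
    if (dense_last_dim > 0) {
      assert(output_dims.back() == dense_last_dim &&
             "last dim of output_shape_inference must match dense_last_dim");
    }
  } else {
    // Without the fourth input the argument is mandatory and the output
    // shape is [len(lengths), dense_last_dim].
    assert(dense_last_dim > 0 &&
           "dense_last_dim must be set when shape inference input is absent");
    output_dims.push_back(inputs[0][0]);
    output_dims.push_back(dense_last_dim);
  }
  return output_dims;
}

int main() {
  // Three inputs: lengths has 8 rows, so with dense_last_dim = 16 the dense
  // output shape is inferred as [8, 16].
  auto shape = InferBatchSparseToDenseShape({{8}, {40}, {40}}, 16);
  std::cout << shape[0] << " x " << shape[1] << "\n";  // prints "8 x 16"
  return 0;
}
```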