@comment{pbalapra-software.bib}
@url{automomml2016,
author = {P. Balaprakash and A. Tiwari and S. M. Wild and L. Carrington and P. D. Hovland},
date-added = {2019-02-10 21:03:47 +0000},
date-modified = {2019-02-10 21:03:47 +0000},
note = {AutoMOMML is an end-to-end, machine-learning-based framework to build predictive models for objectives such as performance, and power. The framework adopts statistical approaches to reduce the modeling complexity and automatically identifies and configures the most suitable learning algorithm to model the required objectives based on hardware and application signatures.},
title = {{AutoMOMML}: Automatic Multi-Objective Modeling with Machine Learning},
url = {https://xgitlab.cels.anl.gov/pbalapra/automomml},
year = {2016}
}
@url{surf2015,
author = {P. Balaprakash},
date-added = {2017-01-08 17:29:35 +0000},
date-modified = {2017-09-22 16:34:17 +0000},
note = {SuRF is a model-based search module for automatic performance tuning. It adopts random forest supervised learning algorithm for modeling the performances as a function of input parameters within the search. SuRF samples a small number of parameter configurations, empirically evaluating the corresponding code variants to obtain the corresponding performance metrics, and fitting a surrogate model over the input-output space. The surrogate model is then iteratively refined by obtaining new output metrics at unevaluated input configurations predicted to be high-performing by the model. Implemented in Python and available with the Orio autotuning framework.},
title = {{SuRF}: Search Using Random Forest},
url = {https://github.com/brnorris03/Orio/tree/master/orio/main/tuner/search/mlsearch},
year = {2015}
}
@url{deephyper2018,
author = {P. Balaprakash and R. Egele and M. Salim and V. Vishwanath and S. M. Wild},
date-added = {2017-01-08 17:08:22 +0000},
date-modified = {2019-09-09 03:02:11 +0000},
note = {DeepHyper: Scalable automated machine learning package with two components: 1) Reinforcement-learning-based neural architecture search for automatically constructing high-performing deep neural network architectures; 2) Asynchronous model-based search for finding high-performing hyperparameters for deep neural networks.},
title = {{DeepHyper}: Scalable Asynchronous Neural Architecture and Hyperparameter Search for Deep Neural Networks},
url = {https://github.com/deephyper/deephyper},
year = {2018}
}
@url{fpgaopt2016,
author = {P. Balaprakash and A. Mametjanov and C. Choudary and P. D. Hovland and S. M. Wild and G. Sabin},
date-added = {2016-07-11 01:08:34 +0000},
date-modified = {2016-07-30 21:04:42 +0000},
note = {A machine-learning-based approach to tune FPGA design parameters. It performs sampling-based reduction of the parameter space and guides the search toward promising parameter configurations.},
title = {{FPGAtuner}: Autotuning {FPGA} Design Parameters for Performance and Power},
url = {https://xgitlab.cels.anl.gov/pbalapra/fpgaopt},
year = {2016}
}
@url{supuds2017,
author = {P. Agarwal and P. Balaprakash and S. Leyffer and S. M. Wild},
date-added = {2016-07-11 01:03:27 +0000},
date-modified = {2017-01-08 17:27:37 +0000},
note = {SPUDS is a machine-learning pipeline to build classification models to rank food establishments that are at most risk for the types of violations most likely to spread food-borne illness. The pipeline balances the training data with resampling techniques, identifies the most important factors leading to critical violations via variable importance and variable selection methods, evaluates several state-of-the-art learning algorithms with cross validation, and finally combines the best performing ones via bagging. It is a customized version of AutoMOMML, exclusively designed for City of Chicago Smart Data Platform, and implemented in Python.},
title = {{SPUDS}: Smart Pipeline for Urban Data Science},
url = {https://xgitlab.cels.anl.gov/uda/ml-city},
year = {2017}
}
@url{els-ptsp2009,
author = {P. Balaprakash and M. Birattari and T. St{\"u}tzle},
date-added = {2015-09-28 21:13:24 +0000},
date-modified = {2015-09-28 21:20:46 +0000},
note = {This software package provides a high-performance implementation of the estimation-based iterative improvement algorithm to tackle the probabilistic traveling salesman problem. A key novelty of the proposed algorithm is that the cost difference between two neighbor solutions is estimated by partial evaluation, adaptive sampling, and importance sampling. Developed in C with GNU scientific library under Linux.},
title = {{ELS-PTSP}: Estimation-Based Local Search for the Probabilistic Traveling Salesman Problem},
url = {https://github.com/pbalapra/els-ptsp},
year = {2009}
}
@url{irace2010,
author = {M. L{\'o}pez-Ib{\'a}{\~n}ez and J. Dubois-Lacoste and T. St{\"u}tzle and M. Birattari and E. Yuan and P. Balaprakash},
date-added = {2015-09-28 21:10:03 +0000},
date-modified = {2015-09-28 21:22:39 +0000},
note = {The irace package implements the iterated racing procedure, which is an extension of the Iterated F-race procedure. Its main purpose is to automatically configure optimization algorithms by finding the most appropriate settings given a set of instances of an optimization problem. It builds upon the race package by Birattari, and it is implemented in R.},
title = {The {irace} Package: Iterated Race for Automatic Algorithm Configuration},
url = {http://cran.r-project.org/web/packages/irace/},
year = {2010}
}
@url{spapt2011,
author = {P. Balaprakash and S. M. Wild and B. Norris},
date-added = {2015-09-23 15:17:02 +0000},
date-modified = {2015-09-28 21:20:33 +0000},
note = {A set of extensible and portable search problems in automatic performance tuning whose goal is to aid in the development and improvement of search strategies and performance-improving transformations. SPAPT contains representative implementations from a number of lower-level, serial performance-tuning tasks in scientific applications. Available with the Orio autotuning framework.},
title = {{SPAPT}: {S}earch {P}roblems in {A}utomatic {P}erformance {T}uning},
url = {https://github.com/brnorris03/Orio/tree/master/testsuite/SPAPT},
year = {2011}
}