@BOOK{NAP author = "National Academies of Sciences, Engineering, and Medicine", title = "Analytic Research Foundations for the Next-Generation Electric Grid", isbn = "978-0-309-39231-0", abstract = "Electricity is the lifeblood of modern society, and for the vast majority of people that electricity is obtained from large, interconnected power grids. However, the grid that was developed in the 20th century, and the incremental improvements made since then, including its underlying analytic foundations, is no longer adequate to completely meet the needs of the 21st century. The next-generation electric grid must be more flexible and resilient. While fossil fuels will have their place for decades to come, the grid of the future will need to accommodate a wider mix of more intermittent generating sources such as wind and distributed solar photovoltaics. \n\nAchieving this grid of the future will require effort on several fronts. There is a need for continued shorter-term engineering research and development, building on the existing analytic foundations for the grid. But there is also a need for more fundamental research to expand these analytic foundations. Analytic Research Foundations for the Next-Generation Electric Grid provide guidance on the longer-term critical areas for research in mathematical and computational sciences that is needed for the next-generation grid. It offers recommendations that are designed to help direct future research as the grid evolves and to give the nation's research and development infrastructure the tools it needs to effectively develop, test, and use this research.", url = "https://nap.nationalacademies.org/catalog/21919/analytic-research-foundations-for-the-next-generation-electric-grid", year = 2016, publisher = "The National Academies Press", address = "Washington, DC" } @BOOK{NAP author = "National Research Council", editor = "Francisco Samaniego and Michael Cohen", title = "Reliability Issues for DOD Systems: Report of a Workshop", isbn = "978-0-309-08606-6", abstract = "The final report of the National Research Council's (NRC) Panel on Statistical Methods for Testing and Evaluating Defense Systems (National Research Council, 1998) was intended to provide broad advice to the U.S. Department of Defense (DoD) on current statistical methods and principles that could be applied to the developmental and operational testing and evaluation of defense systems. To that end, the report contained chapters on the use of testing as a tool of system development; current methods of experimental design; evaluation methods; methods for testing and assessing reliability, availability, and maintainability; software development and testing; and validation of modeling and simulation for use in operational test and evaluation. While the examination of such a wide variety of topics was useful in helping DoD understand the breadth of problems for which statistical methods could be applied and providing direction as to how the methods currently used could be improved, there was, quite naturally, a lack of detail in each area.To address the need for further detail, two DoD agencies-the Office of the Director of Operational Test and Evaluation and the Office of the Under Secretary of Defense for Acquisition, Technology, and Logistics-asked the NRC's Committee on National Statistics to initiate a series of workshops on statistical issues relevant to defense acquisition. 
The aim of each workshop is to inform DoD about the methods that represent the statistical state of the art and, through interactions of the statistical and defense communities, explore their relevance for DoD application.", url = "https://nap.nationalacademies.org/catalog/10561/reliability-issues-for-dod-systems-report-of-a-workshop", year = 2002, publisher = "The National Academies Press", address = "Washington, DC" } @BOOK{NAP title = "(NAS Colloquium) Elliptic Curves and Modular Forms", url = "https://nap.nationalacademies.org/catalog/6235/nas-colloquium-elliptic-curves-and-modular-forms", year = 1998, publisher = "The National Academies Press", address = "Washington, DC" } @BOOK{NAP author = "National Research Council", editor = "Maureen Mellody", title = "Robust Methods for the Analysis of Images and Videos for Fisheries Stock Assessment: Summary of a Workshop", isbn = "978-0-309-31469-5", abstract = "The National Marine Fisheries Service (NMFS) is responsible for the stewardship of the nation's living marine resources and their habitat. As part of this charge, NMFS conducts stock assessments of the abundance and composition of fish stocks in several bodies of water. At present, stock assessments rely heavily on human data-gathering and analysis. Automatic means of fish stock assessments are appealing because they offer the potential to improve efficiency and reduce human workload and perhaps develop higher-fidelity measurements. The use of images and video, when accompanies by appropriate statistical analyses of the inferred data, is of increasing importance for estimating the abundance of species and their age distributions.\nRobust Methods for the Analysis of Images and Videos for Fisheries Stock Assessment is the summary of a workshop convened by the National Research Council Committee on Applied and Theoretical Statistics to discuss analysis techniques for images and videos for fisheries stock assessment. Experts from diverse communities shared perspective about the most efficient path toward improved automation of visual information and discussed both near-term and long-term goals that can be achieved through research and development efforts. This report is a record of the presentations and discussions of this event.", url = "https://nap.nationalacademies.org/catalog/18986/robust-methods-for-the-analysis-of-images-and-videos-for-fisheries-stock-assessment", year = 2015, publisher = "The National Academies Press", address = "Washington, DC" } @BOOK{NAP author = "National Academies of Sciences, Engineering, and Medicine", title = "Strengthening Data Science Methods for Department of Defense Personnel and Readiness Missions", isbn = "978-0-309-45078-2", abstract = "The Office of the Under Secretary of Defense (Personnel & Readiness), referred to throughout this report as P&R, is responsible for the total force management of all Department of Defense (DoD) components including the recruitment, readiness, and retention of personnel. Its work and policies are supported by a number of organizations both within DoD, including the Defense Manpower Data Center (DMDC), and externally, including the federally funded research and development centers (FFRDCs) that work for DoD. P&R must be able to answer questions for the Secretary of Defense such as how to recruit people with an aptitude for and interest in various specialties and along particular career tracks and how to assess on an ongoing basis service members' career satisfaction and their ability to meet new challenges. 
P&R must also address larger-scale questions, such as how the current realignment of forces to the Asia-Pacific area and other regions will affect recruitment, readiness, and retention. \n\nWhile DoD makes use of large-scale data and mathematical analysis in intelligence, surveillance, reconnaissance, and elsewhere\u2014exploiting techniques such as complex network analysis, machine learning, streaming social media analysis, and anomaly detection\u2014these skills and capabilities have not been applied as well to the personnel and readiness enterprise. Strengthening Data Science Methods for Department of Defense Personnel and Readiness Missions offers and roadmap and implementation plan for the integration of data analysis in support of decisions within the purview of P&R.", url = "https://nap.nationalacademies.org/catalog/23670/strengthening-data-science-methods-for-department-of-defense-personnel-and-readiness-missions", year = 2017, publisher = "The National Academies Press", address = "Washington, DC" } @BOOK{NAP author = "National Academies of Sciences, Engineering, and Medicine", editor = "Janet L. Lauritsen and Daniel L. Cork", title = "Modernizing Crime Statistics: Report 1: Defining and Classifying Crime", isbn = "978-0-309-44109-4", abstract = "To derive statistics about crime \u2013 to estimate its levels and trends, assess its costs to and impacts on society, and inform law enforcement approaches to prevent it \u2013 a conceptual framework for defining and thinking about crime is virtually a prerequisite. Developing and maintaining such a framework is no easy task, because the mechanics of crime are ever evolving and shifting: tied to shifts and development in technology, society, and legislation. \n\nInterest in understanding crime surged in the 1920s, which proved to be a pivotal decade for the collection of nationwide crime statistics. Now established as a permanent agency, the Census Bureau commissioned the drafting of a manual for preparing crime statistics\u2014intended for use by the police, corrections departments, and courts alike. The new manual sought to solve a perennial problem by suggesting a standard taxonomy of crime. Shortly after the Census Bureau issued its manual, the International Association of Chiefs of Police in convention adopted a resolution to create a Committee on Uniform Crime Records \u2014to begin the process of describing what a national system of data on crimes known to the police might look like. \n\nThe key distinction between the rigorous classification proposed in this report and the \u201cclassifications\u201d that have come before in U.S. crime statistics is that it is intended to partition the entirety of behaviors that could be considered criminal offenses into mutually exclusive categories. Modernizing Crime Statistics: Report 1: Defining and Classifying Crime assesses and makes recommendations for the development of a modern set of crime measures in the United States and the best means for obtaining them. 
This first report develops a new classification of crime by weighing various perspectives on how crime should be defined and organized with the needs and demands of the full array of crime data users and stakeholders.", url = "https://nap.nationalacademies.org/catalog/23492/modernizing-crime-statistics-report-1-defining-and-classifying-crime", year = 2016, publisher = "The National Academies Press", address = "Washington, DC" } @BOOK{NAP author = "National Research Council", title = "Statistical Analysis of Massive Data Streams: Proceedings of a Workshop", abstract = "Massive data streams, large quantities of data that arrive continuously, are becoming increasingly commonplace in many areas of science and technology. Consequently development of analytical methods for such streams is of growing importance. To address this issue, the National Security Agency asked the NRC to hold a workshop to explore methods for analysis of streams of data so as to stimulate progress in the field. This report presents the results of that workshop. It provides presentations that focused on five different research areas where massive data streams are present: atmospheric and meteorological data; high-energy physics; integrated data systems; network traffic; and mining commercial data streams. The goals of the report are to improve communication among researchers in the field and to increase relevant statistical science activity.", url = "https://nap.nationalacademies.org/catalog/11098/statistical-analysis-of-massive-data-streams-proceedings-of-a-workshop", year = 2004, publisher = "The National Academies Press", address = "Washington, DC" } @BOOK{NAP author = "National Research Council", title = "Mathematics and Physics of Emerging Biomedical Imaging", isbn = "978-0-309-05387-7", abstract = "This cross-disciplinary book documents the key research challenges in the mathematical sciences and physics that could enable the economical development of novel biomedical imaging devices. It is hoped that the infusion of new insights from mathematical scientists and physicists will accelerate progress in imaging. Incorporating input from dozens of biomedical researchers who described what they perceived as key open problems of imaging that are amenable to attack by mathematical scientists and physicists, this book introduces the frontiers of biomedical imaging, especially the imaging of dynamic physiological functions, to the educated nonspecialist.\nTen imaging modalities are covered, from the well-established (e.g., CAT scanning, MRI) to the more speculative (e.g., electrical and magnetic source imaging). For each modality, mathematics and physics research challenges are identified and a short list of suggested reading offered. Two additional chapters offer visions of the next generation of surgical and interventional techniques and of image processing. A final chapter provides an overview of mathematical issues that cut across the various modalities.", url = "https://nap.nationalacademies.org/catalog/5066/mathematics-and-physics-of-emerging-biomedical-imaging", year = 1996, publisher = "The National Academies Press", address = "Washington, DC" } @BOOK{NAP author = "National Research Council", editor = "Candace Kruttschnitt and William D. Kalsbeek and Carol C. 
House", title = "Estimating the Incidence of Rape and Sexual Assault", isbn = "978-0-309-29737-0", abstract = "The Bureau of Justice Statistics' (BJS) National Crime Victimization Survey (NCVS) measures the rates at which Americans are victims of crimes, including rape and sexual assault, but there is concern that rape and sexual assault are undercounted on this survey. BJS asked the National Research Council to investigate this issue and recommend best practices for measuring rape and sexual assault on their household surveys. Estimating the Incidence of Rape and Sexual Assault concludes that it is likely that the NCVS is undercounting rape and sexual assault. The most accurate counts of rape and sexual assault cannot be achieved without measuring them separately from other victimizations, the report says. It recommends that BJS develop a separate survey for measuring rape and sexual assault. The new survey should more precisely define ambiguous words such as \"rape,\" give more privacy to respondents, and take other steps that would improve the accuracy of responses. Estimating the Incidence of Rape and Sexual Assault takes a fresh look at the problem of measuring incidents of rape and sexual assault from the criminal justice perspective. This report examines issues such as the legal definitions in use by the states for these crimes, best methods for representing the definitions in survey instruments so that their meaning is clear to respondents, and best methods for obtaining as complete reporting as possible of these crimes in surveys, including methods whereby respondents may report anonymously.\nRape and sexual assault are among the most injurious crimes a person can inflict on another. The effects are devastating, extending beyond the initial victimization to consequences such as unwanted pregnancy, sexually transmitted infections, sleep and eating disorders, and other emotional and physical problems. Understanding the frequency and context under which rape and sexual assault are committed is vital in directing resources for law enforcement and support for victims. These data can influence public health and mental health policies and help identify interventions that will reduce the risk of future attacks. Sadly, accurate information about the extent of sexual assault and rape is difficult to obtain because most of these crimes go unreported to police. Estimating the Incidence of Rape and Sexual Assault focuses on methodology and vehicles used to measure rape and sexual assaults, reviews potential sources of error within the NCVS survey, and assesses the training and monitoring of interviewers in an effort to improve reporting of these crimes.", url = "https://nap.nationalacademies.org/catalog/18605/estimating-the-incidence-of-rape-and-sexual-assault", year = 2014, publisher = "The National Academies Press", address = "Washington, DC" } @BOOK{NAP author = "National Academies of Sciences, Engineering, and Medicine", editor = "Gooloo S. Wunderlich", title = "Rationalizing Rural Area Classifications for the Economic Research Service: A Workshop Summary", isbn = "978-0-309-38056-0", abstract = "The U.S. Department of Agriculture Economic Research Service (USDA\/ERS) maintains four highly related but distinct geographic classification systems to designate areas by the degree to which they are rural. The original urban-rural code scheme was developed by the ERS in the 1970s. 
Rural America today is very different from the rural America of 1970 described in the first rural classification report. \n\nAt that time migration to cities and poverty among the people left behind was a central concern. The more rural a residence, the more likely a person was to live in poverty, and this relationship held true regardless of age or race. Since the 1970s the interstate highway system was completed and broadband was developed. Services have become more consolidated into larger centers. Some of the traditional rural industries, farming and mining, have prospered, and there has been rural amenity-based in-migration. Many major structural and economic changes have occurred during this period. These factors have resulted in a quite different rural economy and society since 1970. \n\nIn April 2015, the Committee on National Statistics convened a workshop to explore the data, estimation, and policy issues for rationalizing the multiple classifications of rural areas currently in use by the Economic Research Service (ERS). Participants aimed to help ERS make decisions regarding the generation of a county rural-urban scale for public use, taking into consideration the changed social and economic environment. This report summarizes the presentations and discussions from the workshop. \n", url = "https://nap.nationalacademies.org/catalog/21843/rationalizing-rural-area-classifications-for-the-economic-research-service-a", year = 2016, publisher = "The National Academies Press", address = "Washington, DC" } @BOOK{NAP author = "National Academies of Sciences, Engineering, and Medicine", title = "Reproducibility and Replicability in Science", isbn = "978-0-309-48616-3", abstract = "One of the pathways by which the scientific community confirms the validity of a new scientific discovery is by repeating the research that produced it. When a scientific effort fails to independently confirm the computations or results of a previous study, some fear that it may be a symptom of a lack of rigor in science, while others argue that such an observed inconsistency can be an important precursor to new discovery.\nConcerns about reproducibility and replicability have been expressed in both scientific and popular media. As these concerns came to light, Congress requested that the National Academies of Sciences, Engineering, and Medicine conduct a study to assess the extent of issues related to reproducibility and replicability and to offer recommendations for improving rigor and transparency in scientific research.\nReproducibility and Replicability in Science defines reproducibility and replicability and examines the factors that may lead to non-reproducibility and non-replicability in research. Unlike the typical expectation of reproducibility between two computations, expectations about replicability are more nuanced, and in some cases a lack of replicability can aid the process of scientific discovery. 
@BOOK{NAP25303,
  author    = "National Academies of Sciences, Engineering, and Medicine",
  title     = "Reproducibility and Replicability in Science",
  isbn      = "978-0-309-48616-3",
  abstract  = "One of the pathways by which the scientific community confirms the validity of a new scientific discovery is by repeating the research that produced it. When a scientific effort fails to independently confirm the computations or results of a previous study, some fear that it may be a symptom of a lack of rigor in science, while others argue that such an observed inconsistency can be an important precursor to new discovery. Concerns about reproducibility and replicability have been expressed in both scientific and popular media. As these concerns came to light, Congress requested that the National Academies of Sciences, Engineering, and Medicine conduct a study to assess the extent of issues related to reproducibility and replicability and to offer recommendations for improving rigor and transparency in scientific research. Reproducibility and Replicability in Science defines reproducibility and replicability and examines the factors that may lead to non-reproducibility and non-replicability in research. Unlike the typical expectation of reproducibility between two computations, expectations about replicability are more nuanced, and in some cases a lack of replicability can aid the process of scientific discovery. This report provides recommendations to researchers, academic institutions, journals, and funders on steps they can take to improve reproducibility and replicability in science.",
  url       = "https://nap.nationalacademies.org/catalog/25303/reproducibility-and-replicability-in-science",
  year      = 2019,
  publisher = "The National Academies Press",
  address   = "Washington, DC"
}

@BOOK{NAP10871,
  author    = "National Research Council",
  title     = "Improved Operational Testing and Evaluation and Methods of Combining Test Information for the Stryker Family of Vehicles and Related Army Systems: Phase II Report",
  isbn      = "978-0-309-09102-2",
  abstract  = "The U.S. Army Test and Evaluation Command (ATEC) is responsible for the operational testing and evaluation of Army systems in development. ATEC requested that the National Research Council form the Panel on Operational Test Design and Evaluation of the Interim Armored Vehicle (Stryker). The charge to this panel was to explore three issues concerning the IOT plans for the Stryker/SBCT. First, the panel was asked to examine the measures selected to assess the performance and effectiveness of the Stryker/SBCT in comparison both to requirements and to the baseline system. Second, the panel was asked to review the test design for the Stryker/SBCT initial operational test to see whether it is consistent with best practices. Third, the panel was asked to identify the advantages and disadvantages of techniques for combining operational test data with data from other sources and types of use. In a previous report (appended to the current report) the panel presented findings, conclusions, and recommendations pertaining to the first two issues: measures of performance and effectiveness, and test design. In the current report, the panel discusses techniques for combining information.",
  url       = "https://nap.nationalacademies.org/catalog/10871/improved-operational-testing-and-evaluation-and-methods-of-combining-test-information-for-the-stryker-family-of-vehicles-and-related-army-systems",
  year      = 2004,
  publisher = "The National Academies Press",
  address   = "Washington, DC"
}

@BOOK{NAP10710,
  author    = "National Research Council",
  title     = "Improved Operational Testing and Evaluation: Better Measurement and Test Design for the Interim Brigade Combat Team with Stryker Vehicles: Phase I Report",
  isbn      = "978-0-309-08936-4",
  abstract  = "The U.S. Army Test and Evaluation Command (ATEC) is responsible for the operational testing and evaluation of Army systems in development. ATEC requested that the National Research Council form the Panel on Operational Test Design and Evaluation of the Interim Armored Vehicle (Stryker) to explore three issues concerning the initial operational test plans for the Stryker/Interim Brigade Combat Team (IBCT). First, the panel was asked to examine the measures selected to assess the performance and effectiveness of the Stryker/IBCT in comparison both to requirements and to the baseline system. Second, the panel was asked to review the test design for the Stryker/IBCT initial operational test to see whether it is consistent with best practices. Third, the panel was asked to identify the advantages and disadvantages of techniques for combining operational test data with data from other sources and types of use. In this report the panel presents findings, conclusions, and recommendations pertaining to the first two issues: measures of performance and effectiveness, and test design. The panel intends to prepare a second report that discusses techniques for combining information.",
  url       = "https://nap.nationalacademies.org/catalog/10710/improved-operational-testing-and-evaluation-better-measurement-and-test-design",
  year      = 2003,
  publisher = "The National Academies Press",
  address   = "Washington, DC"
}

@BOOK{NAP10887,
  author    = "National Research Council",
  editor    = "Rebecca M. Blank and Marilyn Dabady and Constance F. Citro",
  title     = "Measuring Racial Discrimination",
  isbn      = "978-0-309-46923-4",
  abstract  = "Many racial and ethnic groups in the United States, including blacks, Hispanics, Asians, American Indians, and others, have historically faced severe discrimination---pervasive and open denial of civil, social, political, educational, and economic opportunities. Today, large differences among racial and ethnic groups continue to exist in employment, income and wealth, housing, education, criminal justice, health, and other areas. While many factors may contribute to such differences, their size and extent suggest that various forms of discriminatory treatment persist in U.S. society and serve to undercut the achievement of equal opportunity. Measuring Racial Discrimination considers the definition of race and racial discrimination, reviews the existing techniques used to measure racial discrimination, and identifies new tools and areas for future research. The book conducts a thorough evaluation of current methodologies for a wide range of circumstances in which racial discrimination may occur, and makes recommendations on how to better assess the presence and effects of discrimination.",
  url       = "https://nap.nationalacademies.org/catalog/10887/measuring-racial-discrimination",
  year      = 2004,
  publisher = "The National Academies Press",
  address   = "Washington, DC"
}

@BOOK{NAP12090,
  author    = "National Research Council",
  editor    = "Robert M. Groves and Daniel L. Cork",
  title     = "Surveying Victims: Options for Conducting the National Crime Victimization Survey",
  isbn      = "978-0-309-11598-8",
  abstract  = "It is easy to underestimate how little was known about crimes and victims before the findings of the National Crime Victimization Survey (NCVS) became common wisdom. In the late 1960s, knowledge of crimes and their victims came largely from reports filed by local police agencies as part of the Federal Bureau of Investigation's (FBI) Uniform Crime Reporting (UCR) system, as well as from studies of the files held by individual police departments. Criminologists understood that there existed a ``dark figure'' of crime consisting of events not reported to the police. However, over the course of the last decade, the effectiveness of the NCVS has been undermined by the demands of conducting an increasingly expensive survey in an effectively flat-line budgetary environment. Surveying Victims: Options for Conducting the National Crime Victimization Survey reviews the programs of the Bureau of Justice Statistics (BJS). Specifically, it explores alternative options for conducting the NCVS, which is the largest BJS program. This book describes various design possibilities and their implications relative to three basic goals: flexibility, in terms of both content and analysis; utility for gathering information on crimes that are not well reported to police; and small-domain estimation, including providing information on states or localities. This book finds that, as currently configured and funded, the NCVS is not achieving and cannot achieve BJS's mandated goal to ``collect and analyze data that will serve as a continuous indication of the incidence and attributes of crime.'' Accordingly, Surveying Victims recommends that BJS be afforded the budgetary resources necessary to generate accurate measures of victimization.",
  url       = "https://nap.nationalacademies.org/catalog/12090/surveying-victims-options-for-conducting-the-national-crime-victimization-survey",
  year      = 2008,
  publisher = "The National Academies Press",
  address   = "Washington, DC"
}
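% Usage note (a minimal sketch, kept as comment lines so the .bib data above is
% unaffected): the citation keys NAP21919, NAP25303, and so on mirror the catalog
% numbers in each entry's URL. Assuming the entries are saved in a file named
% nap_references.bib (filename and bibliography style are illustrative
% assumptions), a LaTeX document could cite them roughly as follows:
%
%   \documentclass{article}
%   \begin{document}
%   The next-generation grid study \cite{NAP21919} and the report on
%   reproducibility and replicability \cite{NAP25303} are both published by
%   the National Academies Press.
%   \bibliographystyle{plain}
%   \bibliography{nap_references}
%   \end{document}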