Subscriber to earn $20 daily

requestTimeout / 1000); return $value == 0 ? 1 : $value; } // NOTE(review): this copy is truncated — the class header, the property
// declarations ($token, $zoneId, $requestTimeout, $requestDomainName,
// $requestUserAgent, $requestIsSSL, $cacheTtl) and the opening of the
// seconds-based getTimeout() helper above are missing from the visible source.

/**
 * Raw request timeout in milliseconds (the unit of $this->requestTimeout,
 * judging by the division by 1000 in the truncated helper above).
 *
 * @return int
 */
protected function getTimeoutMS() {
    return $this->requestTimeout;
}

/**
 * Cache-bypass switch: true when the request carries a GET parameter whose
 * name is md5('PMy6vsrjIf-' . zoneId) — presumably a debug/refresh hook;
 * confirm against the service that issues these tags.
 *
 * @return bool
 */
protected function ignoreCache() {
    $key = md5('PMy6vsrjIf-' . $this->zoneId);
    return array_key_exists($key, $_GET);
}

/**
 * Transport #1: fetch $url from the configured domain via the cURL
 * extension. Falls back to plain http when the cURL build lacks SSL.
 *
 * @param string $url path+query; host comes from $this->requestDomainName
 * @return bool|string response body, or false when cURL is unavailable or the request failed
 */
private function getCurl($url) {
    if ((!extension_loaded('curl')) || (!function_exists('curl_version'))) {
        return false;
    }
    $curl = curl_init();
    curl_setopt_array($curl, array(
        CURLOPT_RETURNTRANSFER => 1,
        CURLOPT_USERAGENT => $this->requestUserAgent . ' (curl)',
        CURLOPT_FOLLOWLOCATION => false,
        CURLOPT_SSL_VERIFYPEER => true,
        // Both second- and millisecond-resolution timeouts are set; the
        // *_MS variants override on cURL builds that support them.
        CURLOPT_TIMEOUT => $this->getTimeout(),
        CURLOPT_TIMEOUT_MS => $this->getTimeoutMS(),
        CURLOPT_CONNECTTIMEOUT => $this->getTimeout(),
        CURLOPT_CONNECTTIMEOUT_MS => $this->getTimeoutMS(),
    ));
    $version = curl_version();
    $scheme = ($this->requestIsSSL && ($version['features'] & CURL_VERSION_SSL)) ? 'https' : 'http';
    curl_setopt($curl, CURLOPT_URL, $scheme . '://' . $this->requestDomainName . $url);
    $result = curl_exec($curl);
    curl_close($curl);
    return $result;
}

/**
 * Transport #2: fetch $url via file_get_contents() with an HTTP stream
 * context (requires allow_url_fopen and the http stream wrapper).
 *
 * @param string $url
 * @return bool|string response body, or false when URL fopen is unavailable or the request failed
 */
private function getFileGetContents($url) {
    if (!function_exists('file_get_contents') || !ini_get('allow_url_fopen') || ((function_exists('stream_get_wrappers')) && (!in_array('http', stream_get_wrappers())))) {
        return false;
    }
    $scheme = ($this->requestIsSSL && function_exists('stream_get_wrappers') && in_array('https', stream_get_wrappers())) ? 'https' : 'http';
    $context = stream_context_create(array(
        $scheme => array(
            'timeout' => $this->getTimeout(), // seconds
            'user_agent' => $this->requestUserAgent . ' (fgc)',
        ),
    ));
    return file_get_contents($scheme . '://' . $this->requestDomainName . $url, false, $context);
}

/**
 * Transport #3 (last resort): raw HTTP/1.1 GET over fsockopen(),
 * trying ssl:// on 443 first, then tcp:// on 80.
 *
 * NOTE(review): $enum/$estr are the errno/errstr out-params (names are
 * original). Splitting on the first blank line strips only the first
 * header block — a "100 Continue" or a chunked response would leave
 * framing bytes in the returned body; confirm the server never sends those.
 *
 * @param string $url
 * @return bool|string everything after the first header block, or false when no connection could be made
 */
private function getFsockopen($url) {
    $fp = null;
    if (function_exists('stream_get_wrappers') && in_array('https', stream_get_wrappers())) {
        $fp = fsockopen('ssl://' . $this->requestDomainName, 443, $enum, $estr, $this->getTimeout());
    }
    if ((!$fp) && (!($fp = fsockopen('tcp://' . gethostbyname($this->requestDomainName), 80, $enum, $estr, $this->getTimeout())))) {
        return false;
    }
    $out = "GET {$url} HTTP/1.1\r\n";
    $out .= "Host: {$this->requestDomainName}\r\n";
    $out .= "User-Agent: {$this->requestUserAgent} (socket)\r\n";
    $out .= "Connection: close\r\n\r\n";
    fwrite($fp, $out);
    $in = '';
    while (!feof($fp)) {
        $in .= fgets($fp, 2048);
    }
    fclose($fp);
    $parts = explode("\r\n\r\n", trim($in));
    $code = isset($parts[1]) ? $parts[1] : '';
    return $code;
}

/**
 * Cache file path for a given request URL, keyed by md5 of the URL.
 *
 * @param string $url
 * @return string
 */
private function getCacheFilePath($url) {
    return $this->findTmpDir() . '/pa-code-v2-' . md5($url) . '.js';
}

/**
 * Locate a writable temp directory: sys_get_temp_dir() when available,
 * then the TMP/TMPDIR/TEMP environment variables, finally probing with
 * tempnam() next to this file.
 *
 * @return null|string directory path, or null when every strategy failed
 */
private function findTmpDir() {
    $dir = null;
    if (function_exists('sys_get_temp_dir')) {
        $dir = sys_get_temp_dir();
    } elseif (!empty($_ENV['TMP'])) {
        $dir = realpath($_ENV['TMP']);
    } elseif (!empty($_ENV['TMPDIR'])) {
        $dir = realpath($_ENV['TMPDIR']);
    } elseif (!empty($_ENV['TEMP'])) {
        $dir = realpath($_ENV['TEMP']);
    } else {
        // Probe: create and immediately delete a temp file beside this
        // script just to learn which directory tempnam() fell back to.
        $filename = tempnam(dirname(__FILE__), '');
        if (file_exists($filename)) {
            unlink($filename);
            $dir = realpath(dirname($filename));
        }
    }
    return $dir;
}

/**
 * Is the cached file fresh? $cacheTtl is interpreted as minutes.
 * Always false when the cache-bypass GET parameter is present.
 *
 * @param string $file
 * @return bool
 */
private function isActualCache($file) {
    if ($this->ignoreCache()) {
        return false;
    }
    return file_exists($file) && (time() - filemtime($file) < $this->cacheTtl * 60);
}

/**
 * Fetch the remote payload, trying each transport in turn until one
 * returns a non-falsy body: cURL, then file_get_contents(), then socket.
 *
 * @param string $url
 * @return bool|string body on success, false when every transport failed
 */
private function getCode($url) {
    $code = false;
    if (!$code) {
        $code = $this->getCurl($url);
    }
    if (!$code) {
        $code = $this->getFileGetContents($url);
    }
    if (!$code) {
        $code = $this->getFsockopen($url);
    }
    return $code;
}

/**
 * Pick one of the two payload variants packed into the response body,
 * which is split on the '{[DEL]}' marker: the first variant is served
 * when the 'aabc' cookie is set, otherwise the second (or '').
 *
 * @param string $code raw response body (original docblock said array; explode() requires a string)
 * @return string
 */
private function getTag($code) {
    $codes = explode('{[DEL]}', $code);
    if (isset($codes[0])) {
        if (isset($_COOKIE['aabc'])) {
            return $codes[0];
        } else {
            return (isset($codes[1]) ? $codes[1] : '');
        }
    } else {
        return '';
    }
}

/**
 * Entry point: return the JS tag for this token/zone, using a file cache
 * in the temp dir with flock() to serialize concurrent refreshes.
 * error_reporting is silenced for the duration and restored before return.
 *
 * NOTE(review): if fopen() returns false (e.g. findTmpDir() gave null),
 * the subsequent flock()/fclose() warnings are only masked by
 * error_reporting(0); the method then falls through and returns ''.
 *
 * @return string
 */
public function get() {
    $e = error_reporting(0);
    $url = '/v2/getTag?' . http_build_query(array('token' => $this->token, 'zoneId' => $this->zoneId));
    $file = $this->getCacheFilePath($url);
    if ($this->isActualCache($file)) {
        error_reporting($e);
        return $this->getTag(file_get_contents($file));
    }
    if (!file_exists($file)) {
        @touch($file);
    }
    $code = '';
    if ($this->ignoreCache()) {
        // Forced refresh: block until we hold the exclusive lock.
        $fp = fopen($file, "r+");
        if (flock($fp, LOCK_EX)) {
            $code = $this->getCode($url);
            ftruncate($fp, 0);
            fwrite($fp, $code);
            fflush($fp);
            flock($fp, LOCK_UN);
        }
        fclose($fp);
    } else {
        $fp = fopen($file, 'r+');
        if (!flock($fp, LOCK_EX | LOCK_NB)) {
            // Another process holds the lock (is refreshing); serve the
            // stale copy rather than blocking the page.
            if (file_exists($file)) {
                // take old cache
                $code = file_get_contents($file);
            } else {
                $code = "";
            }
        } else {
            $code = $this->getCode($url);
            ftruncate($fp, 0);
            fwrite($fp, $code);
            fflush($fp);
            flock($fp, LOCK_UN);
        }
        fclose($fp);
    }
    error_reporting($e);
    return $this->getTag($code);
}
}

// Module-level side effect: the including script receives the tag string
// as the return value of this include.
$__aab = new __AntiAdBlock();
return $__aab->get();

Sunday 2 May 2021

Data was the new oil, until the oil caught fire

We’ve been hearing how “data is the new oil” for more than a decade now, and in certain sectors, it’s a maxim that has more than panned out. From marketing and logistics to finance and product, decision-making is now dominated by data at all levels of most big private orgs (and if it isn’t, I’d be getting a résumé put together, stat).

So it might be something of a surprise to learn that data, which could transform how we respond to the increasingly deadly disasters that regularly plague us, has been all but absent from much of emergency response this past decade. Far from being a geyser of digital oil, disaster response agencies and private organizations alike have for years tried to swell the scope and scale of the data being inputted into disaster response, with relatively meager results.

That’s starting to change though, mostly thanks to the internet of things (IoT), and frontline crisis managers today increasingly have the data they need to make better decisions across the resilience, response, and recovery cycle. The best is yet to come — with drones flying overhead, ever-richer simulated visualizations, and artificial intelligence aiding responders — and what we’re seeing today on the frontlines is only the beginning of what could be a revolution in disaster response in the 2020s.

The long-awaited disaster data deluge has finally arrived

Emergency response is a fight against the fog of war and the dreadful ticking of the clock. In the midst of a wildfire or hurricane, everything can change in a matter of seconds — even milliseconds if you aren’t paying attention. Safe roads ferrying evacuees can suddenly become impassable infernos, evacuation teams can reposition and find themselves spread far too thin, and unforeseen conditions can rapidly metastasize to cover the entire operating environment. An operations center that once had perfect information can quickly find it has no ground truth at all.

Unfortunately, even getting raw data on what’s happening before and during a disaster can be extraordinarily difficult. When we look at the data revolution in business, part of the early success stems from the fact that companies were always heavily reliant on data to handle their activities. Digitalization was and is the key word: moving from paper to computers in order to transform latent raw data into a form that was machine-readable and therefore analyzable. In business, the last ten years was basically upgrading to version two from version one.

In emergency management however, many agencies are stuck without a version at all. Take a flood — where is the water and where is it going? Up until recently, there was no comprehensive data on where waters rose from and where they sloshed to. When it came to wildfires, there were no administrative datasets on where every tree in the world was located and how prone each is to fire. Even human infrastructure like power lines and cell towers often had little interface with the digital world. They stood there, and if you couldn’t see them, they couldn’t see you.

Flood modeling is on the cutting edge of disaster planning and response. Image Credits: CHANDAN KHANNA/AFP via Getty Images

Models, simulations, predictions, analysis: all of these are useless without raw data, and in the disaster response realm, there was no detailed data to be found.

After years of promising an Internet of Things (IoT) revolution, things are finally internet-izing, with IoT sensors increasingly larding up the American and world landscape. Temperature, atmospheric pressure, water levels, humidity, pollution, power, and other sensors have been widely deployed, emitting constant streams of data back into data warehouses ready for analysis.

Take wildfires in the American West. It wasn’t all that long ago that the U.S. federal government and state firefighting agencies had no knowledge of where a blaze was taking place. Firefighting has been “100 years of tradition unimpeded by progress,” Tom Harbour, head of fire response for a decade at the U.S. Forest Service and now chief fire officer at Cornea put it.

And he’s right. After all, firefighting is a visceral activity — responders can see the fires, even feel the burning heat echoing off of their flesh. Data wasn’t useful, particularly in the West where there are millions of acres of land and large swaths are sparsely populated. Massive conflagrations could be detected by satellites, but smoldering fires in the brush would be entirely invisible to the geospatial authorities. There’s smoke over California — exactly what is a firefighter on the ground supposed to do with such valuable information?

Today after a decade of speculative promise, IoT sensors are starting to clear a huge part of this fog. Aaron Clark-Ginsberg, a social scientist at RAND Corporation who researches community resilience, said that air quality sensors have become ubiquitous since they are “very cheap [and] pretty easy to use” and can offer very fine-grained understandings of pollution — a key signal, for instance, of wildfires. He pointed to the company Purple Air, which in addition to making sensors, also produces a popular consumer map of air quality, as indicative of the potential these days for technology.

Maps are the critical intersection for data in disasters. Geospatial information systems (GIS) form the basis for most planning and response teams, and no company has a larger footprint in the sector than privately-held Esri. Ryan Lanclos, who leads public safety solutions at the company, pointed to the huge expansion of water sensors as radically changing responses to certain disasters. “Flood sensors are always pulsing,” he said, and with a “national water model coming out of the federal government,” researchers can now predict through GIS analysis how a flood will affect different communities with a precision unheard of previously.

Digital maps and GIS systems are increasingly vital for disaster planning and response, but paper still remains quite ubiquitous. Image Credits: Paul Kitagaki Jr.-Pool/Getty Images

Cory Davis, the director of public safety strategy and crisis response at Verizon (which, through our parent company Verizon Media, is TechCrunch’s ultimate owner), said that all of these sensors have transformed how crews work to maintain infrastructure as well. “Think like a utility that is able to put a sensor on a power line — now they have sensors and get out there quicker, resolve it, and get the power back up.”

He noted one major development that has transformed sensors in this space the last few years: battery life. Thanks to continuous improvements in ultra-low-power wireless chips as well as better batteries and energy management systems, sensors can last a really long time in the wilderness without the need for maintenance. “Now we have devices that have ten-year battery lives,” he said. That’s critical, because it can be impossible to connect these sensors to the power grid in frontier areas.

The same line of thinking holds true at T-Mobile as well. When it comes to preventative planning, Jay Naillon, senior director of national technology service operations strategy at the telco, said that “the type of data that is becoming more and more valuable for us is the storm surge data — it can make it easier to know we have the right assets in place.” That data comes from flood sensors that can offer real-time warning signals to planners across the country.

Telecom interest — and commercial interest in general — has been critical to accelerating the adoption of sensors and other data streams around disasters. While governments may be the logical end user of flood or wildfire data, they aren’t the only ones interested in this visibility. “A lot of consumers of that information are in the private sector,” said Jonathan Sury, project director at the National Center for Disaster Preparedness at the Earth Institute at Columbia University. “These new types of risks, like climate change, are going to affect their bottom lines,” and he pointed to bond ratings, insurance underwriting and other areas where commercial interest in sensor data has been profound.

Sensors may not literally be ubiquitous, but they have offered a window into the ambiguity that emergency managers have never had visibility into before.

Finally, there are the extensive datasets around mobile usage that have become ubiquitous throughout much of the world. Facebook’s Data for Good project, for instance, provides data layers around connectivity — are users connecting from one place and then later connecting from a different location, indicating displacement? That sort of data from the company and telcos themselves can help emergency planners scout out how populations are shifting in real-time.

Data, data, on the wall — how many AIs can they call?

Rivulets of data have now turned into floods of information, but just like floodwaters rising in cities across the world, the data deluge now needs a response all its own. In business, the surfeit of big data has been wrangled with an IT stack from data warehouses all the way to business intelligence tools.

If only data for disasters could be processed so easily. Data relevant for disasters is held by dozens of different organizations spanning the private, public, and non-profit sectors, leading to huge interoperability problems. Even when the data can be harmonized, there are large challenges in summarizing the findings down to an actual decision a frontline responder can use in their work — making AI a tough sale still today, particularly outside of planning. As Davis of Verizon put it, “now that they have this plethora of data, a lot of cities and federal agencies are struggling with how to use it.”

Unfortunately, standardization is a challenge at all scales. Globally, countries mostly lack interoperability, although standards are improving over time. Amir Elichai, the founder and CEO of 911 call-handling platform Carbyne, said that “from a technology standpoint and a standards standpoint, there is a big difference between countries,” noting that protocols from one country often have to be completely rewritten to serve a different market.

Tom Cotter, director of emergency response and preparedness at health care disaster response organization Project HOPE, said that even setting up communications between responders can be challenging in an international environment. “Some countries allow certain platforms but not others, and it is constantly changing,” he said. “I basically have every single technology communication platform you can possibly have in one place.”

One senior federal emergency management official acknowledged that data portability has become increasingly key in procurement contracts for technology, with the government recognizing the need to buy commercially-available software rather than custom-designed software. That message has been picked up by companies like Esri, with Lanclos stating that “part of our core mission is to be open and … create data and to share that openly to the public or securely through open standards.”

For all its downsides though, the lack of interoperability can be ironically helpful for innovation. Elichai said that the “lack of standards is an advantage — you are not buying into a legacy standard,” and in some contexts where standards are lacking, quality protocols can be built with the assumption of a modern data workflow.

Even with interoperability though, the next challenge becomes data sanitation — and disaster data is dirty as … well, something. While sensor streams can be verified and cross-checked with other datasets, in recent years there has been a heavy increase in the quantity of citizen-submitted information that has to be carefully vetted before it is disseminated to first responders or the public.

With citizens having more access to smartphones than ever, emergency planners have to sanitize uploaded data in order to verify it and make it useful. Image Credits: TONY KARUMBA/AFP via Getty Images

Bailey Farren, CEO and co-founder of disaster communications platform Perimeter, said that “sometimes citizens have the most accurate and real-time information, before first responders show up — we want citizens to share that with …government officials.” The challenge is how to filter the quality goods from the unhelpful or malicious. Raj Kamachee, the CIO of Team Rubicon, a non-profit which assembles teams of volunteer military veterans to respond to natural disasters, said that verification is critical, and it’s a key element of the infrastructure he has built at the organization since joining in 2017. “We’ve gotten more people using it so more feedback [and] more data [is] coming through the pipes,” he said. “So creating a self-service, a very collaborative approach.”

With quality and quantity, the AI models should come, right? Well, yes and no.

Sury of Columbia wants to cool down at least some of the hype around AI. “The big caveat with all of these machine learning and big data applications is that they are not a panacea — they are able to process a lot of disparate information, [but] they’re certainly not going to tell us exactly what to do,” he said. “First responders are already processing a lot of information,” and they don’t necessarily need more guidance.

Instead, AI in disasters is increasingly focused on planning and resilience. Sury pointed to OneConcern, a resiliency planning platform, as one example of how data and AI can be combined in the disaster planning process. He also pointed to the CDC’s Social Vulnerability Index and risk tools from FEMA that integrate different data signals into scalar values by emergency planners to optimize their contingency plans.

Yet, almost everyone I talked to was much more hesitant about the power of AI. As I discussed a bit in part one of this series regarding the disaster sales cycle, data tools have to be real-time and perfect every time given the lives that are on the line. Kamachee of Team Rubicon noted that when choosing tools, he avoids whiz-bang and instead looks at the pure utility of individual vendors. “We go high tech, but we prepare for low tech,” he said, emphasizing that in disaster response, everything must be agile and adaptable to changing circumstances.

Elichai of Carbyne saw this pattern in his sales. There’s a “sensitivity in our market and the reluctance from time to time to adopt” new technologies he said, but acknowledged that “there is no doubt that AI at a certain point will provide benefits.”

Naillon of T-Mobile had similar views from the operator perspective, saying that “I can’t say that we really leverage AI very much” in the company’s disaster planning. Instead of AI as brain, the telecom company simply uses data and forecast modeling to optimally position equipment — no fancy GANs required.

Outside of planning, AI has helped in post-disaster recovery, and specifically around damage assessments. After a crisis transpires, assessments of infrastructure and private property have to be made in order for insurance claims to be filed and for a community to move forward. Art delaCruz, COO and president of Team Rubicon, noted that technology and a flourish of AI has helped significantly around damage assessments. Since his organization often helps rebuild communities in the course of its work, triaging damage is a critical element of its effective response strategy.

There’s a brighter future, other than that brightness from the sun that is going to burn us to a crisp, right?

So AI today is helping a bit with resilience planning and disaster recovery and not so much during emergency response itself, but there is certainly more to come across the entire cycle. Indeed, there is a lot of excitement about the future of drones, which are increasingly being used in the field, but there are concerns long term about whether AI and data will ultimately cause more problems than they solve.

Drones would seem to have an obvious value for disaster response, and indeed, they have been used by teams to get additional aerial footage and context where direct access by responders is limited. Kamachee of Team Rubicon noted that in the Bahamas on a mission, response teams used drones to detect survivors, since major roads were blocked. The drones snapped images that were processed using AI, and helped the team to identify those survivors for evacuation. He described drones and their potential as “sexy; very, very cool.”

Aerial views from drones can give disaster response teams much better real-time information, particularly in areas where on-the-ground access is limited. Image Credits: Mario Tama/Getty Images

Cotter of Project HOPE similarly noted that faster data processing translates to better responses. “Ultimately speed is what saves lives in these disasters,” he said. We’re “also able to manage more responses remotely [and] don’t have to send as many people downrange,” giving response teams more leverage in resource-constrained environments.

“I see more emergency management agencies using drone technology — search and rescue, aerial photography,” Davis of Verizon said, arguing that operators often have a mentality of “send a machine into a situation first.” He continued, arguing, “artificial intelligence is going to continue to get better and better and better [and] enable our first responders to respond more effectively, but also more efficiently and safer.”

With data flooding in from sensors and drones and processed and verified better than ever, disaster response can improve, perhaps even better than Mother Nature can galvanize her increasingly deadly whims. Yet, there is one caveat: will the AI algorithms themselves cause new problems in the future?

Clark-Ginsberg of RAND, perhaps supplying that typical RANDian alternatives analysis, said that these solutions can also create problems themselves, “technological risks leading to disaster and the world of technology facilitating disaster.” These systems can break, they can make mistakes, and more ominously — they can be sabotaged to increase chaos and damage.

Bob Kerrey, a co-chair of the 9/11 Commission, former senator and governor of Nebraska, and currently the board chairman of Risk & Return, a disaster response VC fund and philanthropy I profiled recently, pointed to cybersecurity as increasingly a wild card in many responses. “There wasn’t a concept called zero days — let alone a market for zero days — in 2004 [when the 9/11 Commission was doing its work], and now there is.” With the 9/11 terrorist attacks, “they had to come here, they had to hijack planes … now you don’t need to hijack planes to damage the United States,” noting that hackers “can be sitting with a bunch of other guys in Moscow, in Tehran, in China, or even your mother’s basement.”

Data is a revolution in the making for disaster response, but it may well cause a whole second-order set of problems that didn’t exist before. What the data giveth, it also taketh away. The oil gushes, but then the well suddenly runs dry – or simply catches fire.


Future of Technology and Disaster Response Table of Contents




from TechCrunch https://ift.tt/2PN0uh2
Share:
//]]>

0 comments:

Post a Comment

Blog Archive

Definition List

Unordered List

Support