class Http_MultiRequest
{
//List of urls to be crawled in parallel
private $urls = array();
//curl options
private $options;
//Constructor
function __construct($options = array())
{
$this->setOptions($options);
}
//Set url list
function setUrls($urls)
{
$this->urls = $urls;
return $this;
}
//Set options
function setOptions($options)
{
$options[CURLOPT_RETURNTRANSFER] = 1;
if (isset($options['HTTP_POST']))
{
curl_setopt($ch, CURLOPT_POST, 1);
curl_setopt($ch, CURLOPT_POSTFIELDS, $options['HTTP_POST']);
unset($options['HTTP_POST']);
}
if (!isset($options[CURLOPT_USERAGENT]))
{
$options[CURLOPT_USERAGENT] = 'Mozilla/4.0 (compatible; MSIE 6.0; Windows NT 5.1; SV1;)';
}
if (!isset($options[CURLOPT_FOLLOWLOCATION]))
{
$options[CURLOPT_FOLLOWLOCATION] = 1;
}
if (!isset($options[CURLOPT_HEADER]))
{
$options[CURLOPT_HEADER] = 0;
}
$this->options = $options;
}
//Fetch all content in parallel
function exec()
{
if(empty($this->urls) || !is_array($this->urls))
{
return false;
}
$curl = $data = array();
$mh = curl_multi_init();
foreach($this->urls as $k => $v)
{
$curl[$k] = $this->addHandle($mh, $v);
}
$this->execMultiHandle($mh);
foreach($this->urls as $k => $v)
{
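//curl_multi_getcontent() returns the body only because setOptions()
//forces CURLOPT_RETURNTRANSFER on for every handle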
$data[$k] = curl_multi_getcontent($curl[$k]);
curl_multi_remove_handle($mh, $curl[$k]);
}
curl_multi_close($mh);
return $data;
}
//Fetch the content of a single URL (serial, no multi handle)
function execOne($url)
{
if (empty($url)) {
return false;
}
$ch = curl_init($url);
$this->setOneOption($ch);
$content = curl_exec($ch);
curl_close($ch);
return $content;
}
//Internal helper: apply the stored options to one handle
private function setOneOption($ch)
{
curl_setopt_array($ch, $this->options);
}
//Add a new parallel crawl handle
private function addHandle($mh, $url)
{
$ch = curl_init($url);
$this->setOneOption($ch);
curl_multi_add_handle($mh, $ch);
return $ch;
}
//Run all handles in parallel. (Note: this busy loop is a common mistake, and I
//deliberately keep it here: even while downloading a small file it can drive the
//CPU to 100%, spinning through the loop well over 100,000 times. It is a typical
//error caused by not understanding how curl_multi works, and the same pattern
//even appears in the official PHP documentation. A corrected version is sketched
//after the class.)
private function execMultiHandle($mh)
{
$running = null;
do {
curl_multi_exec($mh, $running);
} while ($running > 0);
}
}
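As the comment above notes, the tight do/while poll is the classic mistake. A corrected replacement for execMultiHandle() might look like the sketch below: it uses curl_multi_select() to block until at least one transfer has activity, so the loop only iterates when there is actual work to do.

private function execMultiHandle($mh)
{
$running = null;
do {
curl_multi_exec($mh, $running);
//Block (up to 1 second) until some handle has activity instead of
//spinning; this keeps CPU usage near zero during the transfers
if ($running > 0 && curl_multi_select($mh, 1.0) === -1) {
//curl_multi_select() can return -1 on some systems; back off briefly
usleep(100000);
}
} while ($running > 0);
}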
/*A quick test of the class above, comparing parallel and serial fetching:*/
$urls = array("http://baidu.com", "http://baidu.com", "http://baidu.com", "http://baidu.com", "http://baidu.com", "http://baidu.com", "http://www.google.com", "http://www.sina.com.cn");
$m = new Http_MultiRequest();
$t = microtime(true);
$m->setUrls($urls);
//Parallel fetch:
$data = $m->exec();
$parallel_time = microtime(true) - $t;
echo $parallel_time . "\n";
$t = microtime(true);
//Serial fetch (reset $data so the serial results are not appended to the parallel ones):
$data = array();
foreach ($urls as $url)
{
$data[] = $m->execOne($url);
}
$serial_time = microtime(true) - $t;
echo $serial_time . "\n";
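The class also accepts any CURLOPT_* option, plus the 'HTTP_POST' convenience key, through its constructor. A minimal sketch of a POST request (the URL and fields here are placeholders, not a real endpoint):

//Placeholder endpoint and fields, for illustration only
$post = new Http_MultiRequest(array(
'HTTP_POST' => http_build_query(array('q' => 'php')),
CURLOPT_TIMEOUT => 10,
));
$post->setUrls(array("http://example.com/search"));
$results = $post->exec();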
I hope this article is helpful for your PHP programming.