I wrote this function today and am sharing it with my php100 friends.
Step 1: use regular expressions to extract all remote image URLs from the article content.
$message //Article content
//Regular (this is not yet)
$reg = "/]*src="(http://(.+)/(.+).(jpg|gif|bmp|bnp))"/isU";
//Save the extracted img address into the $img_array variable
Preg_match_all($reg, $message, $img_array, PREG_PATTERN_ORDER);
//Filter duplicate images
$img_array = array_unique($img_array[1]);
[/php]
Step 2: loop through the $img_array array, save each image locally, and replace its path in the article.
[php]
foreach ($img_array as $img){
//Determine whether the picture is from your own website
If(xxx.com != get_domain($img)){// If this picture is not on your own server
//Read image file
$Gimg = new GetImage();
$Gimg->source = $img;
$Gimg->save_to = ./data/temp/;
$FILE = $Gimg->download(); //Move the picture to local
//Save to album to get the location where the picture is saved
$img_path = pic_save($FILE,0,);
//Text path replacement
$message = str_replace($img, $img_path, $message);
}
}
At this point, the image URLs in $message have been replaced with local addresses on your own server, and the images themselves are saved there too.
[/php]
[php]
//The following function (and the GetImage class used above) were found on the Internet.
//Get domain name from url
function get_domain($url){
$pattern = "/[w-]+.(com|net|org|gov|cc|biz|info|cn)(.(cn|hk))*/";
Preg_match($pattern, $url, $matches);
If(count($matches) > 0) {
return $matches[0];
}else{
$rs = parse_url($url);
$main_url = $rs["host"];
If(!strcmp(long2ip(sprintf("%u",ip2long($main_url))),$main_url)) {
return $main_url;
}else{
$arr = explode(".",$main_url);
$count=count($arr);
$endArr = array("com", "net", "org", "3322");//com.cn net.cn etc.
If (in_array($arr[$count-2],$endArr)){
$domain = $arr[$count-3].".".$arr[$count-2].".".$arr[$count-1];
}else{
$domain = $arr[$count-2].".".$arr[$count-1];
}
return $domain;
}// end if(!strcmp...)
}// end if(count...)
}//end function