author:    Tim Starling <tstarling@users.mediawiki.org>  2004-03-20 15:03:26 +0000
committer: Tim Starling <tstarling@users.mediawiki.org>  2004-03-20 15:03:26 +0000
commit:    2ca258fd035e7da820747dc509d834c18aeba110 (patch)
tree:      cc44a9712a198a760372578e37aa6af9a058dc5e /includes/SquidUpdate.php
parent:    36924e12e496ba246b9d03b154ddb5b890695467 (diff)
* Changed inclusion syntax to allow e.g. {{stub}}
* Split MediaWiki namespace into MediaWiki and Template (requires changes to all language files)
* Purge links to a page on edit in the Template namespace
* General refactoring of purging and cache invalidation code
Notes: http://mediawiki.org/wiki/Special:Code/MediaWiki/2769
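
For orientation, a minimal usage sketch of the refactored API, assuming a
caller that already holds a Title object (the callers live in Article.php and
friends, outside this diff; the variable names here are illustrative):

    # Purge a page plus every page whose cached HTML links to it:
    $u = SquidUpdate::newFromLinksTo( $title );
    $u->doUpdate();

    # Or purge an explicit list of URLs directly:
    SquidUpdate::purge( array( 'http://my.host/something' ) );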
Diffstat (limited to 'includes/SquidUpdate.php')
-rw-r--r--  includes/SquidUpdate.php  152
1 file changed, 136 insertions(+), 16 deletions(-)
diff --git a/includes/SquidUpdate.php b/includes/SquidUpdate.php
index 27ee985a3b55..be25eda3cecb 100644
--- a/includes/SquidUpdate.php
+++ b/includes/SquidUpdate.php
@@ -2,30 +2,150 @@
# See deferred.doc
class SquidUpdate {
- var $title, $urlArr;
-
- function SquidUpdate( $title, $urlArr = Array() ) {
- $this->title = $title;
+ var $urlArr;
+
+ function SquidUpdate( $urlArr = Array() ) {
$this->urlArr = $urlArr;
}
+ /* static */ function newFromLinksTo( &$title ) {
+ # Get a list of URLs linking to this page
+ $id = $title->getArticleID();
+ $sql = "SELECT cur_namespace,cur_title FROM links,cur WHERE l_to={$id} and l_from=cur_id" ;
+ $res = wfQuery ( $sql, DB_READ ) ;
+ $blurlArr = $title->getSquidURLs();
+ while ( $BL = wfFetchObject ( $res ) )
+ {
+ $tobj = Title::makeTitle( $BL->cur_namespace, $BL->cur_title ) ;
+ $blurlArr[] = $tobj->getInternalURL();
+ }
+ wfFreeResult ( $res ) ;
+ return new SquidUpdate( $blurlArr );
+ }
+
+ /* static */ function newFromBrokenLinksTo( &$title ) {
+ # Get a list of URLs linking to this (currently non-existent) page
+ # bl_to is a text field, so the title must be escaped and quoted
+ $encTitle = wfStrencode( $title->getPrefixedDBkey() );
+ $sql = "SELECT cur_namespace,cur_title FROM brokenlinks,cur WHERE bl_to='{$encTitle}' AND bl_from=cur_id";
+ $res = wfQuery( $sql, DB_READ );
+ $blurlArr = array();
+ while ( $BL = wfFetchObject( $res ) ) {
+ $tobj = Title::makeTitle( $BL->cur_namespace, $BL->cur_title );
+ $blurlArr[] = $tobj->getInternalURL();
+ }
+ wfFreeResult( $res );
+ return new SquidUpdate( $blurlArr );
+ }
+
+ /* static */ function newSimplePurge( &$title ) {
+ $urlArr = $title->getSquidURLs();
+ return new SquidUpdate( $urlArr );
+ }
function doUpdate() {
- if( count( $this->urlArr ) == 0) {
- # newly created Article
- # prepare the list of urls to purge
- $id= $this->title->getArticleID();
- $sql = "SELECT cur_namespace,cur_title FROM links,cur WHERE l_to={$id} AND l_from=cur_id" ;
- $res = wfQuery( $sql, DB_READ );
- while( $row = wfFetchObject ( $res ) ) {
- $t = Title::MakeTitle( $row->cur_namespace, $row->cur_title );
- $this->urlArr[] = $t->getInternalURL();
+ SquidUpdate::purge( $this->urlArr );
+ }
+
+ /* Purges a list of URLs from all Squids defined in $wgSquidServers.
+ $urlArr should contain the full URLs to purge as values
+ (example: $urlArr[] = 'http://my.host/something')
+ XXX: report broken Squids via mail or log */
+
+ /* static */ function purge( $urlArr ) {
+ global $wgSquidServers;
+
+ if ( $wgSquidServers == "echo" ) {
+ # debug mode: print the URLs instead of contacting any Squid
+ echo implode("<br>\n", $urlArr);
+ return;
+ }
+
+ $maxsocketspersquid = 8; // socket cap per Squid
+ $urlspersocket = 400; // 400 seems to be a good tradeoff; opening a socket takes a while
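+ /* the first URL is pulled off the list and reused below as a connectivity test for each server */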
+ $firsturl = $urlArr[0];
+ unset($urlArr[0]);
+ $urlArr = array_values($urlArr);
+ $sockspersq = max(ceil(count($urlArr) / $urlspersocket ),1);
+ if ($sockspersq == 1) {
+ /* the most common case */
+ $urlspersocket = count($urlArr);
+ } else if ($sockspersq > $maxsocketspersquid ) {
+ $urlspersocket = ceil(count($urlArr) / $maxsocketspersquid);
+ $sockspersq = $maxsocketspersquid;
+ }
+ $totalsockets = count($wgSquidServers) * $sockspersq;
+ $sockets = Array();
+
+ /* this sets up the sockets and tests the first socket for each server. */
+ for ($ss=0;$ss < count($wgSquidServers);$ss++) {
+ $failed = false;
+ $so = 0;
+ while ($so < $sockspersq && !$failed) {
+ if ($so == 0) {
+ /* first socket for this server, do the tests */
+ list($server, $port) = explode(':', $wgSquidServers[$ss]);
+ if(!isset($port)) $port = 80;
+ $socket = @fsockopen($server, $port, $error, $errstr, 3);
+ if (!$socket) {
+ $failed = true;
+ $totalsockets -= $sockspersq;
+ } else {
+ @fputs($socket,"PURGE " . $firsturl . " HTTP/1.0\r\n".
+ "Connection: Keep-Alive\r\n\r\n");
+ $res = @fread($socket,512);
+ /* Squid only returns HTTP headers with 200 or 404 status;
+ if more is returned, something's wrong */
+ if (strlen($res) > 250) {
+ fclose($socket);
+ $failed = true;
+ $totalsockets -= $sockspersq;
+ } else {
+ @stream_set_blocking($socket,false);
+ $sockets[] = $socket;
+ }
+ }
+ } else {
+ /* open the remaining sockets for this server */
+ list($server, $port) = explode(':', $wgSquidServers[$ss]);
+ if(!isset($port)) $port = 80;
+ $socket = @fsockopen($server, $port, $error, $errstr, 2);
+ @stream_set_blocking($socket, false);
+ $sockets[] = $socket;
+ }
+ $so++;
}
- wfFreeResult( $res );
}
- wfPurgeSquidServers( $this->urlArr );
+ if ($urlspersocket > 0) {
+ /* now do the heavy lifting. The fread() relies on Squid returning only the headers */
+ for ($r=0;$r < $urlspersocket;$r++) {
+ for ($s=0;$s < $totalsockets;$s++) {
+ if($r != 0) {
+ $res = '';
+ $esc = 0;
+ while (strlen($res) < 100 && $esc < 200 ) {
+ $res .= @fread($sockets[$s],512);
+ $esc++;
+ usleep(20);
+ }
+ }
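+ /* $s - $sockspersq * floor($s / $sockspersq) is just $s % $sockspersq:
+ the socket's slot within its server, each slot handling its own
+ contiguous slice of the URL list */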
+ $urindex = $r + $urlspersocket * ($s - $sockspersq * floor($s / $sockspersq));
+ @fputs($sockets[$s],"PURGE " . $urlArr[$urindex] . " HTTP/1.0\r\n".
+ "Connection: Keep-Alive\r\n\r\n");
+ }
+ }
+ }
+
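+ /* drain the responses still pending on each socket, then close it */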
+ foreach ($sockets as $socket) {
+ $res = '';
+ $esc = 0;
+ while (strlen($res) < 100 && $esc < 200 ) {
+ $res .= @fread($socket,1024);
+ $esc++;
+ usleep(20);
+ }
+
+ @fclose($socket);
+ }
}
}
-
?>
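
A configuration sketch for the purge path above, assuming the usual
LocalSettings.php globals (hostnames and values here are illustrative):

    # LocalSettings.php
    $wgSquidServers = array( '127.0.0.1:3128', 'squid2.example.com' ); # port defaults to 80
    # or, for debugging, make purge() print the URL list instead:
    # $wgSquidServers = "echo";

Sizing follows from the constants in purge(): with N URLs left after the test
URL, each server gets max(ceil(N / 400), 1) sockets, capped at 8; past the
cap, the per-socket batch grows instead. For example, 999 URLs open 3 sockets
per Squid at 400 URLs each, while 10000 URLs open 8 sockets at
ceil(10000 / 8) = 1250 URLs each.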