Diffstat (limited to 'maintenance')
37 files changed, 1168 insertions(+), 655 deletions(-)
diff --git a/maintenance/DiffLanguage.php b/maintenance/DiffLanguage.php index 7857635f43c2..b015591d72da 100644 --- a/maintenance/DiffLanguage.php +++ b/maintenance/DiffLanguage.php @@ -34,26 +34,14 @@ # The script then print a list of wgAllMessagesXX keys that aren't # localised, a percentage of messages correctly localised and the # number of messages to be translated. -# -# -# Known bugs: -# - File paths are hardcoded -# - - -$wgCommandLineMode = true; -# Turn off output buffering if it's on -@ob_end_flush(); -require_once("../LocalSettings.php"); -require_once( "../includes/Setup.php" ); -require_once( "../install-utils.inc" ); +require_once( "commandLine.inc" ); $wgLanguageCode = strtoupper(substr($wgLanguageCode,0,1)).strtolower(substr($wgLanguageCode,1)); # read command line argument -if ( isset($argv[1]) ) { - $lang = $argv[1]; +if ( isset($args[0]) ) { + $lang = $args[0]; # or prompt a simple menu } else { @@ -91,7 +79,7 @@ if ( isset($argv[1]) ) { # include the language if it's not the already loaded one if($lang != $wgLanguageCode) { print "Including language file for $lang.\n"; - include("Language{$lang}.php"); + include_once("Language{$lang}.php"); } /* ugly hack to load the correct array, if you have a better way diff --git a/maintenance/InitialiseMessages.inc b/maintenance/InitialiseMessages.inc index 12522466c1aa..699ec66d5e64 100755 --- a/maintenance/InitialiseMessages.inc +++ b/maintenance/InitialiseMessages.inc @@ -9,20 +9,26 @@ function initialiseMessages( $overwrite = false, $messageArray = false ) { global $wgOut, $wgArticle, $wgUser; global $wgMessageCache, $wgMemc, $wgDBname, $wgUseMemCached; + # Initialise $wgOut and $wgUser for a command line script + $wgOut->disable(); + + $wgUser = new User; + $wgUser->setLoaded( true ); # Don't load from DB + $wgUser->setName( 'MediaWiki default' ); + # Don't try to draw messages from the database we're initialising $wgMessageCache->disable(); - $fname = "initialiseMessages"; + $fname = 'initialiseMessages'; $ns = NS_MEDIAWIKI; # cur_user_text responsible for the modifications # Don't change it unless you're prepared to update the DBs accordingly, otherwise the # default messages won't be overwritte - $username = "MediaWiki default"; + $username = 'MediaWiki default'; $timestamp = wfTimestampNow(); $invTimestamp = wfInvertTimestamp( $timestamp ); - $mwMsg =& MagicWord::get( MAG_MSG ); - $navText = str_replace( "$1", "allmessagestext", $mwMsg->getSynonym( 0 ) ); + $navText = '{{int:allmessagestext}}'; $navText .= " <table border=1 width=100%><tr><td> @@ -39,7 +45,7 @@ function initialiseMessages( $overwrite = false, $messageArray = false ) { # Get keys from $wgAllMessagesEn, which is more complete than the local language $first = true; if ( $messageArray ) { - $sortedArray = $wgAllMessagesEn; + $sortedArray = $messageArray; } else { $sortedArray = $wgAllMessagesEn; } @@ -48,19 +54,19 @@ function initialiseMessages( $overwrite = false, $messageArray = false ) { # SELECT all existing messages foreach ( $sortedArray as $key => $enMsg ) { - if ( $key == "" ) { + if ( $key == '' ) { continue; // Skip odd members } if ( $first ) { $first = false; } else { - $sql .= ","; + $sql .= ','; } $titleObj = Title::newFromText( $key ); $enctitle = wfStrencode($titleObj->getDBkey()); $sql .= "'$enctitle'"; } - $sql .= ")"; + $sql .= ')'; $res = wfQuery( $sql, DB_READ ); $row = wfFetchObject( $res ); @@ -68,10 +74,10 @@ function initialiseMessages( $overwrite = false, $messageArray = false ) { # Decide whether or not each one needs to be 
overwritten $existingTitles = array(); while ( $row ) { - if ( !$row->cur_is_new || $row->cur_user_text != $username ) { - $existingTitles[$row->cur_title] = "keep"; + if ( $row->cur_user_text != $username ) { + $existingTitles[$row->cur_title] = 'keep'; } else { - $existingTitles[$row->cur_title] = "chuck"; + $existingTitles[$row->cur_title] = 'chuck'; } $row = wfFetchObject( $res ); @@ -83,14 +89,12 @@ function initialiseMessages( $overwrite = false, $messageArray = false ) { cur_user_text, cur_timestamp, cur_restrictions, cur_is_new, inverse_timestamp, cur_touched) VALUES "; $first = true; - $mwObj =& MagicWord::get( MAG_MSGNW ); - $msgnw = $mwObj->getSynonym( 0 ); $talk = $wgLang->getNsText( NS_TALK ); $mwtalk = $wgLang->getNsText( NS_MEDIAWIKI_TALK ); # Process each message foreach ( $sortedArray as $key => $enMsg ) { - if ( $key == "" ) { + if ( $key == '' ) { continue; // Skip odd members } # Get message text @@ -104,19 +108,12 @@ function initialiseMessages( $overwrite = false, $messageArray = false ) { $dbencMsg = wfStrencode( $message ); # Update messages which already exist - # Note: UPDATE is now used instead of DELETE/INSERT to avoid wiping cur_restrictions if ( array_key_exists( $title, $existingTitles ) ) { - if ( $existingTitles[$title] == "chuck" || $overwrite) { - wfQuery( "UPDATE cur - SET - cur_text='$dbencMsg', - cur_user=0, - cur_user_text='$username', - cur_timestamp='$timestamp', - cur_touched='$timestamp', - inverse_timestamp='$invTimestamp' - WHERE cur_namespace=8 and cur_title='$title'", DB_WRITE - ); + if ( $existingTitles[$title] == 'chuck' || $overwrite) { + # print "$title\n"; + $mwTitleObj = Title::makeTitle( NS_MEDIAWIKI, $title ); + $article = new Article( $mwTitleObj ); + $article->quickEdit( $message ); } $doInsert = false; } else { @@ -124,7 +121,7 @@ function initialiseMessages( $overwrite = false, $messageArray = false ) { if ( $first ) { $first = false; } else { - $sql .= ","; + $sql .= ','; } $sql .= "($ns, @@ -139,8 +136,6 @@ function initialiseMessages( $overwrite = false, $messageArray = false ) { } # Make table row for navigation page - $mw = str_replace( "$1", $key, $msgnw ); - $message = wfEscapeWikiText( $message ); $navText .= "<tr><td> @@ -149,7 +144,7 @@ function initialiseMessages( $overwrite = false, $messageArray = false ) { </td><td> $message </td><td> -$mw +{{int:$title}} </td></tr>"; } @@ -160,9 +155,9 @@ $mw # Write the navigation page - $navText .= "</table>"; - $title = wfMsgNoDB( "allmessages" ); - $titleObj = Title::makeTitle( NS_MEDIAWIKI, $title ); + $navText .= '</table>'; + $title = wfMsgNoDB( 'allmessages' ); + $titleObj = Title::makeTitle( NS_WIKIPEDIA, $title ); $wgArticle = new Article( $titleObj ); $wgOut->disable(); $wgUser = User::newFromName( 'MediaWiki default' ); @@ -174,16 +169,25 @@ $mw # Clear the relevant memcached key if( $wgUseMemCached ) { - print "Clearing message cache..."; - $wgMemc->delete( "$wgDBname:messages" ); + print 'Clearing message cache...'; + $wgMemc->delete( $wgDBname.':messages' ); print "Done.\n"; } } -function loadArrayFromFile( $filename ) +function loadLanguageFile( $filename ) { $contents = file_get_contents( $filename ); + # Remove header line + $p = strpos( $contents, "\n" ) + 1; + $contents = substr( $contents, $p ); + # Unserialize return unserialize( $contents ); } +function doUpdates() { + global $wgDeferredUpdateList; + foreach ( $wgDeferredUpdateList as $up ) { $up->doUpdate(); } +} + ?> diff --git a/maintenance/archives/importTests.php b/maintenance/archives/importTests.php 
index ef751a759eaf..6e283790dbaa 100644 --- a/maintenance/archives/importTests.php +++ b/maintenance/archives/importTests.php @@ -26,7 +26,7 @@ $testingonly = true; setlocale( LC_ALL, "C" ); -include( "importUseModWiki.php" ); +include_once( "importUseModWiki.php" ); $wgRootDirectory = "./testconvert"; runTests(); diff --git a/maintenance/archives/moveCustomMessages.inc b/maintenance/archives/moveCustomMessages.inc new file mode 100644 index 000000000000..5438194690a1 --- /dev/null +++ b/maintenance/archives/moveCustomMessages.inc @@ -0,0 +1,149 @@ +<?php + +function isTemplateInitialised() { + $sql = "SELECT 1 FROM cur WHERE cur_namespace=" . NS_TEMPLATE . " LIMIT 1"; + $res = wfQuery( $sql, DB_READ ); + return wfNumRows( $res ) ? true : false; +} + +function moveCustomMessages( $phase ) { + global $wgUser, $wgAllMessagesEn, $wgDeferredUpdateList, $wgLang; + global $targets, $template, $replaceCount; + + $wgUser = new User; + $wgUser->setLoaded( true ); # Don't load from DB + $wgUser->setName( "Template namespace initialisation script" ); + $wgUser->addRight( "bot" ); + + wfIgnoreSQLErrors( true ); + + # Compose DB key array + $dbkeys = array(); + + foreach ( $wgAllMessagesEn as $key => $enValue ) { + $title = Title::newFromText( $key ); + $dbkeys[$title->getDBkey()] = 1; + } + + $sql = "SELECT cur_id, cur_title FROM cur WHERE cur_namespace= " . NS_MEDIAWIKI; + $res = wfQuery( $sql, DB_READ ); + + # Compile target array + $targets = array(); + while ( $row = wfFetchObject( $res ) ) { + if ( !array_key_exists( $row->cur_title, $dbkeys ) ) { + $targets[$row->cur_title] = 1; + } + } + wfFreeResult( $res ); + + # Create redirects from destination to source + if ( $phase == 0 || $phase == 1 ) { + print "Creating redirects\n"; + foreach ( $targets as $partial => $dummy ) { + print "$partial..."; + $nt = Title::makeTitle( NS_TEMPLATE, $partial ); + $ot = Title::makeTitle( NS_MEDIAWIKI, $partial ); + + if ( $nt->createRedirect( $ot, "" ) ) { + print "redirected\n"; + } else { + print "not redirected\n"; + } + } + if ( $phase == 0 ) { + print "\nRedirects created. 
Update live script files now.\nPress ENTER to continue.\n\n"; + readconsole(); + } + } + + # Move pages + if ( $phase == 0 || $phase == 2 ) { + print "\nMoving pages...\n"; + foreach ( $targets as $partial => $dummy ) { + wfQuery( "BEGIN", DB_WRITE ); + $ot = Title::makeTitle( NS_MEDIAWIKI, $partial ); + $nt = Title::makeTitle( NS_TEMPLATE, $partial ); + print "$partial..."; + + if ( $ot->moveNoAuth( $nt ) === true ) { + print "moved\n"; + } else { + print "not moved\n"; + } + # Do deferred updates + while ( count( $wgDeferredUpdateList ) ) { + $up = array_pop( $wgDeferredUpdateList ); + $up->doUpdate(); + } + wfQuery( "COMMIT", DB_WRITE ); + } + } + + # Convert text + if ( $phase == 0 || $phase == 3 ) { + print "\nConverting text...\n"; + + $parser = new Parser; + $options = ParserOptions::newFromUser( $wgUser ); + $completedTitles = array(); + $titleChars = Title::legalChars(); + $mediaWiki = $wgLang->getNsText( NS_MEDIAWIKI ); + $template = $wgLang->getNsText( NS_TEMPLATE ); + $linkRegex = "/\[\[$mediaWiki:([$titleChars]*?)\]\]/"; + $msgRegex = "/{{msg:([$titleChars]*?)}}/"; + + foreach ( $targets as $partial => $dummy ) { + $dest = Title::makeTitle( NS_MEDIAWIKI, $partial ); + $linksTo = $dest->getLinksTo(); + foreach( $linksTo as $source ) { + wfQuery( "BEGIN", DB_WRITE ); + $pdbk = $source->getPrefixedDBkey(); + if ( !array_key_exists( $pdbk, $completedTitles ) ) { + $completedTitles[$pdbk] = 1; + $id = $source->getArticleID(); + $row = wfGetArray( 'cur', array( 'cur_text' ), + array( 'cur_id' => $source->getArticleID() ) ); + $parser->startExternalParse( $source, $options, OT_WIKI ); + $text = $parser->strip( $row->cur_text, $stripState, false ); + # {{msg}} -> {{}} + $text = preg_replace( $msgRegex, "{{\$1}}", $text ); + # [[MediaWiki:]] -> [[Template:]] + $text = preg_replace_callback( $linkRegex, "wfReplaceMediaWiki", $text ); + $text = $parser->unstrip( $text, $stripState ); + $text = $parser->unstripNoWiki( $text, $stripState ); + if ( $text != $row->cur_text ) { + print "$pdbk\n"; + $art = new Article( $source ); + $art->updateArticle( $text, "", false, false ); + # Do deferred updates + while ( count( $wgDeferredUpdateList ) ) { + $up = array_pop( $wgDeferredUpdateList ); + $up->doUpdate(); + } + } else { + print "($pdbk)\n"; + } + } + wfQuery( "COMMIT", DB_WRITE ); + } + } + } +} + + +#-------------------------------------------------------------------------------------------------------------- +function wfReplaceMediaWiki( $m ) { + global $targets, $template, $replaceCount; + $title = Title::newFromText( $m[1] ); + $partial = $title->getDBkey(); + + if ( array_key_exists( $partial, $targets ) ) { + $text = "[[$template:{$m[1]}]]"; + } else { + $text = $m[0]; + } + return $text; +} + +?> diff --git a/maintenance/archives/moveCustomMessages.php b/maintenance/archives/moveCustomMessages.php index bcd49743501c..454bc830781d 100644 --- a/maintenance/archives/moveCustomMessages.php +++ b/maintenance/archives/moveCustomMessages.php @@ -8,132 +8,14 @@ # 3. Convert the text to suit the new syntax chdir( ".." 
); -require_once( "commandLine.inc" ); +require_once( "liveCmdLine.inc" ); +require_once( "moveCustomMessages.inc" ); $phase = 0; -if ( is_numeric( @$argv[2] ) && $argv[2] > 0) { - $phase = intval($argv[2]); +if ( is_numeric( @$argv[3] ) && $argv[3] > 0) { + $phase = intval($argv[3]); } -$wgUser = new User; -$wgUser->setLoaded( true ); # Don't load from DB -$wgUser->setName( "Template namespace initialisation script" ); -$wgUser->addRight( "bot" ); +moveCustomMessages( $phase ); -# Compose DB key array -global $wgAllMessagesEn; -$dbkeys = array(); - -foreach ( $wgAllMessagesEn as $key => $enValue ) { - $title = Title::newFromText( $key ); - $dbkeys[$title->getDBkey()] = 1; -} - -$sql = "SELECT cur_id, cur_title FROM cur WHERE cur_namespace= " . NS_MEDIAWIKI; -$res = wfQuery( $sql, DB_READ ); - -# Compile target array -$targets = array(); -while ( $row = wfFetchObject( $res ) ) { - if ( !array_key_exists( $row->cur_title, $dbkeys ) ) { - $targets[$row->cur_title] = 1; - } -} -wfFreeResult( $res ); - -# Create redirects from destination to source -if ( $phase == 0 || $phase == 1 ) { - foreach ( $targets as $partial => $dummy ) { - print "$partial..."; - $nt = Title::makeTitle( NS_TEMPLATE, $partial ); - $ot = Title::makeTitle( NS_MEDIAWIKI, $partial ); - - if ( $nt->createRedirect( $ot, "" ) ) { - print "redirected\n"; - } else { - print "not redirected\n"; - } - } - if ( $phase == 0 ) { - print "\nRedirects created. Update live script files now.\nPress ENTER to continue.\n\n"; - readconsole(); - } -} - -# Move pages -if ( $phase == 0 || $phase == 2 ) { - print "\n"; - foreach ( $targets as $partial => $dummy ) { - $ot = Title::makeTitle( NS_MEDIAWIKI, $partial ); - $nt = Title::makeTitle( NS_TEMPLATE, $partial ); - print "$partial..."; - - if ( $ot->moveNoAuth( $nt ) === true ) { - print "moved\n"; - } else { - print "not moved\n"; - } - # Do deferred updates - while ( count( $wgDeferredUpdateList ) ) { - $up = array_pop( $wgDeferredUpdateList ); - $up->doUpdate(); - } - } -} - -# Convert text -if ( $phase == 0 || $phase == 3 ) { - print "\n"; - - $parser = new Parser; - $options = ParserOptions::newFromUser( $wgUser ); - $completedTitles = array(); - $titleChars = Title::legalChars(); - $mediaWiki = $wgLang->getNsText( NS_MEDIAWIKI ); - $template = $wgLang->getNsText( NS_TEMPLATE ); - $linkRegex = "/\[\[$mediaWiki:([$titleChars]*?)\]\]/"; - $msgRegex = "/{{msg:([$titleChars]*?)}}/"; - - foreach ( $targets as $partial => $dummy ) { - $dest = Title::makeTitle( NS_TEMPLATE, $partial ); - $linksTo = $dest->getLinksTo(); - foreach( $linksTo as $source ) { - $pdbk = $source->getPrefixedDBkey(); - print "$pdbk..."; - if ( !array_key_exists( $pdbk, $completedTitles ) ) { - $completedTitles[$pdbk] = 1; - $id = $source->getArticleID(); - $row = wfGetArray( 'cur', array( 'cur_text' ), - array( 'cur_id' => $source->getArticleID() ) ); - $parser->startExternalParse( $source, $options, OT_WIKI ); - $text = $parser->strip( $row->cur_text, $stripState, false ); - # {{msg}} -> {{}} - $text = preg_replace( $msgRegex, "{{\$1}}", $text ); - # [[MediaWiki:]] -> [[Template:]] - $text = preg_replace_callback( $linkRegex, "wfReplaceMediaWiki", $text ); - $text = $parser->unstrip( $text, $stripState ); - if ( $text != $row->cur_text ) { - wfUpdateArray( 'cur', array( 'cur_text' => $text ), array( 'cur_id' => $id ) ); - print "modified\n"; - } else { - print "not modified\n"; - } - } - } - } -} - -#-------------------------------------------------------------------------------------------------------------- 
-function wfReplaceMediaWiki( $m ) { - global $targets, $template, $replaceCount; - $title = Title::newFromText( $m[1] ); - $partial = $title->getDBkey(); - - if ( array_key_exists( $partial, $targets ) ) { - $text = "[[$template:{$m[1]}]]"; - } else { - $text = $m[0]; - } - return $text; -} ?> diff --git a/maintenance/archives/patch-linkscc-1.3.sql b/maintenance/archives/patch-linkscc-1.3.sql new file mode 100644 index 000000000000..6f9e6313532e --- /dev/null +++ b/maintenance/archives/patch-linkscc-1.3.sql @@ -0,0 +1,6 @@ +-- +-- linkscc table used to cache link lists in easier to digest form. +-- New schema for 1.3 - removes old lcc_title column. +-- May 2004 +-- +ALTER TABLE linkscc DROP COLUMN lcc_title;
\ No newline at end of file diff --git a/maintenance/archives/patch-profiling.sql b/maintenance/archives/patch-profiling.sql new file mode 100644 index 000000000000..ea9974ce3f76 --- /dev/null +++ b/maintenance/archives/patch-profiling.sql @@ -0,0 +1,10 @@ +-- profiling table +-- This is optional + +CREATE TABLE profiling ( + pf_count integer not null default 0, + pf_time float not null default 0, + pf_name varchar(255) not null default '', + UNIQUE KEY pf_name (pf_name) +); + diff --git a/maintenance/archives/patch-rc_ip.sql b/maintenance/archives/patch-rc_ip.sql new file mode 100644 index 000000000000..6106d93380f1 --- /dev/null +++ b/maintenance/archives/patch-rc_ip.sql @@ -0,0 +1,7 @@ +-- Adding the rc_ip field for logging of IP addresses in recentchanges + +ALTER TABLE recentchanges + ADD rc_ip char(15) NOT NULL default '', + ADD INDEX rc_ip (rc_ip); + + diff --git a/maintenance/attribute.php b/maintenance/attribute.php index 56c2d2acb3ec..a6dcf4a67346 100644 --- a/maintenance/attribute.php +++ b/maintenance/attribute.php @@ -1,31 +1,20 @@ <?php +# Script for re-attributing edits +require_once( "commandLine.inc" ); # Parameters - -if ($argc < 4) { +if ( count( $args ) < 2 ) { print "Not enough parameters\n"; - print "Usage: php attribute.php <lang> <source> <destination>\n"; + if ( $wgWikiFarm ) { + print "Usage: php attribute.php <language> <site> <source> <destination>\n"; + } else { + print "Usage: php attribute.php <source> <destination>\n"; + } exit; } -$lang = $argv[1]; -$source = $argv[2]; -$dest = $argv[3]; - -# Initialisation - -$wgCommandLineMode = true; -$DP = "../includes"; - -$sep = strchr( $include_path = ini_get( "include_path" ), ";" ) ? ";" : ":"; -ini_set( "include_path", "$IP$sep$include_path" ); - -require_once( "/apache/htdocs/$lang/w/LocalSettings.php" ); -require_once( "Setup.php" ); - -$wgTitle = Title::newFromText( "Changing attribution script" ); -set_time_limit(0); -$wgCommandLineMode = true; +$source = $args[0]; +$dest = $args[1]; $eSource = wfStrencode( $source ); $eDest = wfStrencode( $dest ); diff --git a/maintenance/commandLine.inc b/maintenance/commandLine.inc index f868b0bfc783..dc895f129cc2 100644 --- a/maintenance/commandLine.inc +++ b/maintenance/commandLine.inc @@ -1,41 +1,121 @@ <?php +# Abort if called from a web server if ( isset( $_SERVER ) && array_key_exists( 'REQUEST_METHOD', $_SERVER ) ) { print "This script must be run from the command line\n"; exit(); } -$wgCommandLineMode = true; +# Process command line arguments +# $options becomes an array with keys set to the option names +# $optionsWithArgs is an array of GNU-style options that take an argument. The arguments are returned +# in the values of $options. + +if ( !isset( $optionsWithArgs ) ) { + $optionsWithArgs = array(); +} + +$self = array_shift( $argv ); +$IP = realpath( dirname( $self ) . "/.." 
); +chdir( $IP ); + +$options = array(); +$args = array(); +for( $arg = reset( $argv ); $arg !== false; $arg = next( $argv ) ) { + if ( substr( $arg, 0, 2 ) == '--' ) { + # Long options + $option = substr( $arg, 2 ); + if ( in_array( $option, $optionsWithArgs ) ) { + $param = next( $argv ); + if ( $param === false ) { + die( "$arg needs an value after it\n" ); + } + $options[$option] = $param; + } else { + $options[$option] = 1; + } + } elseif ( $arg{0} == '-' ) { + # Short options + for ( $p=1; $p<strlen( $arg ); $p++ ) { + $option = $arg{$p}; + if ( in_array( $option, $optionsWithArgs ) ) { + $param = next( $argv ); + if ( $param === false ) { + die( "$arg needs an value after it\n" ); + } + $options[$option] = $param; + } else { + $options[$option] = 1; + } + } + } else { + $args[] = $arg; + } +} + +# General initialisation + +$wgCommandLineMode = true; +# Turn off output buffering if it's on +@ob_end_flush(); $sep = strchr( $include_path = ini_get( "include_path" ), ";" ) ? ";" : ":"; -if ( @$argv[1] && @$argv[1] != "-" ) { - $lang = $argv[1]; + +if ( $sep == ":" && strpos( `hostname -a`, "wikimedia.org" ) !== false ) { + $wgWikiFarm = true; + if ( isset( $args[0] ) ) { + $lang = array_shift( $args ); + } else { + $lang = "aa"; + } + if ( isset( $args[0] ) ) { + $site = array_shift( $args ); + } else { + $site = "wikipedia"; + } + + # This is for the IRC scripts, which now run as the apache user + # The apache user doesn't have access to the wikiadmin_pass command + if ( $_ENV['USER'] != "apache" ) { + $wgDBuser = $wgDBadminuser = "wikiadmin"; + $wgDBpassword = $wgDBadminpassword = trim(`wikiadmin_pass`); + } + putenv( "wikilang=$lang"); - $settingsFile = "/apache/htdocs/{$argv[1]}/w/LocalSettings.php"; - $newpath = "/apache/common/php$sep"; + + $DP = $IP; + ini_set( "include_path", ".:$IP:$IP/includes:$IP/languages:$IP/maintenance" ); + + require_once( "/home/wikipedia/common/php-new/CommonSettings.php" ); } else { - $settingsFile = "../LocalSettings.php"; - $newpath = ""; -} + $wgWikiFarm = false; + $settingsFile = "$IP/LocalSettings.php"; -if ( ! is_readable( $settingsFile ) ) { - print "A copy of your installation's LocalSettings.php\n" . - "must exist in the source directory.\n"; - exit(); + if ( ! is_readable( $settingsFile ) ) { + print "A copy of your installation's LocalSettings.php\n" . 
+ "must exist in the source directory.\n"; + exit(); + } + $wgCommandLineMode = true; + $DP = $IP; + include_once( $settingsFile ); + ini_set( "include_path", ".$sep$IP$sep$IP/includes$sep$IP/languages$sep$IP/maintenance" ); + include_once( "$IP/AdminSettings.php" ); } - +# Turn off output buffering again, it might have been turned on in the settings files +@ob_end_flush(); +# Same with these $wgCommandLineMode = true; -$DP = "../includes"; -include_once( $settingsFile ); -ini_set( "include_path", "../includes$sep../languages$sep$newpath$IP$sep$include_path" ); +$wgDBuser = $wgDBadminuser; +$wgDBpassword = $wgDBadminpassword; + $wgUsePHPTal = false; define("MEDIAWIKI",true); -include_once( "Setup.php" ); -include_once( "./InitialiseMessages.inc" ); -include_once( "../install-utils.inc" ); -$wgTitle = Title::newFromText( "Rebuild messages script" ); -$wgCommandLineMode = true; +require_once( "Setup.php" ); +require_once( "install-utils.inc" ); +$wgTitle = Title::newFromText( "Command line script" ); set_time_limit(0); + ?> diff --git a/maintenance/compressOld.inc b/maintenance/compressOld.inc index ff47e15f96d7..8da4272571c2 100644 --- a/maintenance/compressOld.inc +++ b/maintenance/compressOld.inc @@ -1,7 +1,5 @@ <?php -include_once( "Article.php" ); - function compressOldPages( $start = 0 ) { $chunksize = 50; print "Starting from old_id $start...\n"; diff --git a/maintenance/compressOld.php b/maintenance/compressOld.php index d3b88ddf9927..0d3531376efa 100644 --- a/maintenance/compressOld.php +++ b/maintenance/compressOld.php @@ -1,29 +1,9 @@ <?php -# Rebuild search index table from scratch. This takes several -# hours, depending on the database size and server configuration. +# Compress the old table, old_flags=gzip -if ( ! is_readable( "../LocalSettings.php" ) ) { - print "A copy of your installation's LocalSettings.php\n" . - "must exist in the source directory.\n"; - exit(); -} - -$wgCommandLineMode = true; -$DP = "../includes"; -require_once( "../LocalSettings.php" ); -require_once( "../AdminSettings.php" ); - -$sep = strchr( $include_path = ini_get( "include_path" ), ";" ) ? 
";" : ":"; -ini_set( "include_path", "$IP$sep$include_path" ); - -require_once( "Setup.php" ); -require_once( "./compressOld.inc" ); -$wgTitle = Title::newFromText( "Compress old pages script" ); -set_time_limit(0); - -$wgDBuser = $wgDBadminuser; -$wgDBpassword = $wgDBadminpassword; +require_once( "commandLine.inc" ); +require_once( "compressOld.inc" ); if( !function_exists( "gzdeflate" ) ) { print "You must enable zlib support in PHP to compress old revisions!\n"; diff --git a/maintenance/convertLinks.inc b/maintenance/convertLinks.inc new file mode 100644 index 000000000000..3a25930ff4ca --- /dev/null +++ b/maintenance/convertLinks.inc @@ -0,0 +1,202 @@ +<?php + +function convertLinks() { + print "Converting links table to ID-ID...\n"; + + global $wgLang, $wgDBserver, $wgDBadminuser, $wgDBadminpassword, $wgDBname; + global $noKeys, $logPerformance, $fh; + + $numRows = $tuplesAdded = $numBadLinks = $curRowsRead = 0; #counters etc + $totalTuplesInserted = 0; # total tuples INSERTed into links_temp + + $reportCurReadProgress = true; #whether or not to give progress reports while reading IDs from cur table + $curReadReportInterval = 1000; #number of rows between progress reports + + $reportLinksConvProgress = true; #whether or not to give progress reports during conversion + $linksConvInsertInterval = 1000; #number of rows per INSERT + + $initialRowOffset = 0; + #$finalRowOffset = 0; # not used yet; highest row number from links table to process + + # Overwrite the old links table with the new one. If this is set to false, + # the new table will be left at links_temp. + $overwriteLinksTable = true; + + # Don't create keys, and so allow duplicates in the new links table. + # This gives a huge speed improvement for very large links tables which are MyISAM. (What about InnoDB?) + $noKeys = false; + + + $logPerformance = false; # output performance data to a file + $perfLogFilename = "convLinksPerf.txt"; + #-------------------------------------------------------------------- + + $res = wfQuery( "SELECT l_from FROM links LIMIT 1", DB_READ ); + if ( mysql_field_type( $res, 0 ) == "int" ) { + print "Schema already converted\n"; + return; + } + + $res = wfQuery( "SELECT COUNT(*) AS count FROM links", DB_WRITE ); + $row = wfFetchObject($res); + $numRows = $row->count; + wfFreeResult( $res ); + + if ( $numRows == 0 ) { + print "Updating schema (no rows to convert)...\n"; + createTempTable(); + } else { + if ( $logPerformance ) { $fh = fopen ( $perfLogFilename, "w" ); } + $baseTime = $startTime = getMicroTime(); + # Create a title -> cur_id map + print "Loading IDs from cur table...\n"; + performanceLog ( "Reading $numRows rows from cur table...\n" ); + performanceLog ( "rows read vs seconds elapsed:\n" ); + wfBufferSQLResults( false ); + $res = wfQuery( "SELECT cur_namespace,cur_title,cur_id FROM cur", DB_WRITE ); + $ids = array(); + + while ( $row = wfFetchObject( $res ) ) { + $title = $row->cur_title; + if ( $row->cur_namespace ) { + $title = $wgLang->getNsText( $row->cur_namespace ) . ":$title"; + } + $ids[$title] = $row->cur_id; + $curRowsRead++; + if ($reportCurReadProgress) { + if (($curRowsRead % $curReadReportInterval) == 0) { + performanceLog( $curRowsRead . " " . (getMicroTime() - $baseTime) . "\n" ); + print "\t$curRowsRead rows of cur table read.\n"; + } + } + } + wfFreeResult( $res ); + wfBufferSQLResults( true ); + print "Finished loading IDs.\n\n"; + performanceLog( "Took " . (getMicroTime() - $baseTime) . 
" seconds to load IDs.\n\n" ); + #-------------------------------------------------------------------- + + # Now, step through the links table (in chunks of $linksConvInsertInterval rows), + # convert, and write to the new table. + createTempTable(); + performanceLog( "Resetting timer.\n\n" ); + $baseTime = getMicroTime(); + print "Processing $numRows rows from links table...\n"; + performanceLog( "Processing $numRows rows from links table...\n" ); + performanceLog( "rows inserted vs seconds elapsed:\n" ); + + for ($rowOffset = $initialRowOffset; $rowOffset < $numRows; $rowOffset += $linksConvInsertInterval) { + $sqlRead = "SELECT * FROM links ".wfLimitResult($linksConvInsertInterval,$rowOffset); + $res = wfQuery($sqlRead, DB_READ); + if ( $noKeys ) { + $sqlWrite = array("INSERT INTO links_temp(l_from,l_to) VALUES "); + } else { + $sqlWrite = array("INSERT IGNORE INTO links_temp(l_from,l_to) VALUES "); + } + + $tuplesAdded = 0; # no tuples added to INSERT yet + while ( $row = wfFetchObject($res) ) { + $fromTitle = $row->l_from; + if ( array_key_exists( $fromTitle, $ids ) ) { # valid title + $from = $ids[$fromTitle]; + $to = $row->l_to; + if ( $tuplesAdded != 0 ) { + $sqlWrite[] = ","; + } + $sqlWrite[] = "($from,$to)"; + $tuplesAdded++; + } else { # invalid title + $numBadLinks++; + } + } + wfFreeResult($res); + #print "rowOffset: $rowOffset\ttuplesAdded: $tuplesAdded\tnumBadLinks: $numBadLinks\n"; + if ( $tuplesAdded != 0 ) { + if ($reportLinksConvProgress) { + print "Inserting $tuplesAdded tuples into links_temp..."; + } + wfQuery( implode("",$sqlWrite) , DB_WRITE ); + $totalTuplesInserted += $tuplesAdded; + if ($reportLinksConvProgress) + print " done. Total $totalTuplesInserted tuples inserted.\n"; + performanceLog( $totalTuplesInserted . " " . (getMicroTime() - $baseTime) . "\n" ); + } + } + print "$totalTuplesInserted valid titles and $numBadLinks invalid titles were processed.\n\n"; + performanceLog( "$totalTuplesInserted valid titles and $numBadLinks invalid titles were processed.\n" ); + performanceLog( "Total execution time: " . (getMicroTime() - $startTime) . " seconds.\n" ); + if ( $logPerformance ) { fclose ( $fh ); } + } + #-------------------------------------------------------------------- + + if ( $overwriteLinksTable ) { + $dbConn = Database::newFromParams( $wgDBserver, $wgDBadminuser, $wgDBadminpassword, $wgDBname ); + if (!($dbConn->isOpen())) { + print "Opening connection to database failed.\n"; + return; + } + # Check for existing links_backup, and delete it if it exists. + print "Dropping backup links table if it exists..."; + $dbConn->query( "DROP TABLE IF EXISTS links_backup", DB_WRITE); + print " done.\n"; + + # Swap in the new table, and move old links table to links_backup + print "Swapping tables 'links' to 'links_backup'; 'links_temp' to 'links'..."; + $dbConn->query( "RENAME TABLE links TO links_backup, links_temp TO links", DB_WRITE ); + print " done.\n\n"; + + $dbConn->close(); + print "Conversion complete. The old table remains at links_backup;\n"; + print "delete at your leisure.\n"; + } else { + print "Conversion complete. 
The converted table is at links_temp;\n"; + print "the original links table is unchanged.\n"; + } +} + +#-------------------------------------------------------------------- + +function createTempTable() { + global $wgDBserver, $wgDBadminuser, $wgDBadminpassword, $wgDBname; + global $noKeys; + $dbConn = Database::newFromParams( $wgDBserver, $wgDBadminuser, $wgDBadminpassword, $wgDBname ); + + if (!($dbConn->isOpen())) { + print "Opening connection to database failed.\n"; + return; + } + + print "Dropping temporary links table if it exists..."; + $dbConn->query( "DROP TABLE IF EXISTS links_temp", DB_WRITE); + print " done.\n"; + + print "Creating temporary links table..."; + if ( $noKeys ) { + $dbConn->query( "CREATE TABLE links_temp ( " . + "l_from int(8) unsigned NOT NULL default '0', " . + "l_to int(8) unsigned NOT NULL default '0')", DB_WRITE); + } else { + $dbConn->query( "CREATE TABLE links_temp ( " . + "l_from int(8) unsigned NOT NULL default '0', " . + "l_to int(8) unsigned NOT NULL default '0', " . + "UNIQUE KEY l_from(l_from,l_to), " . + "KEY (l_to))", DB_WRITE); + } + print " done.\n\n"; +} + +function performanceLog( $text ) { + global $logPerformance, $fh; + if ( $logPerformance ) { + fwrite( $fh, $text ); + } +} + +function getMicroTime() { # return time in seconds, with microsecond accuracy + list($usec, $sec) = explode(" ", microtime()); + return ((float)$usec + (float)$sec); +} + + + +?> diff --git a/maintenance/convertLinks.php b/maintenance/convertLinks.php index 8e3bb51799e4..3511e407d5ce 100644 --- a/maintenance/convertLinks.php +++ b/maintenance/convertLinks.php @@ -3,196 +3,8 @@ # The wiki should be put into read-only mode while this script executes require_once( "commandLine.inc" ); -# the below should probably be moved into commandLine.inc at some point -require_once( "../AdminSettings.php" ); +require_once( "convertLinks.inc" ); -$numRows = $tuplesAdded = $numBadLinks = $curRowsRead = 0; #counters etc -$totalTuplesInserted = 0; # total tuples INSERTed into links_temp +convertLinks(); -$reportCurReadProgress = true; #whether or not to give progress reports while reading IDs from cur table -$curReadReportInterval = 1000; #number of rows between progress reports - -$reportLinksConvProgress = true; #whether or not to give progress reports during conversion -$linksConvInsertInterval = 1000; #number of rows per INSERT - -$initialRowOffset = 0; -#$finalRowOffset = 0; # not used yet; highest row number from links table to process - -# Overwrite the old links table with the new one. If this is set to false, -# the new table will be left at links_temp. -$overwriteLinksTable = true; - -# Don't create keys, and so allow duplicates in the new links table. -# This gives a huge speed improvement for very large links tables which are MyISAM. (What about InnoDB?) -$noKeys = false; - - -$logPerformance = false; # output performance data to a file -$perfLogFilename = "convLinksPerf.txt"; -#-------------------------------------------------------------------- - -$res = wfQuery( "SELECT COUNT(*) AS count FROM links", DB_WRITE ); -$row = wfFetchObject($res); -$numRows = $row->count; -wfFreeResult( $res ); - -if ( $numRows == 0 ) { - print "No rows to convert. 
Updating schema...\n"; - createTempTable(); -} else { - $row = wfFetchObject( $res ); - if ( is_numeric( $row->l_from ) ) { - print "Schema already converted\n"; - exit; - } - - if ( $logPerformance ) { $fh = fopen ( $perfLogFilename, "w" ); } - $baseTime = $startTime = getMicroTime(); - # Create a title -> cur_id map - print "Loading IDs from cur table...\n"; - performanceLog ( "Reading $numRows rows from cur table...\n" ); - performanceLog ( "rows read vs seconds elapsed:\n" ); - wfBufferSQLResults( false ); - $res = wfQuery( "SELECT cur_namespace,cur_title,cur_id FROM cur", DB_WRITE ); - $ids = array(); - - while ( $row = wfFetchObject( $res ) ) { - $title = $row->cur_title; - if ( $row->cur_namespace ) { - $title = $wgLang->getNsText( $row->cur_namespace ) . ":$title"; - } - $ids[$title] = $row->cur_id; - $curRowsRead++; - if ($reportCurReadProgress) { - if (($curRowsRead % $curReadReportInterval) == 0) { - performanceLog( $curRowsRead . " " . (getMicroTime() - $baseTime) . "\n" ); - print "\t$curRowsRead rows of cur table read.\n"; - } - } - } - wfFreeResult( $res ); - wfBufferSQLResults( true ); - print "Finished loading IDs.\n\n"; - performanceLog( "Took " . (getMicroTime() - $baseTime) . " seconds to load IDs.\n\n" ); -#-------------------------------------------------------------------- - - # Now, step through the links table (in chunks of $linksConvInsertInterval rows), - # convert, and write to the new table. - createTempTable(); - performanceLog( "Resetting timer.\n\n" ); - $baseTime = getMicroTime(); - print "Processing $numRows rows from links table...\n"; - performanceLog( "Processing $numRows rows from links table...\n" ); - performanceLog( "rows inserted vs seconds elapsed:\n" ); - - for ($rowOffset = $initialRowOffset; $rowOffset < $numRows; $rowOffset += $linksConvInsertInterval) { - $sqlRead = "SELECT * FROM links LIMIT $linksConvInsertInterval OFFSET $rowOffset"; - $res = wfQuery($sqlRead, DB_READ); - if ( $noKeys ) { - $sqlWrite = array("INSERT INTO links_temp(l_from,l_to) VALUES "); - } else { - $sqlWrite = array("INSERT IGNORE INTO links_temp(l_from,l_to) VALUES "); - } - - $tuplesAdded = 0; # no tuples added to INSERT yet - while ( $row = wfFetchObject($res) ) { - $fromTitle = $row->l_from; - if ( array_key_exists( $fromTitle, $ids ) ) { # valid title - $from = $ids[$fromTitle]; - $to = $row->l_to; - if ( $tuplesAdded != 0 ) { - $sqlWrite[] = ","; - } - $sqlWrite[] = "($from,$to)"; - $tuplesAdded++; - } else { # invalid title - $numBadLinks++; - } - } - wfFreeResult($res); - #print "rowOffset: $rowOffset\ttuplesAdded: $tuplesAdded\tnumBadLinks: $numBadLinks\n"; - if ( $tuplesAdded != 0 ) { - if ($reportLinksConvProgress) { - print "Inserting $tuplesAdded tuples into links_temp..."; - } - wfQuery( implode("",$sqlWrite) , DB_WRITE ); - $totalTuplesInserted += $tuplesAdded; - if ($reportLinksConvProgress) - print " done. Total $totalTuplesInserted tuples inserted.\n"; - performanceLog( $totalTuplesInserted . " " . (getMicroTime() - $baseTime) . "\n" ); - } - } - print "$totalTuplesInserted valid titles and $numBadLinks invalid titles were processed.\n\n"; - performanceLog( "$totalTuplesInserted valid titles and $numBadLinks invalid titles were processed.\n" ); - performanceLog( "Total execution time: " . (getMicroTime() - $startTime) . 
" seconds.\n" ); - if ( $logPerformance ) { fclose ( $fh ); } -} -#-------------------------------------------------------------------- - -if ( $overwriteLinksTable ) { - $dbConn = Database::newFromParams( $wgDBserver, $wgDBadminuser, $wgDBadminpassword, $wgDBname ); - if (!($dbConn->isOpen())) { - print "Opening connection to database failed.\n"; - exit; - } - # Check for existing links_backup, and delete it if it exists. - print "Dropping backup links table if it exists..."; - $dbConn->query( "DROP TABLE IF EXISTS links_backup", DB_WRITE); - print " done.\n"; - - # Swap in the new table, and move old links table to links_backup - print "Swapping tables 'links' to 'links_backup'; 'links_temp' to 'links'..."; - $dbConn->query( "RENAME TABLE links TO links_backup, links_temp TO links", DB_WRITE ); - print " done.\n\n"; - - $dbConn->close(); - print "Conversion complete. The old table remains at links_backup;\n"; - print "delete at your leisure.\n"; -} else { - print "Conversion complete. The converted table is at links_temp;\n"; - print "the original links table is unchanged.\n"; -} - -#-------------------------------------------------------------------- - -function createTempTable() { - global $wgDBserver, $wgDBadminuser, $wgDBadminpassword, $wgDBname; - global $noKeys; - $dbConn = Database::newFromParams( $wgDBserver, $wgDBadminuser, $wgDBadminpassword, $wgDBname ); - - if (!($dbConn->isOpen())) { - print "Opening connection to database failed.\n"; - exit; - } - - print "Dropping temporary links table if it exists..."; - $dbConn->query( "DROP TABLE IF EXISTS links_temp", DB_WRITE); - print " done.\n"; - - print "Creating temporary links table..."; - if ( $noKeys ) { - $dbConn->query( "CREATE TABLE links_temp ( " . - "l_from int(8) unsigned NOT NULL default '0', " . - "l_to int(8) unsigned NOT NULL default '0')", DB_WRITE); - } else { - $dbConn->query( "CREATE TABLE links_temp ( " . - "l_from int(8) unsigned NOT NULL default '0', " . - "l_to int(8) unsigned NOT NULL default '0', " . - "UNIQUE KEY l_from(l_from,l_to), " . - "KEY (l_to))", DB_WRITE); - } - print " done.\n\n"; -} - -function performanceLog( $text ) { - global $logPerformance, $fh; - if ( $logPerformance ) { - fwrite( $fh, $text ); - } -} - -function getMicroTime() { # return time in seconds, with microsecond accuracy - list($usec, $sec) = explode(" ", microtime()); - return ((float)$usec + (float)$sec); -} ?> diff --git a/maintenance/dumpMessages.php b/maintenance/dumpMessages.php index e4c54601d0cc..887e1c5a2992 100644 --- a/maintenance/dumpMessages.php +++ b/maintenance/dumpMessages.php @@ -1,17 +1,12 @@ <?php require_once( "commandLine.inc" ); - +$wgMessageCache->disableTransform(); $messages = array(); foreach ( $wgAllMessagesEn as $key => $englishValue ) { $messages[$key] = wfMsg( $key ); } - -if ( count( $argv ) >= 2 ) { - $res = fopen( $argv[2] ); - fwrite( $res, serialize( $messages ) ); -} else { - print serialize( $messages ); -} +print "MediaWiki $wgVersion language file\n"; +print serialize( $messages ); ?> diff --git a/maintenance/eval.php b/maintenance/eval.php new file mode 100755 index 000000000000..6c59d0c38c4f --- /dev/null +++ b/maintenance/eval.php @@ -0,0 +1,16 @@ +<?php +require_once( "commandLine.inc" ); + +do { + $line = readconsole( "> " ); + eval( $line . 
";" ); + if ( function_exists( "readline_add_history" ) ) { + readline_add_history( $line ); + } +} while ( 1 ); + + + + +?> + diff --git a/maintenance/indexes.sql b/maintenance/indexes.sql index ffa0782aeb06..fa0359069e70 100644 --- a/maintenance/indexes.sql +++ b/maintenance/indexes.sql @@ -51,7 +51,8 @@ ALTER TABLE recentchanges ADD INDEX rc_timestamp (rc_timestamp), ADD INDEX rc_namespace_title (rc_namespace, rc_title), ADD INDEX rc_cur_id (rc_cur_id), - ADD INDEX new_name_timestamp(rc_new,rc_namespace,rc_timestamp); + ADD INDEX new_name_timestamp(rc_new,rc_namespace,rc_timestamp), + ADD INDEX rc_ip (rc_ip); ALTER TABLE archive ADD KEY `name_title_timestamp` (`ar_namespace`,`ar_title`,`ar_timestamp`); diff --git a/maintenance/liveCmdLine.inc b/maintenance/liveCmdLine.inc new file mode 100644 index 000000000000..67d09d19c275 --- /dev/null +++ b/maintenance/liveCmdLine.inc @@ -0,0 +1,53 @@ +<?php + +# This is a drop-in replacement for commandLine.inc, for use only on +# the Wikimedia wikis. +# Call your command-line script with the language name and site name, +# e.g. php convertLinks.php aa wikipedia + +if ( isset( $_SERVER ) && array_key_exists( 'REQUEST_METHOD', $_SERVER ) ) { + print "This script must be run from the command line\n"; + exit(); +} + +$wgCommandLineMode = true; +$self = array_shift( $argv ); +# Turn off output buffering if it's on +@ob_end_flush(); + +if ( isset( $argv[0] ) ) { + $lang = array_shift( $argv ); +} else { + $lang = "aa"; +} +if ( isset( $argv[0] ) ) { + $site = array_shift( $argv ); +} else { + $site = "wikipedia"; +} + +# This is for the IRC scripts, which now run as the apache user +if ( $_ENV['USER'] != "apache" ) { + $wgDBadminuser = "wikiadmin"; + $wgDBadminpassword = trim(`wikiadmin_pass`); +} + +$sep = strchr( $include_path = ini_get( "include_path" ), ";" ) ? ";" : ":"; +putenv( "wikilang=$lang"); +$newpath = "/home/wikipedia/common/php-new$sep"; + +$DP = "../includes"; +#ini_set( "include_path", "$newpath$sep$include_path" ); +ini_set( "include_path", "/home/wikipedia/common/php-new:/home/wikipedia/common/php-new/includes" ); + +require_once( "/home/wikipedia/common/php-new/CommonSettings.php" ); + +$wgUsePHPTal = false; + +define("MEDIAWIKI",true); +require_once( "Setup.php" ); +require_once( "../install-utils.inc" ); +$wgTitle = Title::newFromText( "Command line script" ); +$wgCommandLineMode = true; +set_time_limit(0); +?> diff --git a/maintenance/mcc.php b/maintenance/mcc.php index a6b524916ac5..6ebbfc1b35f7 100755 --- a/maintenance/mcc.php +++ b/maintenance/mcc.php @@ -1,10 +1,10 @@ <?php -require_once( "../includes/DefaultSettings.php" ); -require_once( "../LocalSettings.php" ); -require_once( "../includes/MemCachedClient.inc.php" ); -$mcc = new MemCachedClient(); +require_once( "commandLine.inc" ); + +$mcc = new memcached( array('persistant' => true) ); $mcc->set_servers( $wgMemCachedServers ); +$mcc->set_debug( true ); do { $bad = false; @@ -20,7 +20,8 @@ do { $res = $res[$args[1]]; } if ( $res === false ) { - print 'Error: ' . $mcc->error_string() . "\n"; + #print 'Error: ' . $mcc->error_string() . "\n"; + print "MemCached error\n"; } elseif ( is_string( $res ) ) { print "$res\n"; } else { @@ -35,13 +36,15 @@ do { $value = implode( " ", $args ); } if ( !$mcc->set( $key, $value, 0 ) ) { - print 'Error: ' . $mcc->error_string() . "\n"; + #print 'Error: ' . $mcc->error_string() . "\n"; + print "MemCached error\n"; } break; case "delete": $key = implode( " ", $args ); if ( !$mcc->delete( $key ) ) { - print 'Error: ' . $mcc->error_string() . 
"\n"; + #print 'Error: ' . $mcc->error_string() . "\n"; + print "MemCached error\n"; } break; case "quit": @@ -61,19 +64,5 @@ do { } } while ( !$quit ); -function readconsole( $prompt = "" ) { - if ( function_exists( "readline" ) ) { - return readline( $prompt ); - } else { - print $prompt; - $fp = fopen( "php://stdin", "r" ); - $resp = trim( fgets( $fp, 1024 ) ); - fclose( $fp ); - return $resp; - } -} - - - ?> diff --git a/maintenance/rebuildInterwiki.php b/maintenance/rebuildInterwiki.php new file mode 100644 index 000000000000..328e765ea01a --- /dev/null +++ b/maintenance/rebuildInterwiki.php @@ -0,0 +1,207 @@ +<? + +# Rebuild interwiki table using the file on meta and the language list +# Wikimedia specific! +$oldCwd = getcwd(); + +$optionsWithArgs = array( "o" ); +include_once( "commandLine.inc" ); + +class Site { + var $suffix, $lateral, $url; + + function Site( $s, $l, $u ) { + $this->suffix = $s; + $this->lateral = $l; + $this->url = $u; + } + + function getURL( $lang ) { + return "http://$lang.{$this->url}/wiki/\$1"; + } +} + +# Initialise lists of wikis +$sites = array( + 'wiki' => new Site( 'wiki', 'w', 'wikipedia.org' ), + 'wiktionary' => new Site( 'wiktionary', 'wikt', 'wiktionary.org' ) +); +$langlist = array_map( "trim", file( "/home/wikipedia/common/langlist" ) ); + +$specials = array( + 'sourceswiki' => 'sources.wikipedia.org', + 'quotewiki' => 'wikiquote.org', + 'textbookwiki' => 'wikibooks.org', + 'sep11wiki' => 'sep11.wikipedia.org', + 'metawiki' => 'meta.wikipedia.org', +); + +$extraLinks = array( + array( 'm', 'http://meta.wikipedia.org/wiki/$1', 1 ), + array( 'meta', 'http://meta.wikipedia.org/wiki/$1', 1 ), + array( 'sep11', 'http://sep11.wikipedia.org/wiki/$1', 1 ), +); + +$languageAliases = array( + 'zh-cn' => 'zh', + 'zh-tw' => 'zh', +); + +# Extract the intermap from meta + +$row = wfGetArray( "metawiki.cur", array( "cur_text" ), array( "cur_namespace" => 0, "cur_title" => "Interwiki_map" ) ); + +if ( !$row ) { + die( "m:Interwiki_map not found" ); +} + +$lines = explode( "\n", $row->cur_text ); +$iwArray = array(); + +foreach ( $lines as $line ) { + if ( preg_match( '/^\|\s*(.*?)\s*\|\|\s*(.*?)\s*$/', $line, $matches ) ) { + $prefix = $matches[1]; + $url = $matches[2]; + if ( preg_match( '/(wikipedia|wiktionary|wikisource|wikiquote|wikibooks)\.org/', $url ) ) { + $local = 1; + } else { + $local = 0; + } + + $iwArray[] = array( "iw_prefix" => $prefix, "iw_url" => $url, "iw_local" => $local ); + } +} + + +# Insert links into special wikis +# These have intermap links and interlanguage links pointing to wikipedia + +$sql = "-- Generated by rebuildInterwiki.php"; + +foreach ( $specials as $db => $host ) { + $sql .= "\nUSE $db;\n" . + "TRUNCATE TABLE interwiki;\n" . + "INSERT INTO interwiki (iw_prefix, iw_url, iw_local) VALUES \n"; + $first = true; + + # Intermap links + foreach ( $iwArray as $iwEntry ) { + # Suppress links to self + if ( strpos( $iwEntry['iw_url'], $host ) === false ) { + $sql .= makeLink( $iwEntry, $first ); + } + } + # w link + $sql .= makeLink( array("w", "http://en.wikipedia.org/wiki/$1", 1 ), $first ); + + # Interlanguage links to wikipedia + $sql .= makeLanguageLinks( $sites['wiki'], $first ); + + # Extra links + foreach ( $extraLinks as $link ) { + $sql .= makeLink( $link, $first ); + } + + $sql .= ";\n"; +} +$sql .= "\n"; + +# Insert links into multilanguage sites + +foreach ( $sites as $site ) { + $sql .= <<<EOS + +--- +--- {$site->suffix} +--- + +EOS; + foreach ( $langlist as $lang ) { + $db = $lang . 
$site->suffix; + $db = str_replace( "-", "_", $db ); + + $sql .= "USE $db;\n" . + "TRUNCATE TABLE interwiki;\n" . + "INSERT INTO interwiki (iw_prefix,iw_url,iw_local) VALUES\n"; + $first = true; + + # Intermap links + foreach ( $iwArray as $iwEntry ) { + # Suppress links to self + if ( strpos( $iwEntry['iw_url'], $site->url ) === false || + strpos( $iwEntry['iw_url'], 'meta.wikipedia.org' ) !== false ) { + $sql .= makeLink( $iwEntry, $first ); + } + } + + # Lateral links + foreach ( $sites as $targetSite ) { + # Suppress link to self + if ( $targetSite->suffix != $site->suffix ) { + $sql .= makeLink( array( $targetSite->lateral, $targetSite->getURL( $lang ), 1 ), $first ); + } + } + + # Interlanguage links + $sql .= makeLanguageLinks( $site, $first ); + + # w link within wikipedias + # Other sites already have it as a lateral link + if ( $site->suffix == "wiki" ) { + $sql .= makeLink( array("w", "http://en.wikipedia.org/wiki/$1", 1), $first ); + } + + # Extra links + foreach ( $extraLinks as $link ){ + $sql .= makeLink( $link, $first ); + } + $sql .= ";\n\n"; + } +} + +# Output +if ( isset( $options['o'] ) ) { + # To file specified with -o + chdir( $oldCwd ); + $file = fopen( $options['o'], "w" ); + fwrite( $file, $sql ); + fclose( $file ); +} else { + # To stdout + print $sql; +} + +# ------------------------------------------------------------------------------------------ + +# Returns part of an INSERT statement, corresponding to all interlanguage links to a particular site +function makeLanguageLinks( &$site, &$first ) { + global $langlist, $languageAliases; + + $sql = ""; + + # Actual languages with their own databases + foreach ( $langlist as $targetLang ) { + $sql .= makeLink( array( $targetLang, $site->getURL( $targetLang ), 1 ), $first ); + } + + # Language aliases + foreach ( $languageAliases as $alias => $lang ) { + $sql .= makeLink( array( $alias, $site->getURL( $lang ), 1 ), $first ); + } + return $sql; +} + +# Make SQL for a single link from an array +function makeLink( $entry, &$first ) { + $sql = ""; + # Add comma + if ( $first ) { + $first = false; + } else { + $sql .= ",\n"; + } + $sql .= "(" . Database::makeList( $entry ) . ")"; + return $sql; +} + +?> diff --git a/maintenance/rebuildMessages.php b/maintenance/rebuildMessages.php index d97d16f452d9..eb549b463032 100755 --- a/maintenance/rebuildMessages.php +++ b/maintenance/rebuildMessages.php @@ -1,48 +1,21 @@ <?php -$wgCommandLineMode = true; -# Turn off output buffering if it's on -@ob_end_flush(); - -$sep = strchr( $include_path = ini_get( "include_path" ), ";" ) ? ";" : ":"; -if ( isset($argv[1]) && $argv[1] ) { - $lang = $argv[1]; - putenv( "wikilang=$lang"); - $settingsFile = "/apache/htdocs/{$argv[1]}/w/LocalSettings.php"; - $newpath = "/apache/common/php$sep"; -} else { - $settingsFile = "../LocalSettings.php"; - $newpath = ""; -} +require_once( "commandLine.inc" ); +include_once( "InitialiseMessages.inc" ); + +$wgTitle = Title::newFromText( "Rebuild messages script" ); -if ( isset($argv[2]) && $argv[2] == "update" ) { - $response = 1; -} elseif ( isset($argv[2]) && $argv[2] == "reinitialise" ) { - $response = 2; +if ( isset( $args[0] ) ) { + $response = array_shift( $args ); + if ( $response == "update" ) { + $response = 1; + } elseif ( $response == "rebuild" ) { + $response = 2; + } } else { $response = 0; } - -if ( ! is_readable( $settingsFile ) ) { - print "A copy of your installation's LocalSettings.php\n" . 
- "must exist in the source directory.\n"; - exit(); -} - -ini_set( "include_path", "../includes$sep../languages$sep$newpath$IP$sep$include_path" ); - -$wgCommandLineMode = true; -$DP = "../includes"; -require_once( $settingsFile ); - -require_once( "Setup.php" ); -require_once( "./InitialiseMessages.inc" ); -require_once( "../install-utils.inc" ); -$wgTitle = Title::newFromText( "Rebuild messages script" ); -$wgCommandLineMode = true; -set_time_limit(0); - -if ( isset($argv) && count( $argv ) >= 3 ) { - $messages = loadArrayFromFile( $argv[3] ); +if ( isset( $args[0] ) ) { + $messages = loadLanguageFile( array_shift( $args ) ); } else { $messages = false; } diff --git a/maintenance/rebuildall.php b/maintenance/rebuildall.php index a9d0a9760430..d9ec307c06c0 100644 --- a/maintenance/rebuildall.php +++ b/maintenance/rebuildall.php @@ -3,31 +3,21 @@ # Rebuild link tracking tables from scratch. This takes several # hours, depending on the database size and server configuration. -if ( ! is_readable( "../LocalSettings.php" ) ) { - print "A copy of your installation's LocalSettings.php\n" . - "must exist in the source directory.\n"; - exit(); -} - -$wgCommandLineMode = true; -$DP = "../includes"; -require_once( "../LocalSettings.php" ); -require_once( "../AdminSettings.php" ); - -$sep = strchr( $include_path = ini_get( "include_path" ), ";" ) ? ";" : ":"; -ini_set( "include_path", "$IP$sep$include_path" ); - -require_once( "Setup.php" ); -require_once( "./rebuildlinks.inc" ); -require_once( "./rebuildtextindex.inc" ); -require_once( "./rebuildrecentchanges.inc" ); -$wgTitle = Title::newFromText( "Rebuild links script" ); -set_time_limit(0); +require_once( "commandLine.inc" ); + +#require_once( "rebuildlinks.inc" ); +require_once( "refreshlinks.inc" ); +require_once( "rebuildtextindex.inc" ); +require_once( "rebuildrecentchanges.inc" ); $wgDBuser = $wgDBadminuser; $wgDBpassword = $wgDBadminpassword; -rebuildLinkTables(); +# Doesn't work anymore +# rebuildLinkTables(); + +# Use the slow incomplete one instead. It's designed to work in the background +#refreshLinks( 1 ); dropTextIndex(); rebuildTextIndex(); diff --git a/maintenance/rebuildlinks.php b/maintenance/rebuildlinks.php index bc4a291f976c..f47b922dbe3b 100644 --- a/maintenance/rebuildlinks.php +++ b/maintenance/rebuildlinks.php @@ -3,26 +3,10 @@ # Rebuild link tracking tables from scratch. This takes several # hours, depending on the database size and server configuration. -if ( ! is_readable( "../LocalSettings.php" ) ) { - print "A copy of your installation's LocalSettings.php\n" . - "must exist in the source directory.\n"; - exit(); -} - -$wgCommandLineMode = true; -ini_set("implicit_flush", 1); - -$DP = "../includes"; -require_once( "../LocalSettings.php" ); -require_once( "../AdminSettings.php" ); - -$sep = strchr( $include_path = ini_get( "include_path" ), ";" ) ? ";" : ":"; -ini_set( "include_path", "$IP$sep$include_path" ); - -require_once( "Setup.php" ); +require_once( "commandLine.inc" ); require_once( "./rebuildlinks.inc" ); + $wgTitle = Title::newFromText( "Rebuild links script" ); -set_time_limit(0); $wgDBuser = $wgDBadminuser; $wgDBpassword = $wgDBadminpassword; diff --git a/maintenance/rebuildrecentchanges.php b/maintenance/rebuildrecentchanges.php index 3ea838b613c7..6e342b2b1609 100644 --- a/maintenance/rebuildrecentchanges.php +++ b/maintenance/rebuildrecentchanges.php @@ -3,24 +3,9 @@ # Rebuild link tracking tables from scratch. This takes several # hours, depending on the database size and server configuration. -if ( ! 
is_readable( "../LocalSettings.php" ) ) { - print "A copy of your installation's LocalSettings.php\n" . - "must exist in the source directory.\n"; - exit(); -} - -$wgCommandLineMode = true; -$DP = "../includes"; -require_once( "../LocalSettings.php" ); -require_once( "../AdminSettings.php" ); - -$sep = strchr( $include_path = ini_get( "include_path" ), ";" ) ? ";" : ":"; -ini_set( "include_path", "$IP$sep$include_path" ); - -require_once( "Setup.php" ); +require_once( "commandLine.inc" ); require_once( "./rebuildrecentchanges.inc" ); $wgTitle = Title::newFromText( "Rebuild recent changes script" ); -set_time_limit(0); $wgDBuser = $wgDBadminuser; $wgDBpassword = $wgDBadminpassword; diff --git a/maintenance/rebuildtextindex.inc b/maintenance/rebuildtextindex.inc index 65800edf3113..26082263a20a 100644 --- a/maintenance/rebuildtextindex.inc +++ b/maintenance/rebuildtextindex.inc @@ -6,40 +6,50 @@ # Rebuilding is faster if you drop the index and recreate it, # but that will prevent searches from working while it runs. -function dropTextIndex() +define( "RTI_CHUNK_SIZE", 500 ); + +function dropTextIndex( &$database ) { if ( wfIndexExists( "searchindex", "si_title" ) ) { echo "Dropping index...\n"; $sql = "ALTER TABLE searchindex DROP INDEX si_title, DROP INDEX si_text"; - $res = wfQuery($sql, DB_WRITE, "dropTextIndex" ); + $database->query($sql, "dropTextIndex" ); } + # Truncate table, in an attempt to bring the slaves to a consistent state + # (zwinger was accidentally written to) + $database->query( "TRUNCATE TABLE searchindex", "dropTextIndex" ); } -function createTextIndex() +function createTextIndex( &$database ) { echo "Rebuild the index...\n"; $sql = "ALTER TABLE searchindex ADD FULLTEXT si_title (si_title), " . "ADD FULLTEXT si_text (si_text)"; - $res = wfQuery($sql, DB_WRITE, "createTextIndex" ); + $database->query($sql, "createTextIndex" ); } -function rebuildTextIndex() +function rebuildTextIndex( &$database ) { - $sql = "SELECT COUNT(*) AS count FROM cur"; - $res = wfQuery($sql, DB_READ, "rebuildTextIndex" ); + $sql = "SELECT MAX(cur_id) AS count FROM cur"; + $res = $database->query($sql, "rebuildTextIndex" ); $s = wfFetchObject($res); - echo "Rebuilding index fields for {$s->count} pages...\n"; + $count = $s->count; + echo "Rebuilding index fields for {$count} pages...\n"; $n = 0; - $sql = "SELECT cur_id, cur_namespace, cur_title, cur_text FROM cur"; - $res = wfQuery($sql, DB_READ, "rebuildTextIndex" ); - - while( $s = wfFetchObject($res) ) { - $u = new SearchUpdate( $s->cur_id, $s->cur_title, $s->cur_text ); - $u->doUpdate(); - if ( ( (++$n) % 500) == 0) { echo "$n\n"; } + while ( $n < $count ) { + print "$n\n"; + $end = $n + RTI_CHUNK_SIZE - 1; + $sql = "SELECT cur_id, cur_namespace, cur_title, cur_text FROM cur WHERE cur_id BETWEEN $n AND $end"; + $res = $database->query($sql, "rebuildTextIndex" ); + + while( $s = wfFetchObject($res) ) { + $u = new SearchUpdate( $s->cur_id, $s->cur_title, $s->cur_text ); + $u->doUpdate(); + } + wfFreeResult( $res ); + $n += RTI_CHUNK_SIZE; } - wfFreeResult( $res ); } ?> diff --git a/maintenance/rebuildtextindex.php b/maintenance/rebuildtextindex.php index e9366817292f..247fedc523b4 100644 --- a/maintenance/rebuildtextindex.php +++ b/maintenance/rebuildtextindex.php @@ -1,33 +1,16 @@ <?php - # Rebuild search index table from scratch. This takes several # hours, depending on the database size and server configuration. -if ( ! is_readable( "../LocalSettings.php" ) ) { - print "A copy of your installation's LocalSettings.php\n" . 
- "must exist in the source directory.\n"; - exit(); -} - -$wgCommandLineMode = true; -$DP = "../includes"; -require_once( "../LocalSettings.php" ); -require_once( "../AdminSettings.php" ); - -$sep = strchr( $include_path = ini_get( "include_path" ), ";" ) ? ";" : ":"; -ini_set( "include_path", "$IP$sep$include_path" ); - -require_once( "Setup.php" ); -require_once( "./rebuildtextindex.inc" ); +require_once( "commandLine.inc" ); +require_once( "rebuildtextindex.inc" ); $wgTitle = Title::newFromText( "Rebuild text index script" ); -set_time_limit(0); -$wgDBuser = $wgDBadminuser; -$wgDBpassword = $wgDBadminpassword; +$database = Database::newFromParams( $wgDBserver, $wgDBadminuser, $wgDBadminpassword, $wgDBname ); -dropTextIndex(); -rebuildTextIndex(); -createTextIndex(); +dropTextIndex( $database ); +rebuildTextIndex( $database ); +createTextIndex( $database ); print "Done.\n"; exit(); diff --git a/maintenance/refreshLinks.inc b/maintenance/refreshLinks.inc new file mode 100644 index 000000000000..42e1138e0e86 --- /dev/null +++ b/maintenance/refreshLinks.inc @@ -0,0 +1,46 @@ +<?php + +define( "REPORTING_INTERVAL", 50 ); +define( "PAUSE_INTERVAL", 50 ); + +function refreshLinks( $start ) { + global $wgUser, $wgTitle, $wgArticle, $wgEnablePersistentLC, $wgLinkCache, $wgOut; + + $res = wfQuery("SELECT max(cur_id) as m FROM cur", DB_READ); + $row = wfFetchObject( $res ); + $end = $row->m; + + print("Refreshing link table. Starting from cur_id $start of $end.\n"); + + # Don't generate TeX PNGs (lack of a sensible current directory causes errors anyway) + $wgUser->setOption("math", 3); + + for ($id = $start; $id <= $end; $id++) { + if ( !($id % REPORTING_INTERVAL) ) { + print "$id\n"; + } + + if ( !($id % PAUSE_INTERVAL) ) { + sleep(1); + } + + $wgTitle = Title::newFromID( $id ); + if ( is_null( $wgTitle ) ) { + continue; + } + + $wgArticle = new Article( $wgTitle ); + $text = $wgArticle->getContent( true ); + $wgLinkCache = new LinkCache; + $wgOut->addWikiText( $text ); + + if ( $wgEnablePersistentLC ) { + $wgLinkCache->saveToLinkscc( $id, wfStrencode( $wgTitle->getPrefixedDBkey() ) ); + } + + $linksUpdate = new LinksUpdate( $id, $wgTitle->getPrefixedDBkey() ); + $linksUpdate->doDumbUpdate(); + $linksUpdate->fixBrokenLinks(); + } +} +?> diff --git a/maintenance/refreshLinks.php b/maintenance/refreshLinks.php index 089e6a70b5b0..6d6ddc3f0494 100644 --- a/maintenance/refreshLinks.php +++ b/maintenance/refreshLinks.php @@ -1,8 +1,8 @@ <?php -define( "REPORTING_INTERVAL", 50 ); -define( "PAUSE_INTERVAL", 50 ); require_once( "commandLine.inc" ); +require_once( "refreshLinks.inc" ); + error_reporting( E_ALL & (~E_NOTICE) ); @@ -12,42 +12,7 @@ if ($argv[2]) { $start = 1; } -$res = wfQuery("SELECT max(cur_id) as m FROM cur", DB_READ); -$row = wfFetchObject( $res ); -$end = $row->m; - -print("Refreshing link table. 
Starting from cur_id $start of $end.\n"); - -# Don't generate TeX PNGs (lack of a sensible current directory causes errors anyway) -$wgUser->setOption("math", 3); - -for ($id = $start; $id <= $end; $id++) { - if ( !($id % REPORTING_INTERVAL) ) { - print "$id\n"; - } - - if ( !($id % PAUSE_INTERVAL) ) { - sleep(1); - } - - $wgTitle = Title::newFromID( $id ); - if ( is_null( $wgTitle ) ) { - continue; - } - - $wgArticle = new Article( $wgTitle ); - $text = $wgArticle->getContent( true ); - $wgLinkCache = new LinkCache; - @$wgOut->addWikiText( $text ); - - if ( $wgEnablePersistentLC ) { - $wgLinkCache->saveToLinkscc( $id, wfStrencode( $wgTitle->getPrefixedDBkey() ) ); - } - - $linksUpdate = new LinksUpdate( $id, $wgTitle->getPrefixedDBkey() ); - $linksUpdate->doDumbUpdate(); - $linksUpdate->fixBrokenLinks(); -} +refreshLinks( $start ); exit(); diff --git a/maintenance/remove-brokenlinks.php b/maintenance/remove-brokenlinks.php index 89a30033990e..7faecef568dd 100644 --- a/maintenance/remove-brokenlinks.php +++ b/maintenance/remove-brokenlinks.php @@ -1,25 +1,9 @@ <?php # Remove spurious brokenlinks - -if ( ! is_readable( "../LocalSettings.php" ) ) { - print "A copy of your installation's LocalSettings.php\n" . - "must exist in the source directory.\n"; - exit(); -} - -$wgCommandLineMode = true; -$DP = "../includes"; -require_once( "../LocalSettings.php" ); -require_once( "../AdminSettings.php" ); - -$sep = strchr( $include_path = ini_get( "include_path" ), ";" ) ? ";" : ":"; -ini_set( "include_path", "$IP$sep$include_path" ); - -require_once( "Setup.php" ); +require_once( "commandLine.inc" ); require_once( "./rebuildrecentchanges.inc" ); $wgTitle = Title::newFromText( "Rebuild brokenlinks script" ); -set_time_limit(0); $wgDBuser = $wgDBadminuser; $wgDBpassword = $wgDBadminpassword; diff --git a/maintenance/tables.sql b/maintenance/tables.sql index 4f256c702fd9..da269c6bb990 100644 --- a/maintenance/tables.sql +++ b/maintenance/tables.sql @@ -189,7 +189,8 @@ CREATE TABLE recentchanges ( rc_last_oldid int(10) unsigned NOT NULL default '0', rc_type tinyint(3) unsigned NOT NULL default '0', rc_moved_to_ns tinyint(3) unsigned NOT NULL default '0', - rc_moved_to_title varchar(255) binary NOT NULL default '' + rc_moved_to_title varchar(255) binary NOT NULL default '', + rc_ip char(15) NOT NULL default '' ) PACK_KEYS=1; CREATE TABLE watchlist ( @@ -242,3 +243,10 @@ CREATE TABLE objectcache ( unique key (keyname), key (exptime) ); + +-- For storing revision text +CREATE TABLE blobs ( + blob_index char(255) binary NOT NULL default '', + blob_data longblob NOT NULL default '', + UNIQUE key blob_index (blob_index) +); diff --git a/maintenance/trivialCmdLine.php b/maintenance/trivialCmdLine.php new file mode 100644 index 000000000000..4a0bb39cfa4b --- /dev/null +++ b/maintenance/trivialCmdLine.php @@ -0,0 +1,11 @@ +<?php +require_once( "commandLine.inc" ); +print "DB name: $wgDBname\n"; +print "DB user: $wgDBuser\n"; +print "DB password: $wgDBpassword\n"; + +$res = wfQuery( "SELECT MAX(cur_id) as m FROM cur", DB_READ ); +$row = wfFetchObject( $res ); +print "Max cur_id: {$row->m}\n"; + +?> diff --git a/maintenance/update2.php b/maintenance/update2.php new file mode 100644 index 000000000000..b8712091cde6 --- /dev/null +++ b/maintenance/update2.php @@ -0,0 +1,43 @@ +<?php + +# This script was used to convert the live Wikimedia wikis from 1.2 to 1.3 + +$maintenance = "/home/wikipedia/common/php-new/maintenance"; +require_once( "$maintenance/liveCmdLine.inc" ); +require_once( 
"$maintenance/InitialiseMessages.inc" ); +require_once( "$maintenance/updaters.inc" ); +require_once( "$maintenance/archives/moveCustomMessages.inc" ); +require_once( "$maintenance/convertLinks.inc" ); +require_once( "$maintenance/../install-utils.inc" ); + +$wgDatabase = Database::newFromParams( $wgDBserver, $wgDBadminuser, $wgDBadminpassword, $wgDBname ); +do_ipblocks_update(); flush(); +do_interwiki_update(); flush(); +do_index_update(); flush(); +do_linkscc_update(); flush(); +do_linkscc_1_3_update(); flush(); +do_hitcounter_update(); flush(); +do_recentchanges_update(); flush(); +do_user_real_name_update(); flush(); +do_querycache_update(); flush(); +do_objectcache_update(); flush(); +do_categorylinks_update(); flush(); +initialiseMessages(); flush(); +moveCustomMessages( 1 ); + +if ( file_exists( $wgReadOnlyFile ) ) { + $alreadyExists = true; +} else { + $file = fopen( $wgReadOnlyFile, "w" ); + fwrite( $file, "The database is temporarily locked for a software upgrade\n" ); + fclose( $file ); + $alreadyExists = false; +} + +convertLinks(); + +if ( !$alreadyExists ) { + unlink( $wgReadOnlyFile ); +} + +?> diff --git a/maintenance/updateSearchIndex.inc b/maintenance/updateSearchIndex.inc new file mode 100644 index 000000000000..74d8d3149535 --- /dev/null +++ b/maintenance/updateSearchIndex.inc @@ -0,0 +1,92 @@ +<?php + +function updateSearchIndex( $start, $end, $maxLockTime, $quiet ) { + global $wgQuiet; + global $wgDisableSearchUpdate; + + $fname = "updateSearchIndex"; + + $wgQuiet = $quiet; + $wgDisableSearchUpdate = false; + + output( "Updating searchindex between $start and $end\n" ); + + # Select entries from recentchanges which are on top and between the specified times + $start = wfStrencode( $start ); + $end = wfStrencode( $end ); + + $sql = "SELECT rc_cur_id,rc_type,rc_moved_to_ns,rc_moved_to_title FROM recentchanges + WHERE rc_this_oldid=0 AND rc_timestamp BETWEEN '$start' AND '$end'"; + $res = wfQuery( $sql, DB_READ, $fname ); + + # Lock searchindex + if ( $maxLockTime ) { + output( " --- Waiting for lock ---" ); + lockSearchindex(); + $lockTime = time(); + output( "\n" ); + } + + # Loop through the results and do a search update + while ( $row = wfFetchObject( $res ) ) { + # Allow reads to be processed + if ( $maxLockTime && time() > $lockTime + $maxLockTime ) { + output( " --- Relocking ---" ); + relockSearchindex(); + $lockTime = time(); + output( "\n" ); + } + if ( $row->rc_type == RC_LOG ) { + continue; + } elseif ( $row->rc_type == RC_MOVE || $row->rc_type == RC_MOVE_OVER_REDIRECT ) { + # Rename searchindex entry + $titleObj = Title::makeTitle( $row->rc_moved_to_ns, $row->rc_moved_to_title ); + $title = $titleObj->getPrefixedDBkey(); + output( "$title..." 
); + $u = new SearchUpdate( $row->rc_cur_id, $title, false ); + output( "\n" ); + } else { + # Get cur row + $curRow = wfGetArray( 'cur', array( 'cur_namespace', 'cur_title', 'cur_text' ), array( 'cur_id' => $row->rc_cur_id ) ); + if ( $curRow ) { + $titleObj = Title::makeTitle( $curRow->cur_namespace, $curRow->cur_title ); + $title = $titleObj->getPrefixedDBkey(); + output( $title ); + # Update searchindex + $u = new SearchUpdate( $row->rc_cur_id, $curRow->cur_title, $curRow->cur_text ); + $u->doUpdate(); + output( "\n" ); + } + } + } + + # Unlock searchindex + if ( $maxLockTime ) { + unlockSearchindex(); + } + output( "Done\n" ); +} + +function lockSearchindex() { + wfQuery( "LOCK TABLES searchindex LOW_PRIORITY WRITE, cur READ", DB_WRITE ); +} + +function unlockSearchindex() { + wfQuery( "UNLOCK TABLES", DB_WRITE ); +} + +# Unlock and lock again +# Since the lock is low-priority, queued reads will be able to complete +function relockSearchindex() { + unlockSearchindex(); + lockSearchindex(); +} + +function output( $text ) { + global $wgQuiet; + if ( !$wgQuiet ) { + print $text; + } +} + +?> diff --git a/maintenance/updateSearchIndex.php b/maintenance/updateSearchIndex.php new file mode 100644 index 000000000000..75b789e785fa --- /dev/null +++ b/maintenance/updateSearchIndex.php @@ -0,0 +1,52 @@ +<?php + +# Script for periodic off-peak updating of the search index + +# Usage: php updateSearchIndex.php [-s START] [-e END] [-p POSFILE] [-l LOCKTIME] [-q] +# Where START is the starting timestamp +# END is the ending timestamp +# POSFILE is a file to load timestamps from and save them to, searchUpdate.pos by default +# LOCKTIME is how long the searchindex and cur tables will be locked for +# -q means quiet + +$optionsWithArgs = array( 's', 'e', 'p' ); + +require_once( 'commandLine.inc' ); +require_once( 'updateSearchIndex.inc' ); + +if ( isset( $options['p'] ) ) { + $posFile = $options['p']; +} else { + $posFile = 'searchUpdate.pos'; +} + +if ( isset( $options['e'] ) ) { + $end = $options['e']; +} else { + $end = wfTimestampNow(); +} + +if ( isset( $options['s'] ) ) { + $start = $options['s']; +} else { + $start = @file_get_contents( $posFile ); + if ( !$start ) { + $start = wfUnix2Timestamp( time() - 86400 ); + } +} + +if ( isset( $options['l'] ) ) { + $lockTime = $options['l']; +} else { + $lockTime = 20; +} + +$quiet = (bool)(@$options['q']); + +updateSearchIndex( $start, $end, $lockTime, $quiet ); + +$file = fopen( $posFile, 'w' ); +fwrite( $file, $end ); +fclose( $file ); + +?> diff --git a/maintenance/updaters.inc b/maintenance/updaters.inc index d557ebb5e8fa..8ed663be0c8b 100644 --- a/maintenance/updaters.inc +++ b/maintenance/updaters.inc @@ -102,6 +102,19 @@ function do_linkscc_update() { } } +function do_linkscc_1_3_update() { + // Update linkscc table to 1.3 schema if necessary + global $wgDatabase, $wgVersion; + if( ( strpos( "1.3", $wgVersion ) === 0 ) && $wgDatabase->tableExists( "linkscc" ) + && $wgDatabase->fieldExists( "linkscc", "lcc_title" ) ) { + echo "Altering lcc_title field from linkscc table... "; + dbsource( "maintenance/archives/patch-linkscc-1.3.sql", $wgDatabase ); + echo "ok\n"; + } else { + echo "...linkscc is up to date, or does not exist. 
Good.\n"; + } +} + function do_hitcounter_update() { // Create hitcounter if necessary global $wgDatabase; @@ -121,6 +134,11 @@ function do_recentchanges_update() { dbsource( "maintenance/archives/patch-rc_type.sql" , $wgDatabase ); echo "ok\n"; } + if ( !$wgDatabase->fieldExists( "recentchanges", "rc_ip" ) ) { + echo "Adding rc_ip..."; + dbsource( "maintenance/archives/patch-rc_ip.sql", $wgDatabase ); + echo "ok\n"; + } } function do_user_real_name_update() { @@ -166,5 +184,4 @@ function do_categorylinks_update() { echo "ok\n"; } } - -?>
\ No newline at end of file +?> diff --git a/maintenance/wikipedia-interwiki.sql b/maintenance/wikipedia-interwiki.sql index 36ad2c5b9341..c42b47d8f681 100644 --- a/maintenance/wikipedia-interwiki.sql +++ b/maintenance/wikipedia-interwiki.sql @@ -25,6 +25,7 @@ REPLACE INTO interwiki (iw_prefix,iw_url,iw_local) VALUES ('bo','http://bo.wikipedia.org/wiki/$1',1), ('bs','http://bs.wikipedia.org/wiki/$1',1), ('ca','http://ca.wikipedia.org/wiki/$1',1), +('chr','http://chr.wikipedia.org/wiki/$1',1), ('co','http://co.wikipedia.org/wiki/$1',1), ('cs','http://cs.wikipedia.org/wiki/$1',1), ('csb','http://csb.wikipedia.org/wiki/$1',1), diff --git a/maintenance/wiktionary-interwiki.sql b/maintenance/wiktionary-interwiki.sql index d01dcd4b211d..a2863a87ccd3 100644 --- a/maintenance/wiktionary-interwiki.sql +++ b/maintenance/wiktionary-interwiki.sql @@ -25,6 +25,7 @@ REPLACE INTO interwiki (iw_prefix,iw_url,iw_local) VALUES ('bo','http://bo.wiktionary.org/wiki/$1',1), ('bs','http://bs.wiktionary.org/wiki/$1',1), ('ca','http://ca.wiktionary.org/wiki/$1',1), +('chr','http://chr.wiktionary.org/wiki/$1',1), ('co','http://co.wiktionary.org/wiki/$1',1), ('cs','http://cs.wiktionary.org/wiki/$1',1), ('csb','http://csb.wiktionary.org/wiki/$1',1), |
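Most of the scripts touched above are converted to start with require_once( "commandLine.inc" ) and then read $args, $options and $optionsWithArgs instead of parsing $argv and including Setup.php by hand. The include itself is not part of this diff; the sketch below is only an illustration of the interface these scripts appear to rely on (positional arguments collected in $args, switches in $options, switches that take a value listed in $optionsWithArgs), not the actual commandLine.inc.

<?php
# Illustrative sketch only, not the real commandLine.inc: it mimics the
# calling convention used by the converted scripts above. Switches named
# in $optionsWithArgs (e.g. 's', 'e', 'p' in updateSearchIndex.php) consume
# the following token as their value; other switches (e.g. -q) are treated
# as booleans; everything else ends up in $args as a positional argument.

if ( !isset( $optionsWithArgs ) ) {
	$optionsWithArgs = array();
}
$options = array();
$args = array();

$argv = $_SERVER['argv'];
array_shift( $argv );                 # drop the script name
while ( count( $argv ) ) {
	$arg = array_shift( $argv );
	if ( substr( $arg, 0, 1 ) == '-' ) {
		$option = substr( $arg, 1 );
		if ( in_array( $option, $optionsWithArgs ) ) {
			$options[$option] = array_shift( $argv );   # switch with a value
		} else {
			$options[$option] = true;                   # boolean switch
		}
	} else {
		$args[] = $arg;               # positional argument
	}
}
?>

Under that convention, updateSearchIndex.php could be invoked as, for example, php updateSearchIndex.php -p searchUpdate.pos -l 20 -q, where the position file name and lock time shown here are simply the script's documented defaults.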