.\" Automatically generated by Pod::Man 4.14 (Pod::Simple 3.42) .\" .\" Standard preamble: .\" ======================================================================== .de Sp \" Vertical space (when we can't use .PP) .if t .sp .5v .if n .sp .. .de Vb \" Begin verbatim text .ft CW .nf .ne \\$1 .. .de Ve \" End verbatim text .ft R .fi .. .\" Set up some character translations and predefined strings. \*(-- will .\" give an unbreakable dash, \*(PI will give pi, \*(L" will give a left .\" double quote, and \*(R" will give a right double quote. \*(C+ will .\" give a nicer C++. Capital omega is used to do unbreakable dashes and .\" therefore won't be available. \*(C` and \*(C' expand to `' in nroff, .\" nothing in troff, for use with C<>. .tr \(*W- .ds C+ C\v'-.1v'\h'-1p'\s-2+\h'-1p'+\s0\v'.1v'\h'-1p' .ie n \{\ . ds -- \(*W- . ds PI pi . if (\n(.H=4u)&(1m=24u) .ds -- \(*W\h'-12u'\(*W\h'-12u'-\" diablo 10 pitch . if (\n(.H=4u)&(1m=20u) .ds -- \(*W\h'-12u'\(*W\h'-8u'-\" diablo 12 pitch . ds L" "" . ds R" "" . ds C` "" . ds C' "" 'br\} .el\{\ . ds -- \|\(em\| . ds PI \(*p . ds L" `` . ds R" '' . ds C` . ds C' 'br\} .\" .\" Escape single quotes in literal strings from groff's Unicode transform. .ie \n(.g .ds Aq \(aq .el .ds Aq ' .\" .\" If the F register is >0, we'll generate index entries on stderr for .\" titles (.TH), headers (.SH), subsections (.SS), items (.Ip), and index .\" entries marked with X<> in POD. Of course, you'll have to process the .\" output yourself in some meaningful fashion. .\" .\" Avoid warning from groff about undefined register 'F'. .de IX .. .nr rF 0 .if \n(.g .if rF .nr rF 1 .if (\n(rF:(\n(.g==0)) \{\ . if \nF \{\ . de IX . tm Index:\\$1\t\\n%\t"\\$2" .. . if !\nF==2 \{\ . nr % 0 . nr F 2 . \} . 
\} .\} .rr rF .\" ======================================================================== .\" .IX Title "MediaWiki::DumpFile::Compat::Pages 3pm" .TH MediaWiki::DumpFile::Compat::Pages 3pm "2022-06-15" "perl v5.34.0" "User Contributed Perl Documentation" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. .if n .ad l .nh .SH "NAME" Parse::MediaWikiDump::Pages \- Object capable of processing dump files with a single revision per article .SH "ABOUT" .IX Header "ABOUT" This object is used to access the metadata associated with a MediaWiki instance and provide an iterative interface for extracting the individual articles out of the same. This module does not allow more than one revision for each specific article; to parse a comprehensive dump file use the Parse::MediaWikiDump::Revisions object. .SH "SYNOPSIS" .IX Header "SYNOPSIS" .Vb 1 \& use MediaWiki::DumpFile::Compat; \& \& $pmwd = Parse::MediaWikiDump\->new; \& \& $input = \*(Aqpages\-articles.xml\*(Aq; \& $input = \e*FILEHANDLE; \& \& $pages = $pmwd\->pages($input); \& $pages = $pmwd\->pages(input => $input); \& $pages = $pmwd\->pages(input => $input, fast_mode => 0); \& \& #print the title and id of each article inside the dump file \& while(defined($page = $pages\->next)) { \& print "title \*(Aq", $page\->title, "\*(Aq id ", $page\->id, "\en"; \& } .Ve .SH "METHODS" .IX Header "METHODS" .ie n .IP "$pages\->new" 4 .el .IP "\f(CW$pages\fR\->new" 4 .IX Item "$pages->new" Open the specified MediaWiki dump file. If the single argument to this method is a string it will be used as the path to the file to open. If the argument is a reference to a filehandle the contents will be read from the filehandle as specified. .Sp If more than one argument is supplied the arguments must be a hash of configuration options. The input option is required and is the same as previously described. 
The fast_mode option is optional, defaults to being off, and if set to a true value will cause the parser to run in a mode that is much faster but only provides access to the title and text contents of a page. See the MediaWiki::DumpFile::Pages documentation for details about fast mode. .ie n .IP "$pages\->next" 4 .el .IP "\f(CW$pages\fR\->next" 4 .IX Item "$pages->next" Returns an instance of the next available Parse::MediaWikiDump::page object or returns undef if there are no more articles left. .ie n .IP "$pages\->version" 4 .el .IP "\f(CW$pages\fR\->version" 4 .IX Item "$pages->version" Returns a plain text string of the dump file format revision number. .ie n .IP "$pages\->sitename" 4 .el .IP "\f(CW$pages\fR\->sitename" 4 .IX Item "$pages->sitename" Returns a plain text string that is the name of the MediaWiki instance. .ie n .IP "$pages\->base" 4 .el .IP "\f(CW$pages\fR\->base" 4 .IX Item "$pages->base" Returns the \s-1URL\s0 to the instance's main article in the form of a string. .ie n .IP "$pages\->generator" 4 .el .IP "\f(CW$pages\fR\->generator" 4 .IX Item "$pages->generator" Returns a string containing 'MediaWiki' and a version number of the instance that dumped this file. Example: 'MediaWiki 1.14alpha' .ie n .IP "$pages\->case" 4 .el .IP "\f(CW$pages\fR\->case" 4 .IX Item "$pages->case" Returns a string describing the case sensitivity configured in the instance. .ie n .IP "$pages\->namespaces" 4 .el .IP "\f(CW$pages\fR\->namespaces" 4 .IX Item "$pages->namespaces" Returns a reference to an array of references. Each reference is to another array with the first item being the unique identifier of the namespace and the second element containing a string that is the name of the namespace. .ie n .IP "$pages\->namespaces_names" 4 .el .IP "\f(CW$pages\fR\->namespaces_names" 4 .IX Item "$pages->namespaces_names" Returns an array reference; the array contains strings of all the namespaces, each as an element. 
.ie n .IP "$pages\->current_byte" 4 .el .IP "\f(CW$pages\fR\->current_byte" 4 .IX Item "$pages->current_byte" Returns the number of bytes that have been processed so far. .ie n .IP "$pages\->size" 4 .el .IP "\f(CW$pages\fR\->size" 4 .IX Item "$pages->size" Returns the total size of the dump file in bytes. .SS "Scan an article dump file for double redirects that exist in the most recent article revision" .IX Subsection "Scan an article dump file for double redirects that exist in the most recent article revision" .Vb 1 \& #!/usr/bin/perl \& \& #progress information goes to STDERR, a list of double redirects found \& #goes to STDOUT \& \& binmode(STDOUT, ":utf8"); \& binmode(STDERR, ":utf8"); \& \& use strict; \& use warnings; \& use MediaWiki::DumpFile::Compat; \& \& my $file = shift(@ARGV); \& my $pmwd = Parse::MediaWikiDump\->new; \& my $pages; \& my $page; \& my %redirs; \& my $artcount = 0; \& my $file_size; \& my $start = time; \& \& if (defined($file)) { \& $file_size = (stat($file))[7]; \& $pages = $pmwd\->pages($file); \& } else { \& print STDERR "No file specified, using standard input\en"; \& $pages = $pmwd\->pages(\e*STDIN); \& } \& \& #the case of the first letter of titles is ignored \- force this option \& #because the other values of the case setting are unknown \& die \*(Aqthis program only supports the first\-letter case setting\*(Aq unless \& $pages\->case eq \*(Aqfirst\-letter\*(Aq; \& \& print STDERR "Analyzing articles:\en"; \& \& while(defined($page = $pages\->next)) { \& update_ui() if ++$artcount % 500 == 0; \& \& #main namespace only \& next unless $page\->namespace eq \*(Aq\*(Aq; \& next unless defined($page\->redirect); \& \& my $title = case_fixer($page\->title); \& #create a list of redirects indexed by their original name \& $redirs{$title} = case_fixer($page\->redirect); \& } \& \& my $redir_count = scalar(keys(%redirs)); \& print STDERR "done; searching $redir_count redirects:\en"; \& \& my $count = 0; \& \& #if a redirect location is 
also a key to the index we have a double redirect \& foreach my $key (keys(%redirs)) { \& my $redirect = $redirs{$key}; \& \& if (defined($redirs{$redirect})) { \& print "$key\en"; \& $count++; \& } \& } \& \& print STDERR "discovered $count double redirects\en"; \& \& #removes any case sensitivity from the very first letter of the title \& #but not from the optional namespace name \& sub case_fixer { \& my $title = shift; \& \& #check for namespace \& if ($title =~ /^(.+?):(.+)/) { \& $title = $1 . \*(Aq:\*(Aq . ucfirst($2); \& } else { \& $title = ucfirst($title); \& } \& \& return $title; \& } \& \& sub pretty_bytes { \& my $bytes = shift; \& my $pretty = int($bytes) . \*(Aq bytes\*(Aq; \& \& if (($bytes = $bytes / 1024) > 1) { \& $pretty = int($bytes) . \*(Aq kilobytes\*(Aq; \& } \& \& if (($bytes = $bytes / 1024) > 1) { \& $pretty = sprintf("%0.2f", $bytes) . \*(Aq megabytes\*(Aq; \& } \& \& if (($bytes = $bytes / 1024) > 1) { \& $pretty = sprintf("%0.4f", $bytes) . \*(Aq gigabytes\*(Aq; \& } \& \& return $pretty; \& } \& \& sub pretty_number { \& my $number = reverse(shift); \& $number =~ s/(...)/$1,/g; \& $number = reverse($number); \& $number =~ s/^,//; \& \& return $number; \& } \& \& sub update_ui { \& my $seconds = time \- $start; \& my $bytes = $pages\->current_byte; \& \& print STDERR " ", pretty_number($artcount), " articles; "; \& print STDERR pretty_bytes($bytes), " processed; "; \& \& if (defined($file_size)) { \& my $percent = int($bytes / $file_size * 100); \& \& print STDERR "$percent% completed\en"; \& } else { \& my $bytes_per_second = int($bytes / $seconds); \& print STDERR pretty_bytes($bytes_per_second), " per second\en"; \& } \& } .Ve .SS "Version 0.4" .IX Subsection "Version 0.4" This class was updated to support version 0.4 dump files from a MediaWiki instance but it does not currently support any of the new information available in those files.