Wikipedia-like list of all content pages - text

Wikipedia uses an "HTML sitemap" to link to every single content page. Because there are so many pages, the list has to be split into many groups so that each index page contains at most roughly 100 links.
This is how Wikipedia does it:
Special:AllPages
The full list of articles is divided into several large groups, each defined by its first and last entry:
"AAA rating" to "early adopter"
"earth" to "lamentation"
"low" to "priest"
...
When you click a single group, its range (e.g. "earth" to "lamentation") is divided in the same way. This is repeated until the current range contains only about 100 articles, which can then be listed directly.
I really like this approach to link lists because it minimizes the number of clicks needed to reach any article: with 100 links per level, three levels of index already cover 100 × 100 × 100 = 1,000,000 articles.
How can you create such an article list automatically?
So my question is: how could one automatically create such an index page, which lets you click down into smaller and smaller ranges until the number of articles in a range is small enough to display them all?
Imagine an array of all article names is given; how would you start programming an index with automatic range splitting?
Array('AAA rating', 'abdicate', ..., 'zero', 'zoo')
It would be great if you could help me. I don't need a perfect solution, just a useful approach. Thank you very much in advance!
Edit: Found the part in Wikipedia's software (MediaWiki) now:
<?php
/**
* Implements Special:Allpages
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License along
* with this program; if not, write to the Free Software Foundation, Inc.,
* 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
* http://www.gnu.org/copyleft/gpl.html
*
* @file
* @ingroup SpecialPage
*/
/**
* Implements Special:Allpages
*
* @ingroup SpecialPage
*/
class SpecialAllpages extends IncludableSpecialPage {
/**
* Maximum number of pages to show on single subpage.
*/
protected $maxPerPage = 345;
/**
* Maximum number of pages to show on single index subpage.
*/
protected $maxLineCount = 100;
/**
* Maximum number of chars to show for an entry.
*/
protected $maxPageLength = 70;
/**
* Determines, which message describes the input field 'nsfrom'.
*/
protected $nsfromMsg = 'allpagesfrom';
function __construct( $name = 'Allpages' ){
parent::__construct( $name );
}
/**
* Entry point : initialise variables and call subfunctions.
*
* @param $par String: becomes "FOO" when called like Special:Allpages/FOO (default NULL)
*/
function execute( $par ) {
global $wgRequest, $wgOut, $wgContLang;
$this->setHeaders();
$this->outputHeader();
$wgOut->allowClickjacking();
# GET values
$from = $wgRequest->getVal( 'from', null );
$to = $wgRequest->getVal( 'to', null );
$namespace = $wgRequest->getInt( 'namespace' );
$namespaces = $wgContLang->getNamespaces();
$wgOut->setPagetitle(
( $namespace > 0 && in_array( $namespace, array_keys( $namespaces) ) ) ?
wfMsg( 'allinnamespace', str_replace( '_', ' ', $namespaces[$namespace] ) ) :
wfMsg( 'allarticles' )
);
if( isset($par) ) {
$this->showChunk( $namespace, $par, $to );
} elseif( isset($from) && !isset($to) ) {
$this->showChunk( $namespace, $from, $to );
} else {
$this->showToplevel( $namespace, $from, $to );
}
}
/**
* HTML for the top form
*
* @param $namespace Integer: a namespace constant (default NS_MAIN).
* @param $from String: dbKey we are starting listing at.
* @param $to String: dbKey we are ending listing at.
*/
function namespaceForm( $namespace = NS_MAIN, $from = '', $to = '' ) {
global $wgScript;
$t = $this->getTitle();
$out = Xml::openElement( 'div', array( 'class' => 'namespaceoptions' ) );
$out .= Xml::openElement( 'form', array( 'method' => 'get', 'action' => $wgScript ) );
$out .= Html::hidden( 'title', $t->getPrefixedText() );
$out .= Xml::openElement( 'fieldset' );
$out .= Xml::element( 'legend', null, wfMsg( 'allpages' ) );
$out .= Xml::openElement( 'table', array( 'id' => 'nsselect', 'class' => 'allpages' ) );
$out .= "<tr>
<td class='mw-label'>" .
Xml::label( wfMsg( 'allpagesfrom' ), 'nsfrom' ) .
" </td>
<td class='mw-input'>" .
Xml::input( 'from', 30, str_replace('_',' ',$from), array( 'id' => 'nsfrom' ) ) .
" </td>
</tr>
<tr>
<td class='mw-label'>" .
Xml::label( wfMsg( 'allpagesto' ), 'nsto' ) .
" </td>
<td class='mw-input'>" .
Xml::input( 'to', 30, str_replace('_',' ',$to), array( 'id' => 'nsto' ) ) .
" </td>
</tr>
<tr>
<td class='mw-label'>" .
Xml::label( wfMsg( 'namespace' ), 'namespace' ) .
" </td>
<td class='mw-input'>" .
Xml::namespaceSelector( $namespace, null ) . ' ' .
Xml::submitButton( wfMsg( 'allpagessubmit' ) ) .
" </td>
</tr>";
$out .= Xml::closeElement( 'table' );
$out .= Xml::closeElement( 'fieldset' );
$out .= Xml::closeElement( 'form' );
$out .= Xml::closeElement( 'div' );
return $out;
}
/**
* @param $namespace Integer (default NS_MAIN)
* @param $from String: list all pages from this name
* @param $to String: list all pages to this name
*/
function showToplevel( $namespace = NS_MAIN, $from = '', $to = '' ) {
global $wgOut;
# TODO: Either make this *much* faster or cache the title index points
# in the querycache table.
$dbr = wfGetDB( DB_SLAVE );
$out = "";
$where = array( 'page_namespace' => $namespace );
$from = Title::makeTitleSafe( $namespace, $from );
$to = Title::makeTitleSafe( $namespace, $to );
$from = ( $from && $from->isLocal() ) ? $from->getDBkey() : null;
$to = ( $to && $to->isLocal() ) ? $to->getDBkey() : null;
if( isset($from) )
$where[] = 'page_title >= '.$dbr->addQuotes( $from );
if( isset($to) )
$where[] = 'page_title <= '.$dbr->addQuotes( $to );
global $wgMemc;
$key = wfMemcKey( 'allpages', 'ns', $namespace, $from, $to );
$lines = $wgMemc->get( $key );
$count = $dbr->estimateRowCount( 'page', '*', $where, __METHOD__ );
$maxPerSubpage = intval($count/$this->maxLineCount);
$maxPerSubpage = max($maxPerSubpage,$this->maxPerPage);
if( !is_array( $lines ) ) {
$options = array( 'LIMIT' => 1 );
$options['ORDER BY'] = 'page_title ASC';
$firstTitle = $dbr->selectField( 'page', 'page_title', $where, __METHOD__, $options );
$lastTitle = $firstTitle;
# This array is going to hold the page_titles in order.
$lines = array( $firstTitle );
# If we are going to show n rows, we need n+1 queries to find the relevant titles.
$done = false;
while( !$done ) {
// Fetch the last title of this chunk and the first of the next
$chunk = ( $lastTitle === false )
? array()
: array( 'page_title >= ' . $dbr->addQuotes( $lastTitle ) );
$res = $dbr->select( 'page', /* FROM */
'page_title', /* WHAT */
array_merge($where,$chunk),
__METHOD__,
array ('LIMIT' => 2, 'OFFSET' => $maxPerSubpage - 1, 'ORDER BY' => 'page_title ASC')
);
$s = $dbr->fetchObject( $res );
if( $s ) {
array_push( $lines, $s->page_title );
} else {
// Final chunk, but ended prematurely. Go back and find the end.
$endTitle = $dbr->selectField( 'page', 'MAX(page_title)',
array_merge($where,$chunk),
__METHOD__ );
array_push( $lines, $endTitle );
$done = true;
}
$s = $res->fetchObject();
if( $s ) {
array_push( $lines, $s->page_title );
$lastTitle = $s->page_title;
} else {
// This was a final chunk and ended exactly at the limit.
// Rare but convenient!
$done = true;
}
$res->free();
}
$wgMemc->add( $key, $lines, 3600 );
}
// If there are only two or less sections, don't even display them.
// Instead, display the first section directly.
if( count( $lines ) <= 2 ) {
if( !empty($lines) ) {
$this->showChunk( $namespace, $from, $to );
} else {
$wgOut->addHTML( $this->namespaceForm( $namespace, $from, $to ) );
}
return;
}
# At this point, $lines should contain an even number of elements.
$out .= Xml::openElement( 'table', array( 'class' => 'allpageslist' ) );
while( count ( $lines ) > 0 ) {
$inpoint = array_shift( $lines );
$outpoint = array_shift( $lines );
$out .= $this->showline( $inpoint, $outpoint, $namespace );
}
$out .= Xml::closeElement( 'table' );
$nsForm = $this->namespaceForm( $namespace, $from, $to );
# Is there more?
if( $this->including() ) {
$out2 = '';
} else {
if( isset($from) || isset($to) ) {
global $wgUser;
$out2 = Xml::openElement( 'table', array( 'class' => 'mw-allpages-table-form' ) ).
'<tr>
<td>' .
$nsForm .
'</td>
<td class="mw-allpages-nav">' .
$wgUser->getSkin()->link( $this->getTitle(), wfMsgHtml ( 'allpages' ),
array(), array(), 'known' ) .
"</td>
</tr>" .
Xml::closeElement( 'table' );
} else {
$out2 = $nsForm;
}
}
$wgOut->addHTML( $out2 . $out );
}
/**
* Show a line of "ABC to DEF" ranges of articles
*
* @param $inpoint String: lower limit of pagenames
* @param $outpoint String: upper limit of pagenames
* @param $namespace Integer (Default NS_MAIN)
*/
function showline( $inpoint, $outpoint, $namespace = NS_MAIN ) {
global $wgContLang;
$inpointf = htmlspecialchars( str_replace( '_', ' ', $inpoint ) );
$outpointf = htmlspecialchars( str_replace( '_', ' ', $outpoint ) );
// Don't let the length runaway
$inpointf = $wgContLang->truncate( $inpointf, $this->maxPageLength );
$outpointf = $wgContLang->truncate( $outpointf, $this->maxPageLength );
$queryparams = $namespace ? "namespace=$namespace&" : '';
$special = $this->getTitle();
$link = $special->escapeLocalUrl( $queryparams . 'from=' . urlencode($inpoint) . '&to=' . urlencode($outpoint) );
$out = wfMsgHtml( 'alphaindexline',
"$inpointf</td><td>",
"</td><td>$outpointf"
);
return '<tr><td class="mw-allpages-alphaindexline">' . $out . '</td></tr>';
}
/**
* @param $namespace Integer (Default NS_MAIN)
* @param $from String: list all pages from this name (default FALSE)
* @param $to String: list all pages to this name (default FALSE)
*/
function showChunk( $namespace = NS_MAIN, $from = false, $to = false ) {
global $wgOut, $wgUser, $wgContLang, $wgLang;
$sk = $wgUser->getSkin();
$fromList = $this->getNamespaceKeyAndText($namespace, $from);
$toList = $this->getNamespaceKeyAndText( $namespace, $to );
$namespaces = $wgContLang->getNamespaces();
$n = 0;
if ( !$fromList || !$toList ) {
$out = wfMsgWikiHtml( 'allpagesbadtitle' );
} elseif ( !in_array( $namespace, array_keys( $namespaces ) ) ) {
// Show errormessage and reset to NS_MAIN
$out = wfMsgExt( 'allpages-bad-ns', array( 'parseinline' ), $namespace );
$namespace = NS_MAIN;
} else {
list( $namespace, $fromKey, $from ) = $fromList;
list( , $toKey, $to ) = $toList;
$dbr = wfGetDB( DB_SLAVE );
$conds = array(
'page_namespace' => $namespace,
'page_title >= ' . $dbr->addQuotes( $fromKey )
);
if( $toKey !== "" ) {
$conds[] = 'page_title <= ' . $dbr->addQuotes( $toKey );
}
$res = $dbr->select( 'page',
array( 'page_namespace', 'page_title', 'page_is_redirect' ),
$conds,
__METHOD__,
array(
'ORDER BY' => 'page_title',
'LIMIT' => $this->maxPerPage + 1,
'USE INDEX' => 'name_title',
)
);
if( $res->numRows() > 0 ) {
$out = Xml::openElement( 'table', array( 'class' => 'mw-allpages-table-chunk' ) );
while( ( $n < $this->maxPerPage ) && ( $s = $res->fetchObject() ) ) {
$t = Title::makeTitle( $s->page_namespace, $s->page_title );
if( $t ) {
$link = ( $s->page_is_redirect ? '<div class="allpagesredirect">' : '' ) .
$sk->linkKnown( $t, htmlspecialchars( $t->getText() ) ) .
($s->page_is_redirect ? '</div>' : '' );
} else {
$link = '[[' . htmlspecialchars( $s->page_title ) . ']]';
}
if( $n % 3 == 0 ) {
$out .= '<tr>';
}
$out .= "<td style=\"width:33%\">$link</td>";
$n++;
if( $n % 3 == 0 ) {
$out .= "</tr>\n";
}
}
if( ($n % 3) != 0 ) {
$out .= "</tr>\n";
}
$out .= Xml::closeElement( 'table' );
} else {
$out = '';
}
}
if ( $this->including() ) {
$out2 = '';
} else {
if( $from == '' ) {
// First chunk; no previous link.
$prevTitle = null;
} else {
# Get the last title from previous chunk
$dbr = wfGetDB( DB_SLAVE );
$res_prev = $dbr->select(
'page',
'page_title',
array( 'page_namespace' => $namespace, 'page_title < '.$dbr->addQuotes($from) ),
__METHOD__,
array( 'ORDER BY' => 'page_title DESC',
'LIMIT' => $this->maxPerPage, 'OFFSET' => ($this->maxPerPage - 1 )
)
);
# Get first title of previous complete chunk
if( $dbr->numrows( $res_prev ) >= $this->maxPerPage ) {
$pt = $dbr->fetchObject( $res_prev );
$prevTitle = Title::makeTitle( $namespace, $pt->page_title );
} else {
# The previous chunk is not complete, need to link to the very first title
# available in the database
$options = array( 'LIMIT' => 1 );
if ( ! $dbr->implicitOrderby() ) {
$options['ORDER BY'] = 'page_title';
}
$reallyFirstPage_title = $dbr->selectField( 'page', 'page_title',
array( 'page_namespace' => $namespace ), __METHOD__, $options );
# Show the previous link if it s not the current requested chunk
if( $from != $reallyFirstPage_title ) {
$prevTitle = Title::makeTitle( $namespace, $reallyFirstPage_title );
} else {
$prevTitle = null;
}
}
}
$self = $this->getTitle();
$nsForm = $this->namespaceForm( $namespace, $from, $to );
$out2 = Xml::openElement( 'table', array( 'class' => 'mw-allpages-table-form' ) ).
'<tr>
<td>' .
$nsForm .
'</td>
<td class="mw-allpages-nav">' .
$sk->link( $self, wfMsgHtml ( 'allpages' ), array(), array(), 'known' );
# Do we put a previous link ?
if( isset( $prevTitle ) && $pt = $prevTitle->getText() ) {
$query = array( 'from' => $prevTitle->getText() );
if( $namespace )
$query['namespace'] = $namespace;
$prevLink = $sk->linkKnown(
$self,
htmlspecialchars( wfMsg( 'prevpage', $pt ) ),
array(),
$query
);
$out2 = $wgLang->pipeList( array( $out2, $prevLink ) );
}
if( $n == $this->maxPerPage && $s = $res->fetchObject() ) {
# $s is the first link of the next chunk
$t = Title::MakeTitle($namespace, $s->page_title);
$query = array( 'from' => $t->getText() );
if( $namespace )
$query['namespace'] = $namespace;
$nextLink = $sk->linkKnown(
$self,
htmlspecialchars( wfMsg( 'nextpage', $t->getText() ) ),
array(),
$query
);
$out2 = $wgLang->pipeList( array( $out2, $nextLink ) );
}
$out2 .= "</td></tr></table>";
}
$wgOut->addHTML( $out2 . $out );
if( isset($prevLink) or isset($nextLink) ) {
$wgOut->addHTML( '<hr /><p class="mw-allpages-nav">' );
if( isset( $prevLink ) ) {
$wgOut->addHTML( $prevLink );
}
if( isset( $prevLink ) && isset( $nextLink ) ) {
$wgOut->addHTML( wfMsgExt( 'pipe-separator' , 'escapenoentities' ) );
}
if( isset( $nextLink ) ) {
$wgOut->addHTML( $nextLink );
}
$wgOut->addHTML( '</p>' );
}
}
/**
* @param $ns Integer: the namespace of the article
* @param $text String: the name of the article
* @return array( int namespace, string dbkey, string pagename ) or NULL on error
* @static (sort of)
* @access private
*/
function getNamespaceKeyAndText($ns, $text) {
if ( $text == '' )
return array( $ns, '', '' ); # shortcut for common case
$t = Title::makeTitleSafe($ns, $text);
if ( $t && $t->isLocal() ) {
return array( $t->getNamespace(), $t->getDBkey(), $t->getText() );
} else if ( $t ) {
return null;
}
# try again, in case the problem was an empty pagename
$text = preg_replace('/(#|$)/', 'X$1', $text);
$t = Title::makeTitleSafe($ns, $text);
if ( $t && $t->isLocal() ) {
return array( $t->getNamespace(), '', '' );
} else {
return null;
}
}
}
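In short, showToplevel() finds the range boundaries by repeatedly selecting a title at a fixed offset and caching the resulting list. A minimal sketch of that idea with PDO, against a hypothetical articles(title) table (the table and column names are my assumption, not MediaWiki's schema):
<?php
// Sketch: compute [first, last] title pairs so that each range holds at most
// $perRange titles. Assumes a PDO connection $pdo and a hypothetical table
// articles(title) -- not MediaWiki's real schema.
function rangeBoundaries(PDO $pdo, int $perRange): array {
    // Smallest title overall: start of the first range.
    $start = $pdo->query('SELECT MIN(title) FROM articles')->fetchColumn();
    if ($start === null || $start === false) {
        return [];
    }
    $boundaries = [];
    $off = $perRange - 1; // integer, safe to interpolate into the query
    while (true) {
        // Last title of this range and, if present, the first title of the next.
        $stmt = $pdo->prepare(
            "SELECT title FROM articles WHERE title >= ?
             ORDER BY title ASC LIMIT 2 OFFSET $off"
        );
        $stmt->execute([$start]);
        $rows = $stmt->fetchAll(PDO::FETCH_COLUMN);
        if (count($rows) === 0) {
            // Short final range: it ends at the largest remaining title.
            $stmt = $pdo->prepare('SELECT MAX(title) FROM articles WHERE title >= ?');
            $stmt->execute([$start]);
            $boundaries[] = [$start, $stmt->fetchColumn()];
            break;
        }
        $boundaries[] = [$start, $rows[0]];
        if (!isset($rows[1])) {
            break; // the list ended exactly at the range limit
        }
        $start = $rows[1]; // the next range starts here
    }
    return $boundaries; // e.g. [['AAA rating', 'Early adopter'], ['Earth', ...], ...]
}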

Not a great approach, as you don't have a way of stopping when you get to the end of the list. You only want to split the items if there are more items than your maximum (although you may want to add some flexibility there, since you could end up with only two items on a page).
I assume the data would actually come from a database, but I'm using your $items array for ease of display.
At its simplest, assuming the request comes from a web page that sends the start and end index numbers, and that you have checked those numbers are valid and sanitised:
$itemsPerPage = 50; // constant
$itemStep = ($end - $start) / $itemsPerPage;
if ($itemStep < 1) {
    for ($i = $start; $i < $end; $i++) {
        // display these as individual items
        display_link($items[$i]);
    }
} else {
    for ($i = $start; $i < $end; $i += $itemStep) {
        $to = $i + ($itemStep - 1); // find the end part
        if ($to > $end) {
            $to = $end;
        }
        display_to_from($items[$i], $items[$to]);
    }
}
where the display functions render the links as you want. However, one of the problems with doing it like that is that you may want to adjust the items per page, since you run the risk of having a set of, say, 51 items and ending up with one range from 1 to 49 and another from 50 to 51. A more balanced variant is sketched below.
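As a sketch of that adjustment (mine, not part of the code above): decide how many ranges you need first, then size them evenly, so a set of 51 becomes ranges of 26 and 25 rather than a tiny leftover.
// Balanced-split sketch; $start..$end is treated as inclusive here.
$count     = $end - $start + 1;
$numRanges = (int) ceil($count / $itemsPerPage);  // e.g. 51 items, 50 per page -> 2 ranges
$itemStep  = (int) ceil($count / $numRanges);     // -> 26 items per range (26 + 25)
if ($numRanges <= 1) {
    for ($i = $start; $i <= $end; $i++) {
        display_link($items[$i]);
    }
} else {
    for ($i = $start; $i <= $end; $i += $itemStep) {
        $to = min($i + $itemStep - 1, $end);
        display_to_from($items[$i], $items[$to]);
    }
}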
I don't understand why you are arranging it in groups in your pseudocode, as you are going from page to page doing further chops, so you only need the start and end of each section, until you get to the page where all the links will fit.
-- edit
The original was wrong. Now you divide the number of items you have to go through by the maximum number you want to display. If there are 1,000 items, this lists every 20th one; if there are 100,000, every 2,000th. If there are fewer items than the amount you show per page, you can show them all individually.
-- edit again - to add some more about the database
No, you are right, you don't want to load 2,000,000 data records, and you don't have to.
You have two options: you can make a prepared statement such as "select * from articles where article = ?" and fetch the boundary rows one at a time, or you can do it in one block. Assuming a MySQL database and the code above:
$numberArray = "";
for ($i = $start; $i < $end; $i += $itemStep) {
    $to = $i + ($itemStep - 1); // find the end part
    if ($to > $end) {
        $to = $end;
    }
    // display_to_from($items[$i], $items[$to]);
    if ($i != $start) {
        $numberArray .= ", "; // string concatenation (.=), not +=
    }
    $numberArray .= $i . ", " . $to;
}
$sqlQuery = "Select * from articles where article_id in (".$numberArray.")";
... then do the MySQL select and go through the results, using alternate rows as the start and end of each range.
This gives you a query like 'Select * from articles where article_id in (1,49,50,99,100,149... etc)'
Then process that as a normal result set.
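Sketched with PDO and placeholders instead of string concatenation (the articles table and article_id column come from the query above; the title column, the $pdo connection and the display functions are assumptions):
// Build the list of boundary ids (start and end of each range),
// then fetch only those rows with one prepared statement.
$boundaryIds = [];
for ($i = $start; $i < $end; $i += $itemStep) {
    $boundaryIds[] = $i;
    $boundaryIds[] = min($i + $itemStep - 1, $end);
}
// One placeholder per id keeps the query parameterised.
$placeholders = implode(',', array_fill(0, count($boundaryIds), '?'));
$stmt = $pdo->prepare(
    "SELECT article_id, title FROM articles
     WHERE article_id IN ($placeholders)
     ORDER BY article_id"
);
$stmt->execute($boundaryIds);
$rows = $stmt->fetchAll(PDO::FETCH_ASSOC);
// Alternate rows are the start and end of each range.
for ($r = 0; $r + 1 < count($rows); $r += 2) {
    display_to_from($rows[$r]['title'], $rows[$r + 1]['title']);
}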

My approach in pseudo-code:
$items = array('air', 'automatic', 'ball', /* ... */ 'yield', 'zero', 'zoo');
$itemCount = count($items);
$itemsPerPage = 50; // constant
$counter = 0;
$groups = array();
foreach ($items as $item) {
    $groupNumber = floor($counter / $itemsPerPage);
    // assign $item to group $groupNumber
    $groups[$groupNumber][] = $item;
    $counter++;
}
// repeat this procedure recursively for each of the new groups
Do you think this is a good approach? Can you improve or complete it?
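One way to complete that, as a rough sketch: recurse on each group until a group is small enough to print. display_link() and display_range() are placeholders for whatever markup is wanted.
<?php
// Recursive splitting sketch; $items must be sorted alphabetically.
function buildIndex(array $items, int $itemsPerPage = 50): void
{
    $count = count($items);
    if ($count <= $itemsPerPage) {
        // Small enough: print the articles themselves.
        foreach ($items as $item) {
            display_link($item);
        }
        return;
    }
    // Too many: split into at most $itemsPerPage groups and print one
    // "first ... last" range link per group. Clicking such a link would
    // call buildIndex() again on just that group's slice of the array.
    $groupSize = (int) ceil($count / $itemsPerPage);
    foreach (array_chunk($items, $groupSize) as $group) {
        display_range($group[0], end($group));
    }
}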

Related

Get post count from instagram api

I am trying to get the post count of an Instagram account. I have managed to get followers and following, but I can't get the post count right.
<?php
$username = 'instagram';
$response = @file_get_contents( "https://www.instagram.com/$username/?__a=1" );
if ( $response !== false ) {
$data = json_decode( $response, true );
if ( $data !== null ) {
$full_name = $data['graphql']['user']['full_name'];
$follower = $data['graphql']['user']['edge_followed_by']['count'];
$follows = $data['graphql']['user']['edge_follow']['count'];
echo "<p>{$full_name}</p> <p>{$follower} followers {$follows} following.</p>";
}
} else {
echo 'Username not found.';
}
?>
If anyone ever needs the answer, I managed to pull it through...
<?php
$username = 'instagram';
$response = @file_get_contents( "https://www.instagram.com/$username/?__a=1" );
if ( $response !== false ) {
$data = json_decode( $response, true );
if ( $data !== null ) {
$full_name = $data['graphql']['user']['full_name'];
$follower = $data['graphql']['user']['edge_followed_by']['count'];
$follows = $data['graphql']['user']['edge_follow']['count'];
$posts = $data['graphql']['user']['edge_owner_to_timeline_media']['count'];
echo "<h2><a href='https://www.instagram.com/{$username}'>{$full_name}</a></h2>
<p><span>{$posts} posts</span> <span>{$follower} followers</span> <span>{$follows} following</span></p>";
}
} else {
echo 'Username not found.';
}
?>
You can get many options by just going to
https://www.instagram.com/$username/?__a=1
and changing $username to the account you want to look at.
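Note that this ?__a=1 endpoint is unofficial and Instagram has restricted it over time, so the call may return nothing without being logged in. A variant of the same idea with cURL and explicit error handling, as a sketch:
<?php
// Same lookup as above, but failures are reported instead of being
// silenced by @file_get_contents.
function fetchProfileCounts( string $username ): ?array {
    $ch = curl_init( "https://www.instagram.com/{$username}/?__a=1" );
    curl_setopt_array( $ch, array(
        CURLOPT_RETURNTRANSFER => true,
        CURLOPT_FOLLOWLOCATION => true,
        CURLOPT_TIMEOUT        => 10,
    ) );
    $response = curl_exec( $ch );
    $status   = curl_getinfo( $ch, CURLINFO_HTTP_CODE );
    curl_close( $ch );
    if ( $response === false || $status !== 200 ) {
        return null; // blocked, rate-limited or username not found
    }
    $data = json_decode( $response, true );
    if ( !isset( $data['graphql']['user'] ) ) {
        return null; // unexpected payload (e.g. a login page)
    }
    $user = $data['graphql']['user'];
    return array(
        'name'      => $user['full_name'],
        'posts'     => $user['edge_owner_to_timeline_media']['count'],
        'followers' => $user['edge_followed_by']['count'],
        'following' => $user['edge_follow']['count'],
    );
}
?>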

Pagination cannot move next

I want to display records from the database using pagination. I am using the code below; it displays the first set of records fine, but moving to the next page does not load anything.
How can I fix the pagination so that it moves to page 2 and beyond?
<?php
$dbhost = 'localhost';
$dbuser = 'root';
$dbpass = 'admin121';
$rec_limit = 10;
$conn = mysql_connect($dbhost, $dbuser, $dbpass);
if(! $conn ) {
die('Could not connect: ' . mysql_error());
}
mysql_select_db('misdb');
$sql = "SELECT count(S_ID) FROM student";
$retval = mysql_query( $sql, $conn );
if(! $retval ) {
die('Could not get data: ' . mysql_error());
}
$row = mysql_fetch_array($retval, MYSQL_NUM );
$rec_count = $row[0];
if( isset($_GET{'page'}) ) {
$page = $_GET{'page'} + 1;
$offset = $rec_limit * $page ;
} else {
$page = 0;
$offset = 0;
}
$left_rec = $rec_count - ($page * $rec_limit);
$sql = "SELECT S_ID, LastName, FirstName ".
"FROM student ".
"LIMIT $offset, $rec_limit";
$retval = mysql_query( $sql, $conn );
if(! $retval ) {
die('Could not get data: ' . mysql_error());
}
while($row = mysql_fetch_array($retval, MYSQL_ASSOC)) {
echo "EMP ID :{$row['S_ID']} <br> ".
"EMP NAME : {$row['LastName']} <br> ".
"EMP SALARY : {$row['FirstName']} <br> ".
"--------------------------------<br>";
}
if( $page > 0 ) {
    $last = $page - 2;
    echo "<a href=\"$_PHP_SELF?page = $last\">Last 10 Records</a> |";
    echo "<a href=\"$_PHP_SELF?page = $page\">Next 10 Records</a>";
} else if( $page == 1 ) {
    echo "<a href=\"$_PHP_SELF?page = $page\">Next 10 Records</a>";
} else if( $left_rec < $rec_limit ) {
    $last = $page - 2;
    echo "<a href=\"$_PHP_SELF?page = $last\">Last 10 Records</a>";
}
You only need to remove the white space, so change this:
\"$_PHP_SELF?page = $last\"
Into this:
\"$_PHP_SELF?page=$last\"

Query results and pagination showing first page only

I am building a scraper for educational purposes only. I am using the PHasher class to generate hexadecimal hashes, store them in the database, and then search the stored images for similar images. I wrote something a few days ago to show the results of the similarity search, but I can't figure out why it only shows results on the first page; the other pages don't show them, and when I press the page 1 link nothing is shown at all, even though the results and the number of generated links are correct. I am new to PHP and trying to learn it by doing. Any help is appreciated, thanks in advance.
This is index.php
<?php include('header.php'); ?>
<nav class="navbar navbar-default">
<div class="container-fluid">
<div class="navbar-header">
<a class="navbar-brand" href="index.php">
<img alt="FBpp logo" src="images/logo.png">
</a>
</div>
</div>
</nav>
<div class="container"><!--container-->
<h3>Search Facebook Profiles Pictures For Similar Pictures.</h3>
<p>Please upload a picture, Allowed extensions are (jpg, jpeg, pjpeg, png, x-png) and maximum size is 5 Mb...</p>
<?php
//Require config.php file to connect with mysql server and the db.
require_once('config.php');
//Check if the database is empty or if there are hashed pictures then show the number of hashed pictures.
$check = mysqli_query($con, "SELECT id FROM images ORDER BY id DESC LIMIT 1;");
if(mysqli_num_rows($check) > 0){
$max_id = mysqli_fetch_row($check);
$id = $max_id[0];
echo 'We scraped '; echo '<span class="bg-info">'.$id.'</span>'; echo ' pictures...';
}else{
echo 'The database is empty you need to run scraper.php';
}
?>
<br /><br />
<form action="search.php" method="post" class="form-inline reset-margin" enctype="multipart/form-data">
<div class="form-group">
<input type="file" name="image" class="file-input">
<button type="submit" name="submit" class="btn btn-primary"><span class="glyphicon glyphicon-search" aria-hidden="true"></span></button>
</div>
</form>
<br />
<?php include('footer.php'); ?>
This is search.php
<?php
include('header.php');
//Require config.php file to connect with mysql server and the db.
require_once('config.php');
include_once('classes/phasher.class.php');
$I = PHasher::Instance();
require_once('classes/paginator.class.php');
$limit = ( isset( $_GET['limit'] ) ) ? $_GET['limit'] : 10;
$page = ( isset( $_GET['page'] ) ) ? $_GET['page'] : 1;
$links = ( isset( $_GET['links'] ) ) ? $_GET['links'] : 7;
if(isset($_POST['submit'])){
$allowedExts = array('jpg', 'jpeg', 'pjpeg', 'png', 'x-png');
$temp = explode(".", $_FILES["image"]["name"]);
$extension = end($temp);
//Check if the extenstion of the uploaded picture is correct and the max size is 5*1024*1024 Megabits.
if((($_FILES["image"]["type"] == "image/jpg")
|| ($_FILES["image"]["type"] == "image/jpeg")
|| ($_FILES["image"]["type"] == "image/pjpeg")
|| ($_FILES["image"]["type"] == "image/png")
|| ($_FILES["image"]["type"] == "image/x-png"))
&& ($_FILES["image"]["size"] <= 5242880)
&& in_array($extension, $allowedExts)){
//Check if there is an error in the file, If not upload it to tmp folder then check db for similar pictures.
if($_FILES["image"]["error"] > 0){
echo "Return Code: " .$_FILES["image"]["error"]."<br />";
} else {
move_uploaded_file($_FILES["image"]["tmp_name"], dirname(__file__)."/tmp/".$_FILES["image"]["name"]);
$uploadedImage = dirname(__file__)."/tmp/".$_FILES["image"]["name"];
if($_FILES["image"]["size"] > 0){
$hash = $I->FastHashImage($uploadedImage);
$hex = $I->HashAsString($hash);
$query = "SELECT `fid`,`hash` FROM `images` WHERE `hash` LIKE '%".$hex."%'";
$queryResult = mysqli_query($con, $query);
$numrows = mysqli_num_rows($queryResult);
echo "<p>" .$numrows. " results found for " .$_FILES['image']['name']. "</p><br />";
$Paginator = new Paginator( $con, $query );
$results = $Paginator->getData( $limit, $page );
//Loop through result set.
/*while($row = mysqli_fetch_array($selectQuery)){
if($row['hash'] == $hex){
$fid = $row['fid'];
echo "<a href='https://www.facebook.com/$fid/' target='_blank'><img src='http://localhost/fbpp/test_pics/$fid.jpg' alt='' class='img-responsive'></a><br />";
// echo "<a href='https://www.facebook.com/$fid/' target='_blank'><img src='https://graph.facebook.com/$fid/picture?type=large' alt='' class='img-responsive'></a><br />";
}
}*/
echo '<div class="col-md-10 col-md-offset-1">
<table class="table table-striped table-condensed table-bordered table-rounded"><tbody>';
for( $i = 0; $i < count( $results->data ); $i++ ){
if($results->data[$i]["hash"] == $hex){
echo '<tr>';
$fid = $results->data[$i]['fid'];
echo "<td><a href='https://www.facebook.com/$fid/' target='_blank'><img src='http://localhost/fbpp/test_pics/$fid.jpg' alt='' class='img-responsive'></a></td>";
// echo "<td><a href='https://www.facebook.com/$fid/' target='_blank'><img src='https://graph.facebook.com/$fid/picture?type=large' alt='' class='img-responsive'></a></td>";
echo '</tr>';
}
}
if($numrows <= 10)
{
echo "";
} else {
echo '</tbody></table>';
echo $Paginator->createLinks( $links, 'pagination pagination-sm' );
echo '</div>';
}
}
//Else after checking the file size.
else {
echo "Picture is corrupted the size is 0";
}
} //Else after error check.
}
// This else after checking the picture extenstion and max size.
else {
echo "<p>Please Upload A Picture, Max. size is 5 Mb.</p>";
}
}
include('footer.php');
?>
This is pagination class if you want to look at it:
<?php
class Paginator {
private $_conn;
private $_limit;
private $_page;
private $_query;
private $_total;
public function __construct( $conn, $query ) {
$this->_conn = $conn;
$this->_query = $query;
$rs= $this->_conn->query( $this->_query );
$this->_total = $rs->num_rows;
}
public function getData( $limit = 10, $page = 1 ) {
$this->_limit = $limit;
$this->_page = $page;
if ( $this->_limit == 'all' ) {
$query = $this->_query;
} else {
$query = $this->_query . " LIMIT " . ( ( $this->_page - 1 ) * $this->_limit ) . ", $this->_limit";
}
$rs = $this->_conn->query( $query );
while ( $row = $rs->fetch_assoc() ) {
$results[] = $row;
}
$result = new stdClass();
$result->page = $this->_page;
$result->limit = $this->_limit;
$result->total = $this->_total;
$result->data = $results;
return $result;
}
public function createLinks( $links, $list_class ) {
if ( $this->_limit == 'all' ) {
return '';
}
$last = ceil( $this->_total / $this->_limit );
$start = ( ( $this->_page - $links ) > 0 ) ? $this->_page - $links : 1;
$end = ( ( $this->_page + $links ) < $last ) ? $this->_page + $links : $last;
$html = '<ul class="' . $list_class . '">';
$class = ( $this->_page == 1 ) ? "disabled" : "";
$html .= '<li class="' . $class . '"><a href="?limit=' . $this->_limit . '&page=' . ( $this->_page - 1 ) . '">«</a></li>';
if ( $start > 1 ) {
$html .= '<li><a href="?limit=' . $this->_limit . '&page=1">1</a></li>';
$html .= '<li class="disabled"><span>...</span></li>';
}
for ( $i = $start ; $i <= $end; $i++ ) {
$class = ( $this->_page == $i ) ? "active" : "";
$html .= '<li class="' . $class . '"><a href="?limit=' . $this->_limit . '&page=' . $i . '">' . $i . '</a></li>';
}
if ( $end < $last ) {
$html .= '<li class="disabled"><span>...</span></li>';
$html .= '<li><a href="?limit=' . $this->_limit . '&page=' . $last . '">' . $last . '</a></li>';
}
$class = ( $this->_page == $last ) ? "disabled" : "";
$html .= '<li class="' . $class . '"><a href="?limit=' . $this->_limit . '&page=' . ( $this->_page + 1 ) . '">»</a></li>';
$html .= '</ul>';
return $html;
}
}
For more information you can look at the latest commit on my GitHub account:
github.com/jadolyo/FBpp
Thanks in advance, I appreciate any help.
Well, after a lot of thinking, reading and debugging I rewrote the code from scratch in an OOP style, and I discovered that I needed to use sessions, because the value of the image path was empty or incomplete whenever I pressed any page link of the paginator. Anyway, here is the complete code if someone is looking for an answer to the same problem...
<?php
session_start();
class Search{
function __construct()
{
}
/**
* Upload posted image from index.php to tmp dir
* @return string
*/
function uploadImage()
{
if(isset($_POST['submit']))
{
move_uploaded_file($_FILES['image']['tmp_name'], dirname(__file__).'/tmp/'.$_FILES['image']['name']);
$uploadedImage = dirname(__file__).'/tmp/'.$_FILES['image']['name'];
$_SESSION['image'] = $uploadedImage;
}
return $_SESSION['image'];
}
function imageHashing()
{
include_once('classes/phasher.class.php');
$I = PHasher::Instance();
$hash = $I->FastHashImage(Search::uploadImage());
$hex = $I->HashAsString($hash);
$query = "SELECT `fid`,`hash` FROM `images` WHERE `hash` LIKE '%".$hex."%'";
//echo $query;
return $query;
}
function imageResults()
{
require_once('config.php');
require_once('classes/paginator.class.php');
$limit = ( isset( $_GET['limit'] ) ) ? $_GET['limit'] : 10;
$page = ( isset( $_GET['page'] ) ) ? $_GET['page'] : 1;
$links = ( isset( $_GET['links'] ) ) ? $_GET['links'] : 7;
$queryResults = mysqli_query($con, Search::imageHashing());
$numrows = mysqli_num_rows($queryResults);
echo "<p>" .$numrows. " results found.</p><br />";
$Paginator = new Paginator( $con, Search::imageHashing() );
$results = $Paginator->getData( $limit, $page );
for( $i = 0; $i < count( $results->data ); $i++ ){
echo '<tr>';
$fid = $results->data[$i]['fid'];
echo '<td>';
echo "<a href='https://www.facebook.com/$fid/' target='_blank'>https://www.facebook.com/$fid/</a>";
echo "<a href='https://www.facebook.com/$fid/' target='_blank'><img src='https://graph.facebook.com/$fid/picture?type=large' alt='' class='img-responsive'></a>";
$name = 'https://graph.facebook.com/'.$fid.'?fields=name&access_token=748352698603001|94fc98094ca42f974879c56f3229c5e4';
$response = file_get_contents($name);
$user = json_decode($response,true);
echo $user['name'];
echo '</td>';
echo '</tr>';
}
if($numrows <= 10){
echo "";
} else {
echo '</tbody></table>';
echo $Paginator->createLinks( $links, 'pagination pagination-sm' );
echo '</div>';
}
}
}
//Search::uploadImage();
//Search::imageHashing();
Search::imageResults();
?>

How to import the excel file to mySQL database using php

I want to import data from an Excel file into a MySQL DB using PHP. I have tried the approaches explained in other questions, but nothing worked out for me. Kindly let me know how to import the data into the DB using PHP.
Also, please let me know where to place the Excel file to be uploaded, I mean its location in the file system.
Method 1: use the LOAD DATA command (see the sketch below)
http://blog.tjitjing.com/index.php/2008/02/import-excel-data-into-mysql-in-5-easy.html
Method 2: Excel reader
https://code.google.com/p/php-excel-reader/
Method 3: parseCSV
https://github.com/parsecsv/parsecsv-for-php
Method 4: fgetcsv (PHP 4, PHP 5)
http://in1.php.net/fgetcsv
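For method 1, a minimal sketch: save the sheet as CSV from Excel and let MySQL load it in one statement. The database name comes from the code below; the table, file path and CSV layout are assumptions, the table must already exist with matching columns, and the server must allow local_infile.
<?php
// Method 1 sketch: LOAD DATA LOCAL INFILE via PDO. The CSV can live anywhere
// the PHP process can read; pass its absolute path.
$pdo = new PDO( 'mysql:host=localhost;dbname=MyDatabase', 'root', '', array(
    PDO::MYSQL_ATTR_LOCAL_INFILE => true,  // required for LOAD DATA LOCAL
    PDO::ATTR_ERRMODE            => PDO::ERRMODE_EXCEPTION,
) );
$pdo->exec( "
    LOAD DATA LOCAL INFILE '/path/to/test.csv'
    INTO TABLE MyTable
    FIELDS TERMINATED BY ',' ENCLOSED BY '\"'
    LINES TERMINATED BY '\\n'
    IGNORE 1 LINES
" );
echo 'Import finished';
?>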
For method 4 (fgetcsv), refer to this PHP code, which creates the table from the CSV header row and then inserts the remaining rows:
<?php
//table Name
$tableName = "MyTable";
//database name
$dbName = "MyDatabase";
$conn = mysql_connect("localhost", "root", "") or die(mysql_error());
mysql_select_db($dbName) or die(mysql_error());
//get the first row fields
$fields = "";
$fieldsInsert = "";
if (($handle = fopen("test.csv", "r")) !== FALSE) {
if(($data = fgetcsv($handle, 1000, ",")) !== FALSE) {
$num = count($data);
$fieldsInsert .= '(';
for ($c=0; $c < $num; $c++) {
$fieldsInsert .=($c==0) ? '' : ', ';
$fieldsInsert .="`".$data[$c]."`";
$fields .="`".$data[$c]."` varchar(500) DEFAULT NULL,";
}
$fieldsInsert .= ')';
}
//drop table if exist
if(mysql_num_rows(mysql_query("SHOW TABLES LIKE '".$tableName."'"))>=1) {
mysql_query('DROP TABLE IF EXISTS `'.$tableName.'`') or die(mysql_error());
}
//create table
$sql = "CREATE TABLE `".$tableName."` (
`".$tableName."Id` int(100) unsigned NOT NULL AUTO_INCREMENT,
".$fields."
PRIMARY KEY (`".$tableName."Id`)
) ";
$retval = mysql_query( $sql, $conn );
if(! $retval )
{
die('Could not create table: ' . mysql_error());
}
else {
while(($data = fgetcsv($handle, 1000, ",")) !== FALSE) {
$num = count($data);
$fieldsInsertvalues="";
//get field values of each row
for ($c=0; $c < $num; $c++) {
$fieldsInsertvalues .=($c==0) ? '(' : ', ';
$fieldsInsertvalues .="'".$data[$c]."'";
}
$fieldsInsertvalues .= ')';
//insert the values to table
$sql = "INSERT INTO ".$tableName." ".$fieldsInsert." VALUES ".$fieldsInsertvalues;
mysql_query($sql,$conn);
}
echo 'Table Created';
}
fclose($handle);
}
?>

WP_Query() not paging after adding offset => 1

I am trying to do a simple post query that is paged, but the query results should be offset by 1 (hiding the first result). This works, but when I go to /page/2/ or above, it keeps showing posts 2-10, just as it did on the home page.
Any ideas what is going wrong? Here is the code:
<?php
if ( get_query_var('paged') ) {
$paged = get_query_var('paged');
} else if ( get_query_var('page') ) {
$paged = get_query_var('page');
} else {$paged = 1; }
$args = array( 'post_type' => 'post', 'posts_per_page' => $wp_query->max_num_pages, 'paged' => $paged, 'offset' => 1);
$wp_query = new WP_Query();
$wp_query->query( $args );
while ($wp_query->have_posts()) : $wp_query->the_post();
?>
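This is documented behaviour: once 'offset' is set, WP_Query ignores 'paged', so every page gets the same window of posts. A common workaround (not taken from this thread) is to fold the page number into the offset yourself and use a fixed posts_per_page; a sketch assuming 10 posts per page and 1 hidden post:
<?php
// Skip the first post but keep real pagination.
$paged          = max( 1, (int) get_query_var( 'paged' ), (int) get_query_var( 'page' ) );
$posts_per_page = 10;   // assumption for illustration
$skip           = 1;    // hide the first post everywhere

$args = array(
    'post_type'      => 'post',
    'posts_per_page' => $posts_per_page,
    // Fold the page number into the offset, since 'offset' overrides 'paged'.
    'offset'         => $skip + ( ( $paged - 1 ) * $posts_per_page ),
);
$custom_query = new WP_Query( $args );

while ( $custom_query->have_posts() ) : $custom_query->the_post();
    // loop output here
endwhile;
wp_reset_postdata();

// Caveat: max_num_pages is still based on the un-offset total, so prev/next
// links can show one page too many unless 'found_posts' is filtered as well.
?>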
