Thread: Ajax++ Cloaking
perkiset

Hey NBs-

I see you in here.

Lemmee know when you have enough bandwidth for some code and I'll start barfing it out. It'll take me just a few minutes to get it clean and separate enough to make sense.

Nice to have you here,
/p

perkiset

Here's the code for my ajaxRequestor (I'll post usage in a moment):


// ----------------------------------------------------------- //
//                       ajaxRequestor                          //
// ----------------------------------------------------------- //
function ajaxRequestor() { this.clearAll(); }

ajaxRequestor.prototype.__defaultError = function(sender)
{
var tempStr = "ajaxRequestor Error: " +
              "status: " + this.requestor.status + " " +
              "headers: " + this.requestor.getAllResponseHeaders();
alert(tempStr);
}

ajaxRequestor.prototype.__defaultSuccess = function(sender)
{
alert("ajaxRequestor successfully returned from a request - but there is no handler assigned to receive it");
}

ajaxRequestor.prototype.__decodeString = function(inputStr)
{
var decoded = unescape(inputStr);
decoded = decoded.replace(/\%2F/g, "/");
decoded = decoded.replace(/\%3F/g, "?");
decoded = decoded.replace(/\%3D/g, "=");
decoded = decoded.replace(/\%26/g, "&");
decoded = decoded.replace(/\%40/g, "@");
return decoded;
}

ajaxRequestor.prototype.__encodeString = function(inputStr)
{
var encoded = escape(inputStr);
encoded = encoded.replace(/\//g,"%2F");
encoded = encoded.replace(/\?/g,"%3F");
encoded = encoded.replace(/=/g,"%3D");
encoded = encoded.replace(/&/g,"%26");
encoded = encoded.replace(/@/g,"%40");
return encoded;
}

ajaxRequestor.prototype.__getParams = function()
{
if (this.getNames.length == 0) { return ""; }
var out = (this.url.indexOf('?') == -1) ? '?' : '&';
for (var i=0; i<this.getNames.length; i++)
{
out += this.getNames[i] + '=' + this.getValues[i];
if (i < (this.getNames.length - 1)) { out += '&'; }
}
return out;
}

ajaxRequestor.prototype.__getRequestor = function()
{
if ((this.requestor != null) && (!this.reqIsIE)) { return true; }

try {
this.requestor = new XMLHttpRequest();
this.reqIsIE = false;
return true;
} catch(e) {}

try {
this.requestor = new ActiveXObject("Msxml2.XMLHTTP.6.0");
this.reqIsIE = true;
return true;
} catch(e) {}

try {
this.requestor = new ActiveXObject("Msxml2.XMLHTTP.3.0");
this.reqIsIE = true;
return true;
} catch(e) {}

try {
this.requestor = new ActiveXObject("Msxml2.XMLHTTP");
this.reqIsIE = true;
return true;
} catch(e) {}

try {
this.requestor = new ActiveXObject("Microsoft.XMLHTTP");
this.reqIsIE = true;
return true;
} catch(e) {}

alert('ajaxRequestor Fatal Error: Cannot instantiate an XMLHTTP Object');
return false;
}

ajaxRequestor.prototype.__xmitLog = function(theMsg)
{
var bodyArr = document.getElementsByTagName('body');
var theBody = bodyArr[0];
theBody.appendChild(document.createTextNode(theMsg));
theBody.appendChild(document.createElement('br'));
}

ajaxRequestor.prototype.__onRTS = function()
{
if ((this.requestor.readyState >= 2) && (this.timeoutHandle))
{
clearTimeout(this.timeoutHandle);
this.timeoutHandle = false;
}

    if (this.requestor.readyState == 4)
{
if (this.masterStatus) { this.masterStatus.handleChange(false); }
if ((this.requestor.status==200) || (this.requestor.status==0))
{
this.lastResponse = this.__decodeString(this.requestor.responseText);
if (!this.lastResponse)
{
return false;
}
if (this.xmlHandler)
{
this.xmlHandler.importXML(this.lastResponse);
}
this.onSuccess(this);
} else {
switch(this.requestor.status)
{
case 12029:
case 12030:
case 12031:
case 12152:
case 12159:
// OK: It's the IE SSL bug. Create a timeout to call me again...
//alert('reloading');
var loader = this;
setTimeout( function() { loader.execute.call(loader); }, 10);
break;

default:
this.onError(this);
}
}
this.busy = false;
}
}

ajaxRequestor.prototype.__postParams = function()
{
var out = "";
var varNames = '';
for (var i=0; i<this.postNames.length; i++)
{
if (i > 0) { varNames += '|'; }
varNames += this.postNames[i];
if (i > 0) { out += '&'; }
out += this.postNames[i] + '=' + this.__encodeString(this.postValues[i]);
}
if (out) { out += '&' + 'ajax_var_names=' + varNames; }
return out;
}

ajaxRequestor.prototype.abort = function()
{
if (this.busy)
{
// clear timeout as well
this.requestor.abort();
clearTimeout(this.timeoutHandle);
this.timeoutHandle = false;
this.busy = false;
}
}

ajaxRequestor.prototype.clear = function()
{
this.methodPost = true;
this.__transStatus = 0;
this.__transBusy = false;
    this.lastResponse = new String();
this.selfReference = null;
    this.newRequest();
this.timeoutHandle = false;
this.timeoutMS = 8000;
}

ajaxRequestor.prototype.clearAll = function()
{
    this.xmlHandler = null;
    this.masterStatus = null;
    this.onUnrecognized = new String();

    this.onError = this.__defaultError;
    this.onSuccess = this.__defaultSuccess;
   
    this.clear();
}

ajaxRequestor.prototype.execute = function(timeoutVal)
{
if (this.busy)
{
// clear timeout as well
this.requestor.abort();
this.busy = false;
}

var thisTimeoutVal = this.timeoutMS;
if (timeoutVal != undefined) { thisTimeoutVal = timeoutVal; }

this.__getRequestor();

if (!this.requestor) {
alert("You cannot dispatch a request on this machine (no viable XMLHTTPRequestor)");
return "";
}
if (!this.url) {
alert("You must supply a URL to ajaxRequestor to process a request");
return "";
}

this.busy = true;
var httpMethod = (this.methodPost) ? 'POST' : 'GET';

var theURL = this.url;
theURL += this.__getParams();
this.lastRequest = theURL;

var loader = this;
this.requestor.onreadystatechange = function() { loader.__onRTS.call(loader); }
if (this.masterStatus) { this.masterStatus.handleChange(true); }

// Set a callback to me in case the request takes too long...
this.timeoutHandle = setTimeout( function() { loader.__handleTimeout.call(loader); }, thisTimeoutVal);

this.requestor.open(httpMethod, theURL, true);
this.requestor.setRequestHeader("Content-Type", "application/x-www-form-urlencoded");

this.requestor.send(this.__postParams());
}

ajaxRequestor.prototype.__handleAbort = function()
{
if (this.masterStatus) { this.masterStatus.handleChange(false); }
this.requestor.onreadystatechange = null;
this.requestor.abort();
}

ajaxRequestor.prototype.__handleTimeout = function()
{
this.__handleAbort();
var loader = this;
setTimeout(function() { loader.execute.call(loader); }, 100);
}

ajaxRequestor.prototype.getParam = function(key, value)
{
var ptr = this.getNames.length;
for (var i=0; i<this.getNames.length; i++)
{
if (this.getNames[i] == key) { ptr = i; }
}
this.getNames[ptr] = key;
this.getValues[ptr] = value;
}

ajaxRequestor.prototype.method = function(doPost)
{
this.methodPost = (doPost);
}

ajaxRequestor.prototype.newRequest = function()
{
this.getNames = new Array();
this.getValues = new Array();
this.postNames = new Array();
this.postValues = new Array();
this.url = '';
}

ajaxRequestor.prototype.postParam = function(key, value)
{
var ptr = this.postNames.length;
for (var i=0; i<this.postNames.length; i++)
{
if (this.postNames[i] == key) { ptr = i; }
}
this.postNames[ptr] = key;
this.postValues[ptr] = value;
}

perkiset

Use it like this:

<script>
ajax1 = new ajaxRequestor();
ajax1.url = '/myAjaxURL.php';
ajax1.onSuccess = handleAjax;
ajax1.postParam('paramName', 'paramValue');
ajax1.postParam('anotherParam', 'anotherValue');
ajax1.execute();

function handleAjax(sender)
{
alert(sender.lastResponse);
}
</script>
That's about it. You can instantiate as many as you want and the messaging will stay true to the object that sent it. You do not need to pass any params, I just put that here to show you how I do it. I typically have a param 'request' with a value of "loaditems" or "refreshdata" and such - then I have a single handling page at the server that takes all ajax requests and switch/cases between them.
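For illustration, here's a minimal sketch of that convention from the client side - the handler page name and the request values are hypothetical, not part of the class above:

<script>
// Hypothetical: one server-side handler page, many request types.
// '/handler.php' and the request names are made up for illustration.
var loadReq = new ajaxRequestor();
loadReq.url = '/handler.php';
loadReq.postParam('request', 'loaditems');
loadReq.onSuccess = function(sender) { alert('items: ' + sender.lastResponse); };
loadReq.execute();

var refreshReq = new ajaxRequestor();
refreshReq.url = '/handler.php';
refreshReq.postParam('request', 'refreshdata');
refreshReq.onSuccess = function(sender) { alert('data: ' + sender.lastResponse); };
refreshReq.execute();
</script>

Each requestor carries its own state, so the two responses land in their own handlers even if they come back out of order.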

/p

nop_90

my suggestions.
(hot as shit, brain is fried)
biggest problem with JS is cross browser.

The Prototype JS lib handles all the ajax shit, plus other funky things; it weighs in at like 20k.
It is the de facto standard JS lib on Ruby on Rails, so it has a big following.
nowadays with gzip on the server, that is nothing.

http://www.prototypejs.org/ - the lib itself
http://www.prototypejs.org/learn/introduction-to-ajax - that is how to use it

simplest case, stolen from the example sheet above:

Suppose you have this code in your HTML document:
<h2>Our fantastic products</h2>
<div id="products">(fetching product list ...)</div>

The 'products' container is empty and you want to fill it with HTML returned from an Ajax response. No problem:

new Ajax.Updater('products', '/some_url', { method: 'get' });

some_url returns html which gets shoved into the products div.

this is an overview of the lib:
http://blogs.ebusiness-apps.com/jordan/pages/Prototype%20Library%20Info.htm
It adds useful DOM and string stuff etc to JS, and more importantly makes them cross browser.

Anyway, my 2 cents

perkiset

Hey Nop -

we're working through the remains of a concept NB posted @ syndk8 but we wanted to do more privately. The Ajax component is actually very small - he simply needs to be able to reliably throw a single request so that he can corroborate with an image beacon that the user is a for-reals, non-bot surfer. I'm gonna post the code to drop the cookies and check it all in a little bit as well.
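As a rough sketch of that pairing (the endpoint names here are hypothetical, not from NB's code), the page can fire the image beacon and the ajax request side by side and let the server correlate the two:

<script>
// Hypothetical sketch: send the same token via an image fetch and an
// ajax POST so the server can match them up. '/beacon.gif.php' and
// '/verify.php' are made-up endpoints; the token would be emitted
// into the page server-side.
var token = 'TOKEN_FROM_SERVER';

// 1. Image beacon - any client that loads images will fetch this.
var beacon = new Image();
beacon.src = '/beacon.gif.php?t=' + token;

// 2. Ajax request - only a JS-capable client will ever send this.
var req = new ajaxRequestor();
req.url = '/verify.php';
req.postParam('t', token);
req.onSuccess = function(sender) { /* server's verdict in lastResponse */ };
req.execute();
</script>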

All that being said - I'm gonna look at that lib to see if it might make some of my issues smoother - thanks!

/p

perkiset

Here is a storage class that I use for pushing cookies onto the local site. It is a little heavy for this application, because I use it to store lotsamany cookies in one cookie... but it will do the trick. You can also lighten it up severely (as you can with the ajaxRequestor) to do more single-minded tricks.

// ----------------------------------------------------------- //
//                      localStorage                          //
// ----------------------------------------------------------- //
function localStorage() { this.clear(); }
localStorage.prototype.clear = function()
{
this.fileName = new String();
}
localStorage.prototype._getRaw = function()
{
var rawBuff = document.cookie;
var cookieRegExp = new RegExp("\\b" + this.fileName + "=([^;]*)");
var theValue = cookieRegExp.exec(rawBuff);
if (theValue != null) { theValue = theValue[1]; }
return theValue;
}
localStorage.prototype.asArray = function()
{
var outArr = new Object();
var rawBuff = this._getRaw(this.fileName);
if (rawBuff == undefined) { return false; }
var tempArr = rawBuff.match(/([^&]+)/g);
for (var i=0; i<tempArr.length; i++)
{
var parts = tempArr[i].match(/([^=]+)=(.*$)/);
var varName = parts[1];
var varValue = parts[2];
outArr[varName] = unescape(varValue);
}
return outArr;
}
localStorage.prototype.dropFile = function()
{
if (this.fileName)
{
var expiredDate = new Date();
expiredDate.setMonth(-1);
var writeBuff = this.fileName + "=";
writeBuff += ";expires=" + expiredDate.toGMTString();
document.cookie = writeBuff;
}
}
localStorage.prototype.dropItem = function(theName)
{
var rawBuff = this._getRaw(this.fileName);
if (rawBuff)
{
var stripAttributeRegExp = new RegExp("(^|&)" + theName + "=[^&]*&?");
rawBuff = rawBuff.replace(stripAttributeRegExp, "$1");
if (rawBuff.length != 0)
{
var newBuff = this.fileName + "=" + rawBuff;
document.cookie = newBuff;
} else { this.dropFile(); }
}
}
localStorage.prototype.enabled = function()
{
var cookiesEnabled = window.navigator.cookieEnabled;
if (!cookiesEnabled)
{
document.cookie = "cookiesEnabled=True";
cookiesEnabled = new Boolean(document.cookie).valueOf();
}
return cookiesEnabled;
}
localStorage.prototype.retrieveItem = function(theName)
{
var rawBuff = this._getRaw(this.fileName);
var extractMultiValueCookieRegExp = new RegExp("\\b" + theName + "=([^;&]*)");
var resValue = extractMultiValueCookieRegExp.exec(rawBuff);
if (resValue != null) { resValue = unescape(resValue[1]); }
return resValue;
}
localStorage.prototype.storeItem = function(theName, theValue)
{
var rawBuff = this._getRaw(this.fileName);
if (rawBuff)
{
var stripAttributeRegExp = new RegExp("(^|&)" + theName + "=[^&]*&?");
rawBuff = rawBuff.replace(stripAttributeRegExp, "$1");
if (rawBuff.length != 0) { rawBuff += "&"; }
} else rawBuff = "";

rawBuff += theName + "=" + escape(theValue);
document.cookie = this.fileName + "=" + rawBuff;
}

perkiset

Usage:

storage = new localStorage();
storage.fileName = 'aNameThatIsMeaningfulToYouOrTheApplication';
storage.storeItem('anItemName', 'anItemValue');
var newVal = storage.retrieveItem('anItemName');

thedarkness

I hear people saying IP delivery is the better method of cloaking......

Advantages/disadvantages over this method?

Cheers,
td

nutballs

i still havent gotten around to this, but plan on it.

IP cloaking doesnt catch spiders on new IPs that are specifically out to get you, such as cloak busters.

the advantage with this is that you require enough things that a real surfer would have enabled, and you can protect your final destination even when the IPs fail. Although this is still cloaking, i would actually call it Human Traffic Direction. I dont want to cloak to the spider, i want to direct the humans.

the scenario is very similar to a normal doorway bot detection cloak. you have a content site and a sales site. you dont care if the spiders find either site, you just dont want them to make a connection between the two. doorways would be an example. When a user or a bot lands directly on the sales site, great, whatever, who cares, thats what you want anyway and it was a freebee. But when a user lands on a doorway, you want them directed to the sales site, per normal. When a bot lands on the doorway, you just keep them there.

Not really any different than doorway cloaking, but instead of checking to see if its a bot, flip the paradigm around, and instead, check to see if the surfer is human. Captchas use the same concept, since they cant reliably tell if you are an automation anymore, they now make you-the-human do something to prove it.

My thought was to make your system prove you're human.
can i cookie you?
can you request an image from within the same user session?
can you do JS?
can you do AJAX?

no bot does all that (rough sketch below).
make sense?
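A bare-bones sketch of those checks wired together, using the ajaxRequestor and localStorage classes posted above (the '/check.gif.php' and '/check.php' endpoints and the cookie names are hypothetical):

<script>
// Hypothetical: each "are you human" check as a simple side effect.
var storage = new localStorage();
storage.fileName = 'visitorCheck';

var canCookie = storage.enabled();               // can i cookie you?
if (canCookie) { storage.storeItem('js', '1'); } // can you do JS? (this line ran)

var img = new Image();                           // can you request an image
img.src = '/check.gif.php';                      // in the same user session?

var req = new ajaxRequestor();                   // can you do AJAX?
req.url = '/check.php';
req.postParam('cookie', canCookie ? '1' : '0');
req.execute();
</script>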

i still would do IP cloaking as well, for a while. the thing is, if something like that became mainstream, the bots would start looking for it, you would think, right? so you can still test IPs to catch "smart bots" - and then you know your human director is pooched and it was fun while it lasted.

thedarkness

OK, so both; of course nothing will save you from a Google employee with a browser on an unknown IP, but that's the risk we take I guess (no way around that).

Cheers,
td

nop_90

In a nutshell: simplicity.
If you are going to get nailed, IP cloaking will not save your ass, nor will it fool the savvy internet user.

JS cloaking on the other hand is simple, plus it does not require you to have control over the host.
I personally use a version of full iframe cloaking that i stole off syndk8

thedarkness

So if a human's looking to bust you, they're going to bust you.

Cheers,
td

nop_90

quote author=thedarkness link=topic=12.msg1583#msg1583 date=1179557412
"So if a human's looking to bust you, they're going to bust you."

That is the way i see it.
If you get busted it does not matter, as long as u have made a profit.

nutballs

i agree as well.

for me, this is more a matter of stats control and traffic control. the usage I have planned for this is both the cloaking aspect to hide the destination sites from engines, but also for legitimate redirectionification of users.

perkiset

@redirectionification

Hey... what are we still doing up and @ our machines?

Losers!

G'Night,
/p

nutballs

i wasnt tired. was entering weights into a new product catalog.

thedarkness

quote author=nutballs link=topic=12.msg1591#msg1591 date=1179587945
"i wasnt tired. was entering weights into a new product catalog."

weights?

nutballs

ya, i am launching a new online store, and i needed to enter the weights of the products for realtime UPS calcs. Stoopid dropshipper doesnt have the weights in an easy to deal with file. i tried a few tricks, but gave up and manually entered it all. only a few hundred products, so no biggy, just mind numbing.

thedarkness

quote author=nutballs link=topic=12.msg1601#msg1601 date=1179637394
"ya, i am launching a new online store, and i needed to enter the weights of the products for realtime UPS calcs. Stoopid dropshipper doesnt have the weights in an easy to deal with file. i tried a few tricks, but gave up and manually entered it all. only a few hundred products, so no biggy, just mind numbing."

lol, I can relate dude. Some people have no idea about keeping data clean and useable.

Cheers,
td

nop_90

I thought about opening an online store.

but then i might have to leave the house to actually buy a product ....
then i would have to like pay for hosting or some crap like that ...
I also probably would have to actually make a website ....

shit i am all tired just thinking about that.

nutballs

so I am gonna start monkeying with this idea now.

I have had some thoughts about usages as well. Although im sure you guys can guess my primary usage for this, which would be traffic direction in my BH network, I actually have a legitimate use for this that i think could be interesting.

Every social bookmarking site out there has a problem, and that problem is spammers. So they add in Captchas, which can be broken, they add in codes into the URL which also can be broken, and a host of other things.

I run two social bookmarking sites, lower end, and they dont make any money because that wasnt the purpose in them. But now I am wondering: if i jumped into the game with a good design and simplified my concept down to the most basic level, would i get a userbase?
The biggest things with SBM sites are:
adding the bookmark easily. done
mark as spam. done
mark as good. done
personalizing bookmarks, like titles and such. done.
Ranking Algo. done.
way to prevent spammers from using your site for evil. not done.

So here is the thought. that last part of preventing spammers is the biggest hurdle for any user generated content site. Captchas are annoying, and can be busted if the target is worth the effort. everyone hates them. so what if you could determine "friend or foe" without any captcha. without any interaction with the user?

you could technically do this with usage analysis. but that means the user would have to be around for a while before you trust him. so thats no good for a new user anxious to try out this cool new SBM site.

So... what if you used the browser to your advantage. The idea of how a browser works is what has already been said, but i will reiterate: the browser requests a page, parses it, requests the images the page requires, sets cookies if the site asked for them in the headers, and then runs any JS on the page. If that JS is ajax, it then continues with communication if required. This all happens in seconds.

all bots currently work under the principle of download the page, come back later for the extras.
so have the page do the following:
request an image, which is actually a routine on the server.
that image sets a cookie.
the page itself sets a cookie from serverside.
and a cookie is set via JS.
JS in the page makes a request via ajax, which then tests for the cookies, which have some kind of key pair in them.

so if a bot, then none of those things will happen at the same time.
if user with a browser, they should all happen in the same basic timeframe.
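A rough client-side sketch of that flow, under the assumption that the server mints a key into the page and into each cookie (the '/spamcheck.php' endpoint and the key/cookie names are invented for illustration):

<script>
// Hypothetical: the page arrived with a server-set cookie already in
// place. JS now adds its own cookie, pulls the beacon "image" (which
// sets a third cookie), then phones home via ajax so the server can
// check that all three showed up within the same few seconds.
var storage = new localStorage();
storage.fileName = 'spamcheck';
storage.storeItem('jsKey', 'KEY_FROM_PAGE'); // key emitted server-side

var beacon = new Image();
beacon.src = '/spamcheck.php?mode=img'; // server routine: returns a gif, sets a cookie

var req = new ajaxRequestor();
req.url = '/spamcheck.php';
req.postParam('mode', 'verify');
req.postParam('jsKey', storage.retrieveItem('jsKey'));
req.onSuccess = function(sender) { /* server: keys matched in time, or not */ };
req.execute();
</script>

A bot that only grabs the page HTML never runs any of this, so the server sees a bare page request with no beacon, no cookies and no ajax call.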

The way I understand all of this to work is that all the communication for this can be sniffed with even the most basic of header analyzers. would HTTPS eliminate that problem? or is there a way to hide whats going on? im guessing not, since the spammer controls the client end and can watch the traffic after it's decrypted. So the spammer would now know that we set three cookies and make 2 requests out, 1 to a fake GIF and one to an ajax request.

basically, i am wondering if there is a way to pull this off, with enough back and forthing between the client and server, to make it very difficult to crack.

like page loads, js requests image, image sets cookie, js reads cookie, js sends code back to server, assuming code matches, server replies with routine to allow redirect, redirect is requested, etc etc etc.

thoughts?

nop_90

quote author=nutballs
"The way I understand all of this to work is that all the communication for this can be sniffed with even the most basic of header analyzers... basically, i am wondering if there is a way to pull this off, with enough back and forthing between the client and server, to make it very difficult to crack."

Simple answer no.
I am getting lazy and writing curl code is getting boring. I am thinking about how i could run multiple browsers with greasemonkey installed and control them from a program of some sort. That way I will not have to worry about stupid ajax shit etc.

Since the user controls 1/2 of the equation, and it is his playground, he can do anything he wants.
That said, i think ur scheme is a good one. It will eliminate quite a few of the spammers.

I am pondering if u somehow could take the info from users marking things as spam and use it to train a spam detector
(kinda like they do with email spam?).

thedarkness

I start with sockets; if I can tell that's not going to work I escalate to HTTP_Request/HTTP_Client/curl; if that won't cut it I escalate to automating a browser using mozilla's control object (Firefox) or the shdocvw/webbrowser object on the Micro$oft side. By the time I get there I'm virtually indistinguishable from a human user (I can add sleeps, etc.).

I think if someone really wants in it's pretty much a foregone conclusion they're gonna get there eventually. Your way will indeed make it hard for the masses, as Nop mentioned, but it won't be 100% unless you tell us all what the url is and ask us nicely to stay away.

Cheers,
td

perkiset

Hey NBs -

Do you remember the post @ Syndk8 a bit ago that contained VERY SCARY code that Zwart and I decompiled a bit? The asshole that wrote that had something entirely different in mind, but the point was that he sent code down to the client box that was only decipherable and executable by the code sent down to it... we couldn't even mimic the code sent, because it was sort of a "one-way cipher" kind of thing. Just a gear turner.

The fact of the matter is that you are correct: if IE can run it, it can be figgerd out. Two issues: do you want the code obscured, or do you want the packets unsniffable? You can apply both techniques, but at the end of the day, if IE can run it, eventually so could I.

So realistically, this is a game of "good enough," not "perfect." Honestly, my experience with most of our adversaries is that they are *considerably* less capable than you or I. A bit of obfuscation, perhaps a lightly encrypted or munged packet, and the vast majority of spamming techniques will be rendered impotent. What you have described above is *so far beyond* the vast vast vast majority of who you will be playing with, it's not even funny. Add a timed technique, like a requirement that the packet is called within 2 seconds of dispatch and then it expires on your side, and you're gonna be looking good.
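A hedged sketch of that timed technique from the client side (the endpoint and token names are hypothetical; the actual 2-second expiry check lives on the server, which refuses any token older than the window):

<script>
// Hypothetical: the page carries a server-issued, timestamped token.
// Sniffing or replaying the packet later does no good - by the time a
// bot re-sends it, the server has already expired the token.
var dispatchToken = 'TOKEN_FROM_SERVER'; // minted into the page at render time

var req = new ajaxRequestor();
req.url = '/timed-callback.php'; // made-up endpoint
req.postParam('token', dispatchToken);
req.onSuccess = function(sender) { /* arrived inside the window */ };
req.onError = function(sender) { /* token expired or bogus */ };
req.execute();
</script>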

Looking forward to playing with this, my friend

/p

