(07-26-2014, 02:08 AM)GetR34P3D Wrote: Can't download your program from sinister link, it's broken :c
That's strange... Here's the source code:
scrapperGui.pyw
Code:
from PyQt4 import QtCore, QtGui
from threading import Thread
import os
import sys
import proxyScrapper  # save the second file as proxyScrapper.py so Python can import it
class Input(QtGui.QDialog):
    scrapping = False
    main = None
    css = """
    QWidget {
        background: #111;
        color: #fff;
        font: italic 13px;
        font-weight: bold;
        border-radius: 2px;
        border: 1px solid #383838;
        text-align: center;
        vertical-align: middle;
    }
    QDialog {
        background-image: url('http://www.sinister.ly/images/e-red/mainbg.png');
        background-repeat: repeat-x;
        background-position: top left;
        font-size: 13px;
        color: #fff;
        border: 1px solid #383838;
    }
    QLineEdit:hover {
        background: #831010;
        font-size: 12px;
        height: 43px;
        color: black;
    }
    QLineEdit {
        background: #211;
        font-size: 12px;
        height: 43px;
        color: #383838;
    }
    QPushButton:hover {
        background: #831010;
        font-size: 12px;
        color: black;
    }
    QPushButton {
        background: #211;
        font-size: 12px;
        color: #383838;
    }
    QSpinBox {
        background: #211;
        font-size: 12px;
        height: 43px;
        color: #383838;
        border: 1px solid #383838;
        padding-top: 3px;
        padding-bottom: 3px;
    }
    QSpinBox:hover {
        background: #831010;
        font-size: 12px;
        height: 43px;
        color: black;
    }
    """
    def __init__(self):
        super(Input, self).__init__()
        topLayout = QtGui.QHBoxLayout()
        topLayout.addStretch()
        mainLayout = QtGui.QVBoxLayout()
        self.startButton = QtGui.QPushButton("Scrap proxies")
        self.closeButton = QtGui.QPushButton(" X ")
        self.closeButton.clicked.connect(self.exitWindow)
        mainLayout.addLayout(topLayout)
        topLayout.addWidget(self.closeButton)
        self.output = QtGui.QTextEdit()
        self.output.setReadOnly(True)
        self.setStyleSheet(self.css)
        layout = self.createBits()
        mainLayout.addWidget(layout)
        mainLayout.addWidget(self.startButton)
        mainLayout.addWidget(self.output)
        self.startButton.clicked.connect(self.makeConcurrent)
        self.setLayout(mainLayout)
        self.setWindowTitle("Lightening scrapper")
        self.setWindowIcon(QtGui.QIcon("./Lightning.ico"))
        self.setWindowFlags(QtCore.Qt.FramelessWindowHint)
        self.output.append("#######################\n"
                           "## Lightening scrapper\n"
                           "## By: 3SidedSquare \n"
                           "## Beta version \n"
                           "#######################\n")

    def exitWindow(self):
        self.close()
    def createBits(self):
        ret = QtGui.QGroupBox("")
        layout = QtGui.QFormLayout()
        boxLabel = QtGui.QLabel("\nText file containing proxies\n"
                                "Proxies must be in the format \"address:port\"")
        self.textInput = QtGui.QLineEdit()
        counterLabel = QtGui.QLabel("\nMaximum number of threads to run")
        self.counterSpin = QtGui.QSpinBox()
        self.counterSpin.setMaximum(5000)
        layout.addRow(boxLabel)
        layout.addRow(self.textInput)
        layout.addRow(counterLabel)
        layout.addRow(self.counterSpin)
        self.delayLabel = QtGui.QLabel("\nOS response delay (ms)")
        self.delayspin = QtGui.QSpinBox()
        layout.addRow(self.delayLabel)
        layout.addRow(self.delayspin)
        ret.setLayout(layout)
        return ret
    def makeConcurrent(self):
        # Run the scrape in a background thread so the GUI stays responsive.
        thre = Thread(target=self.startScrap)
        thre.start()
    def startScrap(self):
        # NOTE: this runs in a worker thread; touching Qt widgets from here works in
        # practice but is not strictly thread-safe (signals/slots would be the clean fix).
        file = self.textInput.text()
        num = self.counterSpin.value()
        if not self.scrapping:
            self.scrapping = True
            self.startButton.setText("Stop scrap")
            self.output.append("############\nStarting scrap")
            # The spin box is in milliseconds; the scrapper expects seconds.
            self.main = proxyScrapper.scrapper(file, num, self.output,
                                               delay=self.delayspin.value() / 1000)
            self.main.scrapProxies(file)
            self.scrapping = False
            self.output.append("Done\n############")
            self.output.moveCursor(QtGui.QTextCursor.End)
            self.startButton.setText("Start scrap")
        else:
            self.output.append("stopping scrap....")
            self.output.moveCursor(QtGui.QTextCursor.End)
            self.main.stopScrap()
            self.startButton.setText("Start scrap")
            self.scrapping = False
if __name__ == '__main__':
    app = QtGui.QApplication(sys.argv)
    dialog = Input()
    try:
        sys.exit(dialog.exec_())
    except:
        pass
proxyScrapper.pyw
Code:
from PyQt4 import QtCore, QtGui
import urllib.request
import urllib.error
import http.client
import socket
#from multiprocessing import Process, Queue
import threading
import time
class scrapper():
    putFile = None
    threads = 0
    stop = False
    thre = {}
    printbackup = "\nProxy scrapper made by 3 sided square"

    def __init__(self, fileName, maxThreads, outputwindow=None, delay=0.2):
        self.delay = delay
        self.outputWindow = outputwindow
        if fileName == "":
            self.forceOutput("!File name can not be blank, aborted!")
            self.stop = True
            return
        if maxThreads < 1:
            self.forceOutput("!Too few threads, aborted!")
            self.stop = True
            return
        if delay <= 0:
            self.forceOutput("!OS delay is too low, aborted!")
            self.stop = True
            return
        self.threads = maxThreads
        self.forceOutput("\nStarting scrap on " + fileName)
        self.stop = False
    def output(self, string):
        # Buffered output: text piles up in printbackup and is flushed to the
        # window from the main scrap loop, to cut down on cross-thread GUI calls.
        if self.outputWindow is not None:
            self.printbackup += string
            self.outputWindow.verticalScrollBar().setValue(
                self.outputWindow.verticalScrollBar().maximum())
        else:
            print(string)

    def forceOutput(self, string):
        # Unbuffered output: write straight to the window (or stdout).
        if self.outputWindow is not None:
            self.outputWindow.append(string)
            self.outputWindow.verticalScrollBar().setValue(
                self.outputWindow.verticalScrollBar().maximum())
        else:
            print(string)
    def makeCall(self, string, putFile):
        # Check one "address:port" line by opening a test page through the proxy.
        self.output("Scrapping " + string[:-1] + "...")
        self.threads -= 1
        col = string.find(":")
        ip = string[0:col]
        port = string[col + 1:].strip()
        try:
            proxy_handler = urllib.request.ProxyHandler({'http': 'http://' + ip + ":" + port})
            proxy_auth_handler = urllib.request.HTTPBasicAuthHandler()
            opener = urllib.request.build_opener(proxy_handler, proxy_auth_handler)
            opener.open("http://www.google.com")
            self.output("\nFound successful: " + string)
            self.threads += 1
        except urllib.error.HTTPError as detail:
            # HTTPError is a subclass of URLError, so it has to be caught first.
            self.threads += 1
            self.output("\n" + string + " did not respond")
            return
        except urllib.error.URLError as detail:
            self.threads += 1
            self.output("\nCould not find " + string)
            return
        except http.client.BadStatusLine as detail:
            self.threads += 1
            self.output("\n" + string + " did not give a valid response")
            return
        except:
            self.threads += 1
            return
        # Append the working proxy to the results file, retrying if it is locked.
        while not self.stop:
            try:
                out = open(putFile, 'a')
                out.write(string)
                out.close()
                return
            except:
                time.sleep(1)
    def scrapProxies(self, fileName):
        try:
            file = open(fileName, 'r')
            putFile = fileName.split(".")[0] + "_working.txt"
            put = open(putFile, 'w')  # create/empty the results file
            put.close()
            self.output("file read...")
        except:
            self.output("\nFailed to read in file")
            self.stop = True
            return
        x = 0
        self.output("\nStarting check")
        for line in file:
            self.output("\nCueing " + line + "...")
            self.thre[x] = threading.Thread(target=self.makeCall, name=None,
                                            args=(line, putFile))
            # Wait for a free thread slot, then start the check for this proxy.
            while not self.stop:
                if self.threads > 0:
                    try:
                        self.thre[x].start()
                        time.sleep(self.delay)
                        if self.outputWindow is not None:
                            self.outputWindow.append(self.printbackup)
                            self.printbackup = ""
                        break
                    except:
                        time.sleep(self.delay)
                time.sleep(self.delay)
            x += 1
        file.close()
    def stopScrap(self):
        time.sleep(0.5)
        timeBreak = self.threads * self.delay * 4
        self.forceOutput("\nForcing quit after " + str(timeBreak) + " seconds.")
        self.stop = True
        # Python threads cannot be killed from outside; the stop flag asks them to
        # wind down, and join() just waits briefly for each one that is running.
        for thread in self.thre.values():
            if thread.is_alive():
                thread.join(timeout=self.delay)
        time.sleep(timeBreak)
        if self.thre != {}:
            self.scary()

    def scary(self):
        # Last resort: bail out of the interpreter if threads are still hanging around.
        exit()
In the second file, find the line that says
Code:
opener.open("http://www.google.com")
and change the URL to whatever site you want the proxies checked against.
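For example, if you wanted the check to run against your own site instead of Google (the URL below is just a placeholder), that line would become something like:
Code:
opener.open("http://www.your-target-site.com")
You can also pass a timeout so a dead proxy doesn't tie a thread up forever, e.g. opener.open("http://www.your-target-site.com", timeout=10).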
BTW, it will connect to the server and retrieve the page, but it won't load any Flash objects or JavaScript, so I'm afraid you'll have to do a little more work if you want to spam Google AdSense or similar.
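If all you want is the raw HTML that comes back through one of the working proxies, a quick sketch like this is enough (the proxy address is made up, substitute one from your _working.txt file):
Code:
import urllib.request

# Hypothetical proxy address, purely for illustration
proxy = urllib.request.ProxyHandler({'http': 'http://1.2.3.4:8080'})
opener = urllib.request.build_opener(proxy)
response = opener.open("http://www.google.com", timeout=10)
html = response.read().decode('utf-8', errors='replace')  # static HTML only, nothing gets executed
print(html[:200])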