#!/usr/bin/env python2

from MalwrAPI import MalwrAPI
from Queue import Queue
from bs4 import BeautifulSoup
import requests

# Shared malwr.com API client; verbose=True makes the wrapper log its requests.
api = MalwrAPI(verbose=True)
# Samples whose signature text contains 'autorun' (filled by get_signatures).
autoruns = Queue()

def get_signatures(sample, res):
    """Extract signature names from one sample's parsed analysis page.

    Collects the stripped text of every ``div.signature`` element in *res*.
    Side effect: any sample whose signature text contains 'autorun' is
    queued on the module-level ``autoruns`` queue.

    :param sample: dict describing the sample (queued on an autorun match)
    :param res: BeautifulSoup document for the sample's analysis page
    :return: list of signature strings
    """
    sigs = []
    for div in res.findAll('div', {'class': 'signature'}):
        text = div.text.strip()
        sigs.append(text)
        # NOTE(review): match is case-sensitive — 'Autorun' would be missed.
        if 'autorun' in text:
            autoruns.put(sample)
    return sigs

def print_sigs(submission, sigs):
    """Report one submission: its hash, analysis URL, and signature list.

    :param submission: dict with 'hash' and 'submission_url' keys
    :param sigs: iterable of signature strings, one bullet line each
    """
    lines = ["hash: %s" % submission['hash'],
             "url: %s" % submission['submission_url'],
             "signatures:"]
    lines.extend("\t- %s" % entry for entry in sigs)
    print("\n".join(lines))

def scrape(page):
    """Scrape one page of malwr.com's public analysis listing.

    Fetches the listing page, parses the first HTML table, and builds one
    record per data row.

    :param page: page number of the listing to fetch
    :return: list of dicts with keys 'date', 'url', 'hash', 'filename',
             'filetype', 'av'
    """
    headers = {'User-Agent': 'Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:41.0) Gecko/20100101 Firefox/41.0'}
    res = requests.get("https://malwr.com/analysis/?page=%s" % page, headers=headers)
    soup = BeautifulSoup(res.text, "html.parser")
    samples = []
    for row in soup.findAll('table')[0].findAll('tr'):
        tds = row.findAll('td')
        # Header rows use <th> cells, so findAll('td') is empty — skip them.
        if not tds:
            continue
        samples.append({
            'date': tds[0].text.strip(),
            'url': tds[1].find('a')['href'],
            'hash': tds[1].text.strip(),
            'filename': tds[2].text.strip(),
            'filetype': tds[3].text.strip(),
            'av': tds[4].text.strip(),
        })
    return samples

def virus_total(sample):
    """Ask VirusTotal to rescan *sample*, identified by its hash.

    NOTE(review): the 'apikey' field is empty here — VirusTotal will reject
    the request until a real API key is supplied.

    :param sample: dict with at least a 'hash' key
    :return: the requests.Response from the rescan endpoint (previously the
             response was discarded)
    """
    res = requests.post(
        'https://www.virustotal.com/vtapi/v2/file/rescan',
        data={'resource': sample['hash'], 'apikey': ''})
    return res
if __name__ == '__main__':
    # Scrape the first page of recent analyses and dump each sample record.
    samples = scrape(1)
    for sample in samples:
        print(sample)