Skip to content

Commit

Permalink
adding new tools
Browse files Browse the repository at this point in the history
  • Loading branch information
kaiocp committed Jul 13, 2021
1 parent faf60bb commit 3002983
Show file tree
Hide file tree
Showing 5 changed files with 110 additions and 0 deletions.
6 changes: 6 additions & 0 deletions .idea/vcs.xml

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

25 changes: 25 additions & 0 deletions gerador-de-hashes/gh.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,25 @@
import hashlib

# Menu option -> (display name, hashlib constructor).
# Replaces the original if/elif chain, which repeated the same
# encode/hexdigest/print logic once per algorithm.
_ALGORITMOS = {
    1: ("MD5", hashlib.md5),
    2: ("SHA1", hashlib.sha1),
    3: ("SHA256", hashlib.sha256),
    4: ("SHA512", hashlib.sha512),
}


def gerar_hash(texto, opcao):
    """Return the hex digest of *texto* for menu option *opcao*.

    Parameters:
        texto: the string to hash (encoded as UTF-8).
        opcao: integer menu option (1=MD5, 2=SHA1, 3=SHA256, 4=SHA512).

    Returns:
        The hexadecimal digest string, or None when *opcao* is not a
        known menu option.
    """
    escolha = _ALGORITMOS.get(opcao)
    if escolha is None:
        return None
    _, construtor = escolha
    return construtor(texto.encode('utf-8')).hexdigest()


def main():
    """Interactive entry point; prompts and messages are kept byte-for-byte
    from the original script."""
    string = input("Digite a string ou texto a ser gerado a hash: ")

    menu = int(input('''#### MENU - ESCOLHA O TIPO DE HASH
1) MD5
2) SHA1
3) SHA256
4) SHA512
Digite o número do hash a ser gerado: '''))

    resultado = gerar_hash(string, menu)
    if resultado is None:
        print("Algo de errado não está certo")
    else:
        nome = _ALGORITMOS[menu][0]
        # print(*args) joins with spaces, matching the original output exactly.
        print("A hash", nome, "da string", string, "é", resultado)


if __name__ == '__main__':
    main()
8 changes: 8 additions & 0 deletions gerador-de-wordlists/gw.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,8 @@
import itertools


def gerar_wordlist(string):
    """Yield every full-length permutation of *string* as a joined word.

    Note: duplicate characters produce duplicate words, because
    itertools.permutations permutes positions, not distinct values —
    this matches the original script's behaviour.
    """
    for tupla in itertools.permutations(string, len(string)):
        yield ''.join(tupla)


def main():
    """Interactive entry point; prompt text kept byte-for-byte."""
    string = input("String a ser permutada: ")
    for palavra in gerar_wordlist(string):
        print(palavra)


if __name__ == '__main__':
    main()
23 changes: 23 additions & 0 deletions scrapper/ws.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,23 @@
from bs4 import BeautifulSoup
import requests


def main():
    """Download the Climatempo homepage and parse it with BeautifulSoup.

    The original ran the HTTP request at import time and without a
    timeout (which can hang forever); the request now runs only when
    the script is executed directly, with a 30-second timeout.
    """
    site = requests.get("https://www.climatempo.com.br/", timeout=30).content

    soup = BeautifulSoup(site, 'html.parser')

    # Exploration snippets kept from the original, still disabled:
    # print(soup.prettify())
    # temperatura = soup.find("span", class_="_block _margin-b-5 -gray")
    # print(soup.title.string)
    # print(soup.a)
    # print(soup.p.string)
    # print(soup.find('admin'))


if __name__ == '__main__':
    main()





48 changes: 48 additions & 0 deletions webcrawler/wc.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,48 @@
import requests
from bs4 import BeautifulSoup
import operator
from collections import Counter


def start(url):
    """Fetch *url*, collect every lower-cased word found inside
    'entry-content' divs, and hand the raw word list to clean_wordlist().

    Parameters:
        url: page to crawl.
    """
    # timeout: a request without one can block forever.
    source_code = requests.get(url, timeout=30).text

    soup = BeautifulSoup(source_code, 'html.parser')

    wordlist = []
    # find_all is the modern name for the deprecated findAll alias.
    for entry in soup.find_all('div', {'class': 'entry-content'}):
        wordlist.extend(entry.text.lower().split())
    clean_wordlist(wordlist)


def clean_wordlist(wordlist):
    """Strip punctuation/symbol characters from each word and forward the
    non-empty results to create_dictionary().

    Parameters:
        wordlist: raw words as split from page text.
    """
    # NOTE: the original wrote '\;' — an invalid escape sequence that
    # evaluates to a literal backslash followed by ';'. Both characters
    # are kept here (as '\\;') so the stripped set is unchanged.
    symbols = '!@#$%^&*()_-+={[}]|\\;:"<>?/., '
    # One translation table, built once; the original re-ran .replace()
    # once per symbol per word (O(len(wordlist) * len(symbols)) passes).
    table = str.maketrans('', '', symbols)
    clean_list = [stripped for stripped in
                  (word.translate(table) for word in wordlist) if stripped]
    create_dictionary(clean_list)


def create_dictionary(clean_list):
    """Print the 10 most frequent words of *clean_list* as (word, count) pairs.

    Counter counts the list directly; the original built an intermediate
    plain dict by hand and then wrapped it in a Counter. Tie ordering is
    identical: both preserve first-occurrence insertion order.
    """
    top = Counter(clean_list).most_common(10)
    print(top)


if __name__ == '__main__':
    # Script entry point: crawl a sample article and report its top words.
    target_url = "https://www.geeksforgeeks.org/programming-language-choose/"
    start(target_url)

0 comments on commit 3002983

Please sign in to comment.