mirror of https://github.com/myned/modufur.git synced 2024-11-01 21:02:38 +00:00

Merge branch 'dev'

Myned 2019-09-24 19:13:22 -04:00
commit e1982b6ace
No known key found for this signature in database
GPG key ID: BC58C09870A63E59
3 changed files with 29 additions and 16 deletions

View file

@@ -20,6 +20,7 @@ beautifulsoup4 = "*"
 "discord.py" = {extras = ["voice"],git = "https://github.com/Rapptz/discord.py"}
 "hurry.filesize" = "*"
 requests = "*"
+html5lib = "*"
 
 [dev-packages]
 lxml = "*"
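html5lib becomes a runtime dependency because the scraper now asks BeautifulSoup for the html5lib tree builder instead of the stdlib html.parser. A minimal sketch of what that switch looks like at a call site (the sample markup is invented):

from bs4 import BeautifulSoup

markup = '<data id="results">[[&quot;abc123&quot;]]</data>'  # hypothetical sample markup

# Same BeautifulSoup call either way; only the backend parser changes.
strict = BeautifulSoup(markup, 'html.parser')  # stdlib parser, no extra dependency
lenient = BeautifulSoup(markup, 'html5lib')    # browser-grade parsing, needs the html5lib package

print(strict.find('data', id='results').string)   # [["abc123"]]
print(lenient.find('data', id='results').string)  # [["abc123"]]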

View file

@@ -3,7 +3,7 @@ import ast
 import re
 
 from bs4 import BeautifulSoup
-from lxml import html
+import lxml
 from hurry.filesize import size, alternative
 
 from misc import exceptions as exc
@@ -12,7 +12,7 @@ from utils import utils as u
 
 # async def get_harry(url):
 #     content = await u.fetch('https://iqdb.harry.lu', params={'url': url})
-#     soup = BeautifulSoup(content, 'html.parser')
+#     soup = BeautifulSoup(content, 'html5lib')
 #
 #     if soup.find('div', id='show1').string is 'Not the right one? ':
 #         parent = soup.find('th', string='Probable match:').parent.parent
@@ -41,17 +41,25 @@ from utils import utils as u
 async def get_kheina(url):
-    content = await u.fetch('https://kheina.com', params={'url': url})
-    soup = BeautifulSoup(content, 'html.parser')
-    results = ast.literal_eval(soup.find('data', id='results').string)[-1]
-    iqdbdata = ast.literal_eval(soup.find('data', id='iqdbdata').string)[0]
+    content = await u.fetch('https://kheina.com', params={'url': url}, text=True)
+    content = content.replace('&quot;', 'quot;').replace('&apos;', 'apos;')
+    soup = BeautifulSoup(content, 'html5lib')
+    results = soup.find('data', id='results').string.replace('quot;', '&quot;').replace('apos;', '&apos;')
+    results = ast.literal_eval(results)
+    iqdbdata = soup.find('data', id='iqdbdata').string
+    iqdbdata = ast.literal_eval(iqdbdata)
+
+    for e in results:
+        if iqdbdata[0]['iqdbid'] in e:
+            match = e
+            break
 
     result = {
-        'source': results[3],
-        'artist': results[4],
-        'thumbnail': f'https://f002.backblazeb2.com/file/kheinacom/{results[1]}.jpg',
-        'similarity': str(int(float(iqdbdata['similarity']))),
+        'source': match[3],
+        'artist': match[4],
+        'thumbnail': f'https://f002.backblazeb2.com/file/kheinacom/{match[1]}.jpg',
+        'similarity': str(int(float(iqdbdata[0]['similarity']))),
         'database': 'Kheina'
     }
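The reworked get_kheina fetches the page as text, shields HTML-escaped quotes from the parser so that ast.literal_eval can still read the embedded list literal, and then picks the entry whose iqdb id matches. A self-contained sketch of that escape round trip under the same assumptions about the entities involved (the payload and values below are invented):

import ast
from bs4 import BeautifulSoup

# Hypothetical payload shaped like what the scraper expects: a <data> element whose text is a
# Python-style list literal, with quotes inside the values escaped as HTML entities.
page = "<data id='results'>[['abc123', 0, 0, 'https://example.com/post', 'O&apos;Brien']]</data>"

# Feeding this to the parser directly would decode &apos; into a bare quote inside a quoted
# string and break ast.literal_eval, so the entities are neutralized first...
protected = page.replace('&quot;', 'quot;').replace('&apos;', 'apos;')
soup = BeautifulSoup(protected, 'html5lib')

# ...and restored after extraction, so the evaluated strings keep the entity as plain text.
raw = soup.find('data', id='results').string.replace('quot;', '&quot;').replace('apos;', '&apos;')
results = ast.literal_eval(raw)

print(results[0][3])  # https://example.com/post
print(results[0][4])  # O&apos;Brien

One caveat about the committed loop above: if no entry in results contains the iqdb id, match is never assigned and the dictionary that follows raises NameError; a guard such as match = next((e for e in results if iqdbdata[0]['iqdbid'] in e), None) would make the missing-match case easier to handle.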
@@ -63,6 +71,7 @@ async def get_saucenao(url):
         'https://saucenao.com/search.php',
         params={'url': url, 'api_key': u.config['saucenao_api'], 'output_type': 2},
         json=True)
+
     results = content['results'][0]
     for i in range(len(content['results'])):
         if 'e621' in content['results'][i]['header']['index_name']:
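For SauceNAO the handler keeps its selection rule: default to the first result, but prefer one whose index name mentions e621. A small sketch of that rule on a hand-written stand-in for the parsed JSON (the index names are invented; only the keys come from the code above):

content = {'results': [
    {'header': {'index_name': 'Index #5: Pixiv'}},
    {'header': {'index_name': 'Index #29: e621'}},
]}

results = content['results'][0]  # fall back to the first hit
for entry in content['results']:
    if 'e621' in entry['header']['index_name']:  # prefer an e621-indexed hit
        results = entry
        break

print(results['header']['index_name'])  # Index #29: e621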
@@ -108,7 +117,7 @@ async def get_post(url):
 
 async def get_image(url):
     content = await u.fetch(url)
-    value = html.fromstring(content).xpath(
+    value = lxml.html.fromstring(content).xpath(
         'string(/html/body/div[@id="content"]/div[@id="post-view"]/div[@class="content"]/div[2]/img/@src)')
 
     return value
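get_image now calls lxml.html.fromstring after the import was shortened to a bare import lxml. Worth flagging: import lxml by itself does not load the lxml.html submodule, so lxml.html.fromstring will normally raise AttributeError unless another module has already imported lxml.html; importing the submodule explicitly avoids that. A minimal sketch with hypothetical markup shaped to satisfy the XPath:

import lxml.html  # explicit submodule import; a bare `import lxml` does not expose lxml.html

# Hypothetical page matching the XPath below.
page = '''<html><body><div id="content"><div id="post-view"><div class="content">
<div>preview</div>
<div><img src="https://example.com/full.png"></div>
</div></div></div></body></html>'''

value = lxml.html.fromstring(page).xpath(
    'string(/html/body/div[@id="content"]/div[@id="post-view"]/div[@class="content"]/div[2]/img/@src)')
print(value)  # https://example.com/full.png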

View file

@@ -85,14 +85,17 @@ color = d.Color(0x1A1A1A)
 last_commands = {}
 
 
-async def fetch(url, *, params={}, json=False, response=False):
+async def fetch(url, *, params={}, json=False, response=False, text=False):
     async with aiohttp.ClientSession() as session:
         async with session.get(url, params=params, headers={'User-Agent': 'Myned/Modufur'}, ssl=False) as r:
-            if response:
-                return r
-            elif json:
+            if json:
                 return await r.json()
-            return await r.read()
+            elif response:
+                return r
+            elif text:
+                return await r.text()
+            else:
+                return await r.read()
 
 
 def generate_embed(ctx, *, title=d.Embed.Empty, kind='rich', description=d.Embed.Empty, url=d.Embed.Empty, timestamp=d.Embed.Empty, colour=color, footer={}, image=d.Embed.Empty, thumbnail=d.Embed.Empty, author={}, fields=[]):
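The fetch helper now checks json first and gains a text branch, which get_kheina relies on; callers choose the payload type with a single keyword and the default stays raw bytes. A usage sketch, assuming the helper is imported the same way the scraper imports it (the example URLs are placeholders):

import asyncio

from utils import utils as u  # the bot's helper module, imported as the scraper does


async def main():
    # json=True is checked first, so it wins even if other flags are passed.
    data = await u.fetch('https://saucenao.com/search.php',
                         params={'url': 'https://example.com/art.png',
                                 'api_key': u.config['saucenao_api'],
                                 'output_type': 2},
                         json=True)

    # text=True is the new branch; with no flags the helper returns raw bytes.
    page = await u.fetch('https://kheina.com', params={'url': 'https://example.com/art.png'}, text=True)
    image = await u.fetch('https://example.com/art.png')

    print(type(data), len(page), len(image))


asyncio.run(main())

Worth noting that response=True hands the aiohttp response back only after both context managers have exited, so its status and headers remain readable but the body generally cannot be read any more; callers that need the payload should use one of the other flags.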