Skip to content
This repository has been archived by the owner on Mar 14, 2020. It is now read-only.

Commit

Permalink
Debug build;
Browse files Browse the repository at this point in the history
  • Loading branch information
twiddli committed Feb 14, 2016
1 parent 91dc66d commit 43c3146
Show file tree
Hide file tree
Showing 6 changed files with 88 additions and 29 deletions.
11 changes: 9 additions & 2 deletions version/app.py
Original file line number Diff line number Diff line change
Expand Up @@ -395,7 +395,8 @@ def show_in_library(self):
fetch_instance.FINISHED.connect(done)
fetch_instance.FINISHED.connect(metadata_spinner.before_hide)
thread.finished.connect(thread.deleteLater)
thread.start()
#thread.start()
fetch_instance.auto_web_metadata()
metadata_spinner.show()
else:
self.notif_bubble.update_text("Oops!", "Auto metadata fetcher is already running...")
Expand Down Expand Up @@ -607,7 +608,13 @@ def init_toolbar(self):
# debug specific code
if app_constants.DEBUG:
def debug_func():
    """
    Ad-hoc action bound to the DEBUG toolbar button.

    Pops up a message box listing the image formats the installed Qt
    build can decode, then shows a test image in a floating label.
    Only ever wired up when app_constants.DEBUG is set.
    """
    # Local import: only needed in debug builds, so keep it out of the
    # module-level import block.
    from PyQt5.QtGui import QImageReader
    m = QMessageBox(self)
    m.setText("{}".format(QImageReader.supportedImageFormats()))
    m.exec()
    # Store the label on self so it is not garbage-collected (and its
    # window closed) as soon as this function returns.
    self._lbl = QLabel()
    self._lbl.setPixmap(QPixmap('horopic.jpg'))
    self._lbl.show()

debug_btn = QToolButton()
debug_btn.setText("DEBUG BUTTON")
Expand Down
5 changes: 3 additions & 2 deletions version/app_constants.py
Original file line number Diff line number Diff line change
Expand Up @@ -14,7 +14,7 @@

import os, sys
import settings
import database
from database import db_constants

# Version number
vs = '0.27'
Expand Down Expand Up @@ -90,6 +90,7 @@
DOWNLOAD_MANAGER = None

# image paths
SAMPLE_PATH = os.path.join(static_dir, "sample.png")
SORT_PATH = os.path.join(static_dir, "sort.png")
GALLERY_ICO_PATH = os.path.join(static_dir, "gallery_ico.ico")
GALLERY_DEF_ICO_PATH = os.path.join(static_dir, "gallery_def_ico.ico")
Expand Down Expand Up @@ -543,7 +544,7 @@ class WrongLogin(Exception): pass
Moshidesu, peaceanpizza, utterbull, LePearlo</p>
</body></html>
""".format(vs, database.db_constants.CURRENT_DB_VERSION)
""".format(vs, db_constants.CURRENT_DB_VERSION)

TROUBLE_GUIDE =\
"""
Expand Down
2 changes: 1 addition & 1 deletion version/fetch.py
Original file line number Diff line number Diff line change
Expand Up @@ -354,7 +354,7 @@ def _auto_metadata_process(self, galleries, hen, valid_url):
hash = None
try:
if not gallery.hashes:
hash_dict = add_method_queue(HashDB.gen_gallery_hash, False, gallery, 0, 'mid')
hash_dict = add_method_queue(HashDB.gen_gallery_hash, False, gallery, 0, 'mid', True)
if hash_dict:
hash = hash_dict['mid']
else:
Expand Down
40 changes: 30 additions & 10 deletions version/gallerydb.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,7 +12,7 @@
#along with Happypanda. If not, see <http://www.gnu.org/licenses/>.
#"""

import datetime, os, scandir, threading, logging, queue, uuid # for unique filename
import datetime, os, scandir, threading, logging, queue, io, uuid # for unique filename
import re as regex

from PyQt5.QtCore import Qt, QObject, pyqtSignal
Expand Down Expand Up @@ -133,6 +133,7 @@ def gen_thumbnail(gallery, width=app_constants.THUMB_W_SIZE,
raise IndexError

# Do the scaling

image = QImage()
image.load(img_path)
if image.isNull():
Expand Down Expand Up @@ -1136,16 +1137,17 @@ def get_gallery_hash(cls, gallery_id, chapter, page=None):
return hashes

@classmethod
def gen_gallery_hash(cls, gallery, chapter, page=None):
def gen_gallery_hash(cls, gallery, chapter, page=None, color_img=False, _name=None):
"""
Generate hash for a specific chapter.
Set page to only generate specific page
page: 'mid' or number or list of numbers
color_img: if true then a hash to colored img will be returned if possible
Returns dict with chapter number or 'mid' as key and hash as value
"""
assert isinstance(gallery, Gallery)
assert isinstance(chapter, int)
if page:
if page != None:
assert isinstance(page, (int, str, list))
skip_gen = False
if gallery.id:
Expand All @@ -1162,10 +1164,10 @@ def gen_gallery_hash(cls, gallery, chapter, page=None):
pass
if isinstance(page, (int, list)):
if isinstance(page, int):
page = [page]
_page = [page]
h = {}
t = False
for p in page:
for p in _page:
if p in hashes:
h[p] = hashes[p]
else:
Expand All @@ -1183,7 +1185,7 @@ def gen_gallery_hash(cls, gallery, chapter, page=None):
skip_gen = False


if not skip_gen:
if not skip_gen or color_img:

def look_exists(page):
"""check if hash already exists in database
Expand All @@ -1207,8 +1209,13 @@ def look_exists(page):
for n, i in enumerate(imgs):
pages[n] = i

if page:
if page != None:
pages = {}
if color_img:
print("trying color img")
# if first img is colored, then return hash of that
if not utils.image_greyscale(imgs[0]):
cls.gen_gallery_hash(gallery, chapter, 0, _name=page)
if page == 'mid':
imgs = imgs[len(imgs)//2]
pages[len(imgs)//2] = imgs
Expand Down Expand Up @@ -1246,9 +1253,13 @@ def look_exists(page):
return {}

pages = {}
if page:
if page != None:
p = 0
con = zip.dir_contents(chap.path)
if color_img:
# if first img is colored, then return hash of that
if not utils.image_greyscale(io.BytesIO(zip.open(con[0], False))):
return cls.gen_gallery_hash(gallery, chapter, 0, _name=page)
if page == 'mid':
p = len(con)//2
img = con[p]
Expand Down Expand Up @@ -1283,10 +1294,19 @@ def look_exists(page):
if executing:
cls.executemany(cls, 'INSERT INTO hashes(hash, series_id, chapter_id, page) VALUES(?, ?, ?, ?)',
executing)


if page == 'mid':
return {'mid':list(hashes.values())[0]}
r_hash = {'mid':list(hashes.values())[0]}
else:
return hashes
r_hash = hashes

if _name != None:
try:
r_hash[_name] = r_hash[page]
except KeyError:
pass
return r_hash

@classmethod
def gen_gallery_hashes(cls, gallery):
Expand Down
5 changes: 3 additions & 2 deletions version/pewnet.py
Original file line number Diff line number Diff line change
Expand Up @@ -968,11 +968,12 @@ def no_hits_found_check(html):
for h in search_string:
log_d('Hash search: {}'.format(h))
self.begin_lock()
hash_search = hash_url + h + '&fs_exp=1' # to enable expunged
if cookies:
self.check_cookie(cookies)
r = requests.get(hash_url+h, timeout=30, headers=self.HEADERS, cookies=self.COOKIES)
r = requests.get(hash_search, timeout=30, headers=self.HEADERS, cookies=self.COOKIES)
else:
r = requests.get(hash_url+h, timeout=30, headers=self.HEADERS)
r = requests.get(hash_search, timeout=30, headers=self.HEADERS)
self.end_lock()
if not self.handle_error(r):
return 'error'
Expand Down
54 changes: 42 additions & 12 deletions version/utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,10 +12,23 @@
#along with Happypanda. If not, see <http://www.gnu.org/licenses/>.
#"""

import datetime, os, subprocess, sys, logging, zipfile
import hashlib, shutil, uuid, re, scandir, rarfile, json
import datetime
import os
import subprocess
import sys
import logging
import zipfile
import hashlib
import shutil
import uuid
import re
import scandir
import rarfile
import json
import send2trash

from PIL import Image,ImageChops

import app_constants

from database import db_constants
Expand All @@ -27,7 +40,7 @@
log_e = log.error
log_c = log.critical

IMG_FILES = ('.jpg','.bmp','.png','.gif', '.jpeg')
IMG_FILES = ('.jpg','.bmp','.png','.gif', '.jpeg')
ARCHIVE_FILES = ('.zip', '.cbz', '.rar', '.cbr')
FILE_FILTER = '*.zip *.cbz *.rar *.cbr'
IMG_FILTER = '*.jpg *.bmp *.png *.jpeg'
Expand Down Expand Up @@ -88,12 +101,11 @@ def _eze(self, fp):
self.metadata['language'] = ezedata['language']
d = ezedata['upload_date']
# should be zero padded
d[1] = int("0"+str(d[1])) if len(str(d[1])) == 1 else d[1]
d[3] = int("0"+str(d[1])) if len(str(d[1])) == 1 else d[1]
self.metadata['pub_date'] = datetime.datetime.strptime(
"{} {} {}".format(d[0], d[1], d[3]), "%Y %m %d")
d[1] = int("0" + str(d[1])) if len(str(d[1])) == 1 else d[1]
d[3] = int("0" + str(d[1])) if len(str(d[1])) == 1 else d[1]
self.metadata['pub_date'] = datetime.datetime.strptime("{} {} {}".format(d[0], d[1], d[3]), "%Y %m %d")
l = ezedata['source']
self.metadata['link'] = 'http://'+l['site']+'.org/g/'+str(l['gid'])+'/'+l['token']
self.metadata['link'] = 'http://' + l['site'] + '.org/g/' + str(l['gid']) + '/' + l['token']
return True

def _hdoujindler(self, fp):
Expand Down Expand Up @@ -400,7 +412,7 @@ def dir_contents(self, dir_name):
raise app_constants.FileNotFoundInArchive
if not dir_name:
if self.type == self.zip:
con = [x for x in self.namelist() if x.count('/') == 0 or \
con = [x for x in self.namelist() if x.count('/') == 0 or \
(x.count('/') == 1 and x.endswith('/'))]
elif self.type == self.rar:
con = [x for x in self.namelist() if x.count('/') == 0]
Expand Down Expand Up @@ -486,7 +498,7 @@ def gallery_eval(d):
for n in con:
if not n.lower().endswith(IMG_FILES):
gallery_probability -= 1
if gallery_probability >= (len(con)*0.8):
if gallery_probability >= (len(con) * 0.8):
return d
if zip_dirs: # There are directories in the top folder
# check parent
Expand Down Expand Up @@ -532,7 +544,7 @@ def recursive_gallery_check(path):
for f in files:
if not f.lower().endswith(IMG_FILES):
gallery_probability -= 1
if gallery_probability >= (len(files)*0.8):
if gallery_probability >= (len(files) * 0.8):
found_paths += 1
gallery_dirs.append(root)
log_i('Found {} in {}'.format(found_paths, path).encode(errors='ignore'))
Expand Down Expand Up @@ -589,7 +601,7 @@ def find_f_img_archive(extract=True):
else:
t_p = zip.extract(chapterpath, t_p)
else:
zip.extract_all(t_p) # Compatibility reasons.. TODO: REMOVE IN BETA
zip.extract_all(t_p) # Compatibility reasons.. TODO: REMOVE IN BETA
if app_constants.USE_EXTERNAL_VIEWER:
filepath = t_p
else:
Expand Down Expand Up @@ -1042,3 +1054,21 @@ def get_terms(term):
pieces.append(ns_tag)

return pieces

def image_greyscale(filepath):
    """
    Check if an image is monochrome (1 channel or 3 identical channels).

    filepath: a path or file-like object accepted by PIL.Image.open
    Returns True when the image is greyscale, False otherwise.
    """
    # NOTE(review): the previous version converted to RGB *before*
    # checking im.mode, which made the mode check dead code (mode is
    # always "RGB" after convert) and the "L" fast path unreachable.
    im = Image.open(filepath)
    # Single-channel modes are monochrome by definition — no pixel
    # comparison needed.
    if im.mode in ("1", "L"):
        return True
    # Normalize everything else (palette, RGBA, CMYK, ...) to RGB so
    # the channels can be compared pairwise; this matches the previous
    # behavior for those modes.
    im = im.convert("RGB")
    r, g, b = im.split()
    # getextrema()[1] is the maximum per-pixel difference between two
    # channels; 0 means the channels are identical everywhere.
    if ImageChops.difference(r, g).getextrema()[1] != 0:
        return False
    if ImageChops.difference(r, b).getextrema()[1] != 0:
        return False
    return True


0 comments on commit 43c3146

Please sign in to comment.