Upgrade to Python 3

pull/2/head
Noah 2014-06-02 14:04:44 -07:00
parent da81ea21bc
commit fce481e85d
9 changed files with 28 additions and 18 deletions

View File

@@ -66,7 +66,7 @@ def commit(document, data):
segment = "/".join(directory) segment = "/".join(directory)
if len(segment) > 0 and not os.path.isdir(segment): if len(segment) > 0 and not os.path.isdir(segment):
logger.debug("JsonDB: mkdir {}".format(segment)) logger.debug("JsonDB: mkdir {}".format(segment))
os.mkdir(segment, 0755) os.mkdir(segment, 0o755)
# Update the cached document. # Update the cached document.
set_cache(document, data, expires=cache_lifetime) set_cache(document, data, expires=cache_lifetime)

View File

@@ -2,6 +2,8 @@
"""Debug and logging functions.""" """Debug and logging functions."""
from __future__ import print_function
from flask import g, request from flask import g, request
import logging import logging
@@ -18,7 +20,7 @@ class LogHandler(logging.Handler):
name = "-nobody-" name = "-nobody-"
line = line.replace('$prefix$', '') line = line.replace('$prefix$', '')
print line print(line)
# Set up the logger. # Set up the logger.
logger = logging.getLogger("rophako") logger = logging.getLogger("rophako")

View File

@@ -40,7 +40,7 @@ def get_index():
# Hide any private posts if we aren't logged in. # Hide any private posts if we aren't logged in.
if not g.info["session"]["login"]: if not g.info["session"]["login"]:
for post_id, data in db.iteritems(): for post_id, data in db.items():
if data["privacy"] == "private": if data["privacy"] == "private":
del db[post_id] del db[post_id]
@@ -53,7 +53,7 @@ def get_categories():
# Group by tags. # Group by tags.
tags = {} tags = {}
for post, data in index.iteritems(): for post, data in index.items():
for tag in data["categories"]: for tag in data["categories"]:
if not tag in tags: if not tag in tags:
tags[tag] = 0 tags[tag] = 0
@@ -110,7 +110,7 @@ def post_entry(post_id, fid, epoch, author, subject, avatar, categories,
while True: while True:
collision = False collision = False
for k, v in index.iteritems(): for k, v in index.items():
# Skip the same post, for updates. # Skip the same post, for updates.
if k == post_id: continue if k == post_id: continue
@@ -191,7 +191,7 @@ def resolve_id(fid):
return None return None
# It's a friendly ID. Scan for it. # It's a friendly ID. Scan for it.
for post_id, data in index.iteritems(): for post_id, data in index.items():
if data["fid"] == fid: if data["fid"] == fid:
return int(post_id) return int(post_id)

View File

@@ -8,6 +8,7 @@ import hashlib
import urllib import urllib
import random import random
import re import re
import sys
import config import config
import rophako.jsondb as JsonDB import rophako.jsondb as JsonDB
@@ -207,7 +208,7 @@ def write_subscribers(thread, subs):
def random_hash(): def random_hash():
"""Get a short random hash to use as the ID for a comment.""" """Get a short random hash to use as the ID for a comment."""
md5 = hashlib.md5() md5 = hashlib.md5()
md5.update(str(random.randint(0, 1000000))) md5.update(str(random.randint(0, 1000000)).encode("utf-8"))
return md5.hexdigest() return md5.hexdigest()
@@ -223,7 +224,13 @@ def gravatar(email):
} }
if default: if default:
params["d"] = default params["d"] = default
url = "//www.gravatar.com/avatar/" + hashlib.md5(email.lower()).hexdigest() + "?" url = "//www.gravatar.com/avatar/" + hashlib.md5(email.lower().encode("utf-8")).hexdigest() + "?"
url += urllib.urlencode(params)
# URL encode the params, the Python 2 & Python 3 way.
if sys.version_info[0] < 3:
url += urllib.urlencode(params)
else:
url += urllib.parse.urlencode(params)
return url return url
return "" return ""

View File

@@ -46,7 +46,7 @@ def load_theme():
try: try:
data = json.loads(text) data = json.loads(text)
except Exception, e: except Exception as e:
logger.error("Couldn't load JSON from emoticon file: {}".format(e)) logger.error("Couldn't load JSON from emoticon file: {}".format(e))
data = {} data = {}

View File

@@ -536,5 +536,5 @@ def random_name(filetype):
def random_hash(): def random_hash():
"""Get a short random hash to use as the base name for a photo.""" """Get a short random hash to use as the base name for a photo."""
md5 = hashlib.md5() md5 = hashlib.md5()
md5.update(str(random.randint(0, 1000000))) md5.update(str(random.randint(0, 1000000)).encode("utf-8"))
return md5.hexdigest()[:8] return md5.hexdigest()[:8]

View File

@@ -147,7 +147,7 @@ def exists(uid=None, username=None):
def hash_password(password): def hash_password(password):
return bcrypt.hashpw(str(password), bcrypt.gensalt(config.BCRYPT_ITERATIONS)) return bcrypt.hashpw(str(password).encode("utf-8"), bcrypt.gensalt(config.BCRYPT_ITERATIONS)).decode("utf-8")
def check_auth(username, password): def check_auth(username, password):
@@ -163,7 +163,8 @@ def check_auth(username, password):
db = get_user(username=username) db = get_user(username=username)
# Check the password. # Check the password.
return bcrypt.hashpw(str(password), str(db["password"])) == db["password"] test = bcrypt.hashpw(str(password).encode("utf-8"), str(db["password"]).encode("utf-8")).decode("utf-8")
return test == db["password"]
def get_next_uid(): def get_next_uid():

View File

@@ -37,7 +37,7 @@ def login():
# Redirect them to a local page? # Redirect them to a local page?
url = request.form.get("url", "") url = request.form.get("url", "")
if url[0] == "/": if url.startswith("/"):
return redirect(url) return redirect(url)
return redirect(url_for("index")) return redirect(url_for("index"))

View File

@@ -68,7 +68,7 @@ def entry(fid):
# Inject information about this post's siblings. # Inject information about this post's siblings.
index = Blog.get_index() index = Blog.get_index()
siblings = [None, None] # previous, next siblings = [None, None] # previous, next
sorted_ids = map(lambda y: int(y), sorted(index.keys(), key=lambda x: index[x]["time"], reverse=True)) sorted_ids = list(map(lambda y: int(y), sorted(index.keys(), key=lambda x: index[x]["time"], reverse=True)))
for i in range(0, len(sorted_ids)): for i in range(0, len(sorted_ids)):
if sorted_ids[i] == post_id: if sorted_ids[i] == post_id:
# Found us! # Found us!
@@ -195,7 +195,7 @@ def update():
g.info["min"], g.info["min"],
g.info["sec"], g.info["sec"],
) )
except ValueError, e: except ValueError as e:
invalid = True invalid = True
flash("Invalid date/time: " + str(e)) flash("Invalid date/time: " + str(e))
@@ -364,7 +364,7 @@ def partial_index():
# Are we narrowing by category? # Are we narrowing by category?
if category: if category:
# Narrow down the index to just those that match the category. # Narrow down the index to just those that match the category.
for post_id, data in index.iteritems(): for post_id, data in index.items():
if not category in data["categories"]: if not category in data["categories"]:
continue continue
pool[post_id] = data pool[post_id] = data
@@ -445,7 +445,7 @@ def get_index_posts(index):
"""Helper function to get data for the blog index page.""" """Helper function to get data for the blog index page."""
# Separate the sticky posts from the normal ones. # Separate the sticky posts from the normal ones.
sticky, normal = set(), set() sticky, normal = set(), set()
for post_id, data in index.iteritems(): for post_id, data in index.items():
if data["sticky"]: if data["sticky"]:
sticky.add(post_id) sticky.add(post_id)
else: else: