diff --git a/README.rst b/README.rst index 70c88be..ba8748a 100644 --- a/README.rst +++ b/README.rst @@ -95,15 +95,12 @@ Optional: NSFW Detection -------------- -0x0 supports classification of NSFW content via -`a machine learning model `_. -This works for images and video files and requires the following -Python modules: +0x0 supports classification of NSFW content via Yahoo’s open_nsfw Caffe +neural network model. This works for images and video files and requires +the following: -* torch -* transformers -* pillow -* `av `_ +* Caffe Python module (built for Python 3) +* `PyAV `_ Virus Scanning diff --git a/cleanup.py b/cleanup.py index 4d52c93..14fbc61 100755 --- a/cleanup.py +++ b/cleanup.py @@ -5,4 +5,4 @@ print("Instead, please run") print("") print(" $ FLASK_APP=fhost flask prune") print("") -exit(1) +exit(1); diff --git a/fhost.py b/fhost.py index 5578f6e..8201954 100755 --- a/fhost.py +++ b/fhost.py @@ -1,7 +1,8 @@ #!/usr/bin/env python3 +# -*- coding: utf-8 -*- """ - Copyright © 2024 Mia Herkt + Copyright © 2020 Mia Herkt Licensed under the EUPL, Version 1.2 or - as soon as approved by the European Commission - subsequent versions of the EUPL (the "License"); @@ -18,79 +19,77 @@ and limitations under the License. """ -from flask import Flask, abort, make_response, redirect, render_template, \ - Request, request, Response, send_from_directory, url_for +from flask import Flask, abort, make_response, redirect, request, send_from_directory, url_for, Response, render_template from flask_sqlalchemy import SQLAlchemy from flask_migrate import Migrate from sqlalchemy import and_, or_ -from sqlalchemy.orm import declared_attr -import sqlalchemy.types as types from jinja2.exceptions import * from jinja2 import ChoiceLoader, FileSystemLoader -from hashlib import file_digest +from hashlib import sha256 from magic import Magic from mimetypes import guess_extension import click -import enum import os import sys import time import datetime -import ipaddress import typing import requests import secrets -import re from validators import url as url_valid from pathlib import Path app = Flask(__name__, instance_relative_config=True) app.config.update( - SQLALCHEMY_TRACK_MODIFICATIONS=False, - PREFERRED_URL_SCHEME="https", # nginx users: make sure to have - # 'uwsgi_param UWSGI_SCHEME $scheme;' in - # your config - MAX_CONTENT_LENGTH=256 * 1024 * 1024, - MAX_URL_LENGTH=4096, - USE_X_SENDFILE=False, - FHOST_USE_X_ACCEL_REDIRECT=True, # expect nginx by default - FHOST_STORAGE_PATH="up", - FHOST_MAX_EXT_LENGTH=9, - FHOST_SECRET_BYTES=16, - FHOST_EXT_OVERRIDE={ - "audio/flac": ".flac", - "image/gif": ".gif", - "image/jpeg": ".jpg", - "image/png": ".png", - "image/svg+xml": ".svg", - "video/webm": ".webm", - "video/x-matroska": ".mkv", - "application/octet-stream": ".bin", - "text/plain": ".log", - "text/plain": ".txt", - "text/x-diff": ".diff", + SQLALCHEMY_TRACK_MODIFICATIONS = False, + PREFERRED_URL_SCHEME = "https", # nginx users: make sure to have 'uwsgi_param UWSGI_SCHEME $scheme;' in your config + MAX_CONTENT_LENGTH = 256 * 1024 * 1024, + MAX_URL_LENGTH = 4096, + USE_X_SENDFILE = False, + FHOST_USE_X_ACCEL_REDIRECT = True, # expect nginx by default + FHOST_STORAGE_PATH = "up", + FHOST_MAX_EXT_LENGTH = 9, + FHOST_SECRET_BYTES = 16, + FHOST_EXT_OVERRIDE = { + "audio/flac" : ".flac", + "image/gif" : ".gif", + "image/jpeg" : ".jpg", + "image/png" : ".png", + "image/svg+xml" : ".svg", + "video/webm" : ".webm", + "video/x-matroska" : ".mkv", + "application/octet-stream" : ".bin", + "text/plain" : ".log", 
+ "text/plain" : ".txt", + "text/x-diff" : ".diff", }, - NSFW_DETECT=False, - NSFW_THRESHOLD=0.92, - VSCAN_SOCKET=None, - VSCAN_QUARANTINE_PATH="quarantine", - VSCAN_IGNORE=[ + FHOST_MIME_BLACKLIST = [ + "application/x-dosexec", + "application/java-archive", + "application/java-vm" + ], + FHOST_UPLOAD_BLACKLIST = None, + NSFW_DETECT = False, + NSFW_THRESHOLD = 0.608, + VSCAN_SOCKET = None, + VSCAN_QUARANTINE_PATH = "quarantine", + VSCAN_IGNORE = [ "Eicar-Test-Signature", "PUA.Win.Packer.XmMusicFile", ], - VSCAN_INTERVAL=datetime.timedelta(days=7), - URL_ALPHABET="DEQhd2uFteibPwq0SWBInTpA_jcZL5GKz3YCR14Ulk87Jors9vNHgfaOmMX" - "y6Vx-", + VSCAN_INTERVAL = datetime.timedelta(days=7), + URL_ALPHABET = "DEQhd2uFteibPwq0SWBInTpA_jcZL5GKz3YCR14Ulk87Jors9vNHgfaOmMXy6Vx-", ) -app.config.from_pyfile("config.py") -app.jinja_loader = ChoiceLoader([ - FileSystemLoader(str(Path(app.instance_path) / "templates")), - app.jinja_loader -]) +if not app.config["TESTING"]: + app.config.from_pyfile("config.py") + app.jinja_loader = ChoiceLoader([ + FileSystemLoader(str(Path(app.instance_path) / "templates")), + app.jinja_loader + ]) -if app.config["DEBUG"]: - app.config["FHOST_USE_X_ACCEL_REDIRECT"] = False + if app.config["DEBUG"]: + app.config["FHOST_USE_X_ACCEL_REDIRECT"] = False if app.config["NSFW_DETECT"]: from nsfw_detect import NSFWDetector @@ -98,7 +97,7 @@ if app.config["NSFW_DETECT"]: try: mimedetect = Magic(mime=True, mime_encoding=False) -except TypeError: +except: print("""Error: You have installed the wrong version of the 'magic' module. Please install python-magic.""") sys.exit(1) @@ -106,11 +105,10 @@ Please install python-magic.""") db = SQLAlchemy(app) migrate = Migrate(app, db) - class URL(db.Model): __tablename__ = "URL" - id = db.Column(db.Integer, primary_key=True) - url = db.Column(db.UnicodeText, unique=True) + id = db.Column(db.Integer, primary_key = True) + url = db.Column(db.UnicodeText, unique = True) def __init__(self, url): self.url = url @@ -121,7 +119,6 @@ class URL(db.Model): def geturl(self): return url_for("get", path=self.getname(), _external=True) + "\n" - @staticmethod def get(url): u = URL.query.filter_by(url=url).first() @@ -132,35 +129,12 @@ class URL(db.Model): return u - -class IPAddress(types.TypeDecorator): - impl = types.LargeBinary - cache_ok = True - - def process_bind_param(self, value, dialect): - match value: - case ipaddress.IPv6Address(): - value = (value.ipv4_mapped or value).packed - case ipaddress.IPv4Address(): - value = value.packed - - return value - - def process_result_value(self, value, dialect): - if value is not None: - value = ipaddress.ip_address(value) - if type(value) is ipaddress.IPv6Address: - value = value.ipv4_mapped or value - - return value - - class File(db.Model): - id = db.Column(db.Integer, primary_key=True) - sha256 = db.Column(db.String, unique=True) + id = db.Column(db.Integer, primary_key = True) + sha256 = db.Column(db.String, unique = True) ext = db.Column(db.UnicodeText) mime = db.Column(db.UnicodeText) - addr = db.Column(IPAddress(16)) + addr = db.Column(db.UnicodeText) ua = db.Column(db.UnicodeText) removed = db.Column(db.Boolean, default=False) nsfw_score = db.Column(db.Float) @@ -181,19 +155,18 @@ class File(db.Model): @property def is_nsfw(self) -> bool: - if self.nsfw_score: - return self.nsfw_score > app.config["NSFW_THRESHOLD"] - return False + return self.nsfw_score and self.nsfw_score > app.config["NSFW_THRESHOLD"] def getname(self): return u"{0}{1}".format(su.enbase(self.id), self.ext) def geturl(self): n = 
self.getname() - a = "nsfw" if self.is_nsfw else None - return url_for("get", path=n, secret=self.secret, - _external=True, _anchor=a) + "\n" + if self.is_nsfw: + return url_for("get", path=n, secret=self.secret, _external=True, _anchor="nsfw") + "\n" + else: + return url_for("get", path=n, secret=self.secret, _external=True) + "\n" def getpath(self) -> Path: return Path(app.config["FHOST_STORAGE_PATH"]) / self.sha256 @@ -204,37 +177,33 @@ class File(db.Model): self.removed = permanent self.getpath().unlink(missing_ok=True) - """ - Returns the epoch millisecond that a file should expire - - Uses the expiration time provided by the user (requested_expiration) - upper-bounded by an algorithm that computes the size based on the size of - the file. - - That is, all files are assigned a computed expiration, which can be - voluntarily shortened by the user either by providing a timestamp in - milliseconds since epoch or a duration in hours. - """ - @staticmethod + # Returns the epoch millisecond that a file should expire + # + # Uses the expiration time provided by the user (requested_expiration) + # upper-bounded by an algorithm that computes the size based on the size of the + # file. + # + # That is, all files are assigned a computed expiration, which can voluntarily + # shortened by the user either by providing a timestamp in epoch millis or a + # duration in hours. def get_expiration(requested_expiration, size) -> int: - current_epoch_millis = time.time() * 1000 + current_epoch_millis = time.time() * 1000; # Maximum lifetime of the file in milliseconds - max_lifespan = get_max_lifespan(size) + this_files_max_lifespan = get_max_lifespan(size); # The latest allowed expiration date for this file, in epoch millis - max_expiration = max_lifespan + 1000 * time.time() + this_files_max_expiration = this_files_max_lifespan + 1000 * time.time(); if requested_expiration is None: - return max_expiration + return this_files_max_expiration elif requested_expiration < 1650460320000: # Treat the requested expiration time as a duration in hours requested_expiration_ms = requested_expiration * 60 * 60 * 1000 - return min(max_expiration, - current_epoch_millis + requested_expiration_ms) + return min(this_files_max_expiration, current_epoch_millis + requested_expiration_ms) else: - # Treat expiration time as a timestamp in epoch millis - return min(max_expiration, requested_expiration) + # Treat the requested expiration time as a timestamp in epoch millis + return min(this_files_max_expiration, requested_expiration) """ requested_expiration can be: @@ -242,38 +211,29 @@ class File(db.Model): - a duration (in hours) that the file should live for - a timestamp in epoch millis that the file should expire at - Any value greater that the longest allowed file lifespan will be rounded - down to that value. + Any value greater that the longest allowed file lifespan will be rounded down to that + value. 
""" - @staticmethod - def store(file_, requested_expiration: typing.Optional[int], addr, ua, - secret: bool): - fstream = file_.stream - digest = file_digest(fstream, "sha256").hexdigest() - fstream.seek(0, os.SEEK_END) - flen = fstream.tell() - fstream.seek(0) + def store(file_, requested_expiration: typing.Optional[int], addr, ua, secret: bool): + data = file_.read() + digest = sha256(data).hexdigest() def get_mime(): - guess = mimedetect.from_descriptor(fstream.fileno()) - app.logger.debug(f"MIME - specified: '{file_.content_type}' - " - f"detected: '{guess}'") + guess = mimedetect.from_buffer(data) + app.logger.debug(f"MIME - specified: '{file_.content_type}' - detected: '{guess}'") - if (not file_.content_type - or "/" not in file_.content_type - or file_.content_type == "application/octet-stream"): + if not file_.content_type or not "/" in file_.content_type or file_.content_type == "application/octet-stream": mime = guess else: mime = file_.content_type + if mime in app.config["FHOST_MIME_BLACKLIST"] or guess in app.config["FHOST_MIME_BLACKLIST"]: + abort(415) + if len(mime) > 128: abort(400) - for flt in MIMEFilter.query.all(): - if flt.check(guess): - abort(403, flt.reason) - - if mime.startswith("text/") and "charset" not in mime: + if mime.startswith("text/") and not "charset" in mime: mime += "; charset=utf-8" return mime @@ -285,8 +245,7 @@ class File(db.Model): gmime = mime.split(";")[0] guess = guess_extension(gmime) - app.logger.debug(f"extension - specified: '{ext}' - detected: " - f"'{guess}'") + app.logger.debug(f"extension - specified: '{ext}' - detected: '{guess}'") if not ext: if gmime in app.config["FHOST_EXT_OVERRIDE"]: @@ -298,7 +257,7 @@ class File(db.Model): return ext[:app.config["FHOST_MAX_EXT_LENGTH"]] or ".bin" - expiration = File.get_expiration(requested_expiration, flen) + expiration = File.get_expiration(requested_expiration, len(data)) isnew = True f = File.query.filter_by(sha256=digest).first() @@ -329,17 +288,17 @@ class File(db.Model): if isnew: f.secret = None if secret: - f.secret = \ - secrets.token_urlsafe(app.config["FHOST_SECRET_BYTES"]) + f.secret = secrets.token_urlsafe(app.config["FHOST_SECRET_BYTES"]) storage = Path(app.config["FHOST_STORAGE_PATH"]) storage.mkdir(parents=True, exist_ok=True) p = storage / digest if not p.is_file(): - file_.save(p) + with open(p, "wb") as of: + of.write(data) - f.size = flen + f.size = len(data) if not f.nsfw_score and app.config["NSFW_DETECT"]: f.nsfw_score = nsfw.detect(str(p)) @@ -349,129 +308,8 @@ class File(db.Model): return f, isnew -class RequestFilter(db.Model): - __tablename__ = "request_filter" - id = db.Column(db.Integer, primary_key=True) - type = db.Column(db.String(20), index=True, nullable=False) - comment = db.Column(db.UnicodeText) - - __mapper_args__ = { - "polymorphic_on": type, - "with_polymorphic": "*", - "polymorphic_identity": "empty" - } - - def __init__(self, comment: str = None): - self.comment = comment - - -class AddrFilter(RequestFilter): - addr = db.Column(IPAddress(16), unique=True) - - __mapper_args__ = {"polymorphic_identity": "addr"} - - def __init__(self, addr: ipaddress._BaseAddress, comment: str = None): - self.addr = addr - super().__init__(comment=comment) - - def check(self, addr: ipaddress._BaseAddress) -> bool: - if type(addr) is ipaddress.IPv6Address: - addr = addr.ipv4_mapped or addr - return addr == self.addr - - def check_request(self, r: Request) -> bool: - return self.check(ipaddress.ip_address(r.remote_addr)) - - @property - def reason(self) -> str: - return 
f"Your IP Address ({self.addr.compressed}) is blocked from " \ - "uploading files." - - -class IPNetwork(types.TypeDecorator): - impl = types.Text - cache_ok = True - - def process_bind_param(self, value, dialect): - if value is not None: - value = value.compressed - - return value - - def process_result_value(self, value, dialect): - if value is not None: - value = ipaddress.ip_network(value) - - return value - - -class NetFilter(RequestFilter): - net = db.Column(IPNetwork) - - __mapper_args__ = {"polymorphic_identity": "net"} - - def __init__(self, net: ipaddress._BaseNetwork, comment: str = None): - self.net = net - super().__init__(comment=comment) - - def check(self, addr: ipaddress._BaseAddress) -> bool: - if type(addr) is ipaddress.IPv6Address: - addr = addr.ipv4_mapped or addr - return addr in self.net - - def check_request(self, r: Request) -> bool: - return self.check(ipaddress.ip_address(r.remote_addr)) - - @property - def reason(self) -> str: - return f"Your network ({self.net.compressed}) is blocked from " \ - "uploading files." - - -class HasRegex: - @declared_attr - def regex(cls): - return cls.__table__.c.get("regex", db.Column(db.UnicodeText)) - - def check(self, s: str) -> bool: - return re.match(self.regex, s) is not None - - -class MIMEFilter(HasRegex, RequestFilter): - __mapper_args__ = {"polymorphic_identity": "mime"} - - def __init__(self, mime_regex: str, comment: str = None): - self.regex = mime_regex - super().__init__(comment=comment) - - def check_request(self, r: Request) -> bool: - if "file" in r.files: - return self.check(r.files["file"].mimetype) - - return False - - @property - def reason(self) -> str: - return "File MIME type not allowed." - - -class UAFilter(HasRegex, RequestFilter): - __mapper_args__ = {"polymorphic_identity": "ua"} - - def __init__(self, ua_regex: str, comment: str = None): - self.regex = ua_regex - super().__init__(comment=comment) - - def check_request(self, r: Request) -> bool: - return self.check(r.user_agent.string) - - @property - def reason(self) -> str: - return "User agent not allowed." - - class UrlEncoder(object): - def __init__(self, alphabet, min_length): + def __init__(self,alphabet, min_length): self.alphabet = alphabet self.min_length = min_length @@ -491,21 +329,17 @@ class UrlEncoder(object): result += self.alphabet.index(c) * (n ** i) return result - su = UrlEncoder(alphabet=app.config["URL_ALPHABET"], min_length=1) - def fhost_url(scheme=None): if not scheme: return url_for(".fhost", _external=True).rstrip("/") else: return url_for(".fhost", _external=True, _scheme=scheme).rstrip("/") - def is_fhost_url(url): return url.startswith(fhost_url()) or url.startswith(fhost_url("https")) - def shorten(url): if len(url) > app.config["MAX_URL_LENGTH"]: abort(414) @@ -517,6 +351,16 @@ def shorten(url): return u.geturl() +def in_upload_bl(addr): + if app.config["FHOST_UPLOAD_BLACKLIST"]: + with app.open_instance_resource(app.config["FHOST_UPLOAD_BLACKLIST"], "r") as bl: + check = addr.lstrip("::ffff:") + for l in bl.readlines(): + if not l.startswith("#"): + if check == l.rstrip(): + return True + + return False """ requested_expiration can be: @@ -524,11 +368,13 @@ requested_expiration can be: - a duration (in hours) that the file should live for - a timestamp in epoch millis that the file should expire at -Any value greater that the longest allowed file lifespan will be rounded down -to that value. +Any value greater that the longest allowed file lifespan will be rounded down to that +value. 
""" -def store_file(f, requested_expiration: typing.Optional[int], addr, ua, - secret: bool): +def store_file(f, requested_expiration: typing.Optional[int], addr, ua, secret: bool): + if in_upload_bl(addr): + return "Your host is blocked from uploading files.\n", 451 + sf, isnew = File.store(f, requested_expiration, addr, ua, secret) response = make_response(sf.geturl()) @@ -539,12 +385,11 @@ def store_file(f, requested_expiration: typing.Optional[int], addr, ua, return response - def store_url(url, addr, ua, secret: bool): if is_fhost_url(url): abort(400) - h = {"Accept-Encoding": "identity"} + h = { "Accept-Encoding" : "identity" } r = requests.get(url, stream=True, verify=False, headers=h) try: @@ -553,14 +398,13 @@ def store_url(url, addr, ua, secret: bool): return str(e) + "\n" if "content-length" in r.headers: - length = int(r.headers["content-length"]) + l = int(r.headers["content-length"]) - if length <= app.config["MAX_CONTENT_LENGTH"]: + if l <= app.config["MAX_CONTENT_LENGTH"]: def urlfile(**kwargs): - return type('', (), kwargs)() + return type('',(),kwargs)() - f = urlfile(read=r.raw.read, - content_type=r.headers["content-type"], filename="") + f = urlfile(read=r.raw.read, content_type=r.headers["content-type"], filename="") return store_file(f, None, addr, ua, secret) else: @@ -568,9 +412,10 @@ def store_url(url, addr, ua, secret: bool): else: abort(411) - def manage_file(f): - if request.form["token"] != f.mgmt_token: + try: + assert(request.form["token"] == f.mgmt_token) + except: abort(401) if "delete" in request.form: @@ -589,7 +434,6 @@ def manage_file(f): abort(400) - @app.route("/", methods=["GET", "POST"]) @app.route("/s//", methods=["GET", "POST"]) def get(path, secret=None): @@ -626,9 +470,7 @@ def get(path, secret=None): response.headers["Content-Length"] = f.size response.headers["X-Accel-Redirect"] = "/" + str(fpath) else: - response = send_from_directory( - app.config["FHOST_STORAGE_PATH"], f.sha256, - mimetype=f.mime) + response = send_from_directory(app.config["FHOST_STORAGE_PATH"], f.sha256, mimetype = f.mime) response.headers["X-Expires"] = f.expiration return response @@ -646,19 +488,11 @@ def get(path, secret=None): abort(404) - @app.route("/", methods=["GET", "POST"]) def fhost(): if request.method == "POST": - for flt in RequestFilter.query.all(): - if flt.check_request(request): - abort(403, flt.reason) - sf = None secret = "secret" in request.form - addr = ipaddress.ip_address(request.remote_addr) - if type(addr) is ipaddress.IPv6Address: - addr = addr.ipv4_mapped or addr if "file" in request.files: try: @@ -666,7 +500,7 @@ def fhost(): return store_file( request.files["file"], int(request.form["expires"]), - addr, + request.remote_addr, request.user_agent.string, secret ) @@ -678,14 +512,14 @@ def fhost(): return store_file( request.files["file"], None, - addr, + request.remote_addr, request.user_agent.string, secret ) elif "url" in request.form: return store_url( request.form["url"], - addr, + request.remote_addr, request.user_agent.string, secret ) @@ -696,17 +530,14 @@ def fhost(): else: return render_template("index.html") - @app.route("/robots.txt") def robots(): return """User-agent: * Disallow: / """ - @app.errorhandler(400) @app.errorhandler(401) -@app.errorhandler(403) @app.errorhandler(404) @app.errorhandler(411) @app.errorhandler(413) @@ -715,23 +546,20 @@ Disallow: / @app.errorhandler(451) def ehandler(e): try: - return render_template(f"{e.code}.html", id=id, request=request, - description=e.description), e.code + return 
render_template(f"{e.code}.html", id=id, request=request), e.code except TemplateNotFound: return "Segmentation fault\n", e.code - @app.cli.command("prune") def prune(): """ Clean up expired files - Deletes any files from the filesystem which have hit their expiration time. - This doesn't remove them from the database, only from the filesystem. - It is recommended that server owners run this command regularly, or set it - up on a timer. + Deletes any files from the filesystem which have hit their expiration time. This + doesn't remove them from the database, only from the filesystem. It's recommended + that server owners run this command regularly, or set it up on a timer. """ - current_time = time.time() * 1000 + current_time = time.time() * 1000; # The path to where uploaded files are stored storage = Path(app.config["FHOST_STORAGE_PATH"]) @@ -745,7 +573,7 @@ def prune(): ) ) - files_removed = 0 + files_removed = 0; # For every expired file... for file in expired_files: @@ -758,33 +586,31 @@ def prune(): # Remove it from the file system try: os.remove(file_path) - files_removed += 1 + files_removed += 1; except FileNotFoundError: - pass # If the file was already gone, we're good + pass # If the file was already gone, we're good except OSError as e: print(e) print( "\n------------------------------------" - "Encountered an error while trying to remove file {file_path}." - "Make sure the server is configured correctly, permissions " - "are okay, and everything is ship shape, then try again.") - return + "Encountered an error while trying to remove file {file_path}. Double" + "check to make sure the server is configured correctly, permissions are" + "okay, and everything is ship shape, then try again.") + return; # Finally, mark that the file was removed - file.expiration = None + file.expiration = None; db.session.commit() print(f"\nDone! {files_removed} file(s) removed") +""" For a file of a given size, determine the largest allowed lifespan of that file -""" -For a file of a given size, determine the largest allowed lifespan of that file +Based on the current app's configuration: Specifically, the MAX_CONTENT_LENGTH, as well +as FHOST_{MIN,MAX}_EXPIRATION. -Based on the current app's configuration: -Specifically, the MAX_CONTENT_LENGTH, as well as FHOST_{MIN,MAX}_EXPIRATION. - -This lifespan may be shortened by a user's request, but no files should be -allowed to expire at a point after this number. +This lifespan may be shortened by a user's request, but no files should be allowed to +expire at a point after this number. Value returned is a duration in milliseconds. """ @@ -794,13 +620,11 @@ def get_max_lifespan(filesize: int) -> int: max_size = app.config.get("MAX_CONTENT_LENGTH", 256 * 1024 * 1024) return min_exp + int((-max_exp + min_exp) * (filesize / max_size - 1) ** 3) - def do_vscan(f): if f["path"].is_file(): with open(f["path"], "rb") as scanf: try: - res = list(app.config["VSCAN_SOCKET"].instream(scanf).values()) - f["result"] = res[0] + f["result"] = list(app.config["VSCAN_SOCKET"].instream(scanf).values())[0] except: f["result"] = ("SCAN FAILED", None) else: @@ -808,12 +632,11 @@ def do_vscan(f): return f - @app.cli.command("vscan") def vscan(): if not app.config["VSCAN_SOCKET"]: - print("Error: Virus scanning enabled but no connection method " - "specified.\nPlease set VSCAN_SOCKET.") + print("""Error: Virus scanning enabled but no connection method specified. 
+Please set VSCAN_SOCKET.""") sys.exit(1) qp = Path(app.config["VSCAN_QUARANTINE_PATH"]) @@ -827,11 +650,9 @@ def vscan(): File.last_vscan == None), File.removed == False) else: - res = File.query.filter(File.last_vscan == None, - File.removed == False) + res = File.query.filter(File.last_vscan == None, File.removed == False) - work = [{"path": f.getpath(), "name": f.getname(), "id": f.id} - for f in res] + work = [{"path" : f.getpath(), "name" : f.getname(), "id" : f.id} for f in res] results = [] for i, r in enumerate(p.imap_unordered(do_vscan, work)): @@ -845,10 +666,9 @@ def vscan(): found = True results.append({ - "id": r["id"], - "last_vscan": None if r["result"][0] == "SCAN FAILED" - else datetime.datetime.now(), - "removed": found}) + "id" : r["id"], + "last_vscan" : None if r["result"][0] == "SCAN FAILED" else datetime.datetime.now(), + "removed" : found}) db.session.bulk_update_mappings(File, results) db.session.commit() diff --git a/instance/config.example.py b/instance/config.example.py index 831498e..9740ca2 100644 --- a/instance/config.example.py +++ b/instance/config.example.py @@ -139,6 +139,30 @@ FHOST_EXT_OVERRIDE = { "text/x-diff" : ".diff", } + +# Control which files aren't allowed to be uploaded +# +# Certain kinds of files are never accepted. If the file claims to be one of +# these types of files, or if we look at the contents of the file and it looks +# like one of these filetypes, then we reject the file outright with a 415 +# UNSUPPORTED MEDIA EXCEPTION +FHOST_MIME_BLACKLIST = [ + "application/x-dosexec", + "application/java-archive", + "application/java-vm" +] + + +# A list of IP addresses which are blacklisted from uploading files +# +# Can be set to the path of a file with an IP address on each line. The file +# can also include comment lines using a pound sign (#). Paths are resolved +# relative to the instance/ directory. +# +# If this is set to None, then no IP blacklist will be consulted. +FHOST_UPLOAD_BLACKLIST = None + + # Enables support for detecting NSFW images # # Consult README.md for additional dependencies before setting to True @@ -152,7 +176,7 @@ NSFW_DETECT = False # are marked as NSFW. # # If NSFW_DETECT is set to False, then this has no effect. -NSFW_THRESHOLD = 0.92 +NSFW_THRESHOLD = 0.608 # If you want to scan files for viruses using ClamAV, specify the socket used diff --git a/migrations/env.py b/migrations/env.py index 46a8e20..4593816 100755 --- a/migrations/env.py +++ b/migrations/env.py @@ -81,7 +81,6 @@ def run_migrations_online(): finally: connection.close() - if context.is_offline_mode(): run_migrations_offline() else: diff --git a/migrations/versions/0659d7b9eea8_.py b/migrations/versions/0659d7b9eea8_.py index 1763219..2ef2151 100644 --- a/migrations/versions/0659d7b9eea8_.py +++ b/migrations/versions/0659d7b9eea8_.py @@ -15,8 +15,12 @@ import sqlalchemy as sa def upgrade(): + # ### commands auto generated by Alembic - please adjust! ### op.add_column('file', sa.Column('mgmt_token', sa.String(), nullable=True)) + # ### end Alembic commands ### def downgrade(): + # ### commands auto generated by Alembic - please adjust! ### op.drop_column('file', 'mgmt_token') + # ### end Alembic commands ### diff --git a/migrations/versions/0cd36ecdd937_.py b/migrations/versions/0cd36ecdd937_.py index 6e9c87a..349cab9 100644 --- a/migrations/versions/0cd36ecdd937_.py +++ b/migrations/versions/0cd36ecdd937_.py @@ -15,22 +15,28 @@ import sqlalchemy as sa def upgrade(): + ### commands auto generated by Alembic - please adjust! 
### op.create_table('URL', - sa.Column('id', sa.Integer(), nullable=False), - sa.Column('url', sa.UnicodeText(), nullable=True), - sa.PrimaryKeyConstraint('id'), - sa.UniqueConstraint('url')) + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('url', sa.UnicodeText(), nullable=True), + sa.PrimaryKeyConstraint('id'), + sa.UniqueConstraint('url') + ) op.create_table('file', - sa.Column('id', sa.Integer(), nullable=False), - sa.Column('sha256', sa.String(), nullable=True), - sa.Column('ext', sa.UnicodeText(), nullable=True), - sa.Column('mime', sa.UnicodeText(), nullable=True), - sa.Column('addr', sa.UnicodeText(), nullable=True), - sa.Column('removed', sa.Boolean(), nullable=True), - sa.PrimaryKeyConstraint('id'), - sa.UniqueConstraint('sha256')) + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('sha256', sa.String(), nullable=True), + sa.Column('ext', sa.UnicodeText(), nullable=True), + sa.Column('mime', sa.UnicodeText(), nullable=True), + sa.Column('addr', sa.UnicodeText(), nullable=True), + sa.Column('removed', sa.Boolean(), nullable=True), + sa.PrimaryKeyConstraint('id'), + sa.UniqueConstraint('sha256') + ) + ### end Alembic commands ### def downgrade(): + ### commands auto generated by Alembic - please adjust! ### op.drop_table('file') op.drop_table('URL') + ### end Alembic commands ### diff --git a/migrations/versions/30bfe33aa328_add_file_size_field.py b/migrations/versions/30bfe33aa328_add_file_size_field.py index b1032c4..e6ac279 100644 --- a/migrations/versions/30bfe33aa328_add_file_size_field.py +++ b/migrations/versions/30bfe33aa328_add_file_size_field.py @@ -19,7 +19,6 @@ from pathlib import Path Base = automap_base() - def upgrade(): op.add_column('file', sa.Column('size', sa.BigInteger(), nullable=True)) bind = op.get_bind() @@ -35,8 +34,8 @@ def upgrade(): p = storage / f.sha256 if p.is_file(): updates.append({ - "id": f.id, - "size": p.stat().st_size + "id" : f.id, + "size" : p.stat().st_size }) session.bulk_update_mappings(File, updates) diff --git a/migrations/versions/5cda1743b92d_add_request_filters.py b/migrations/versions/5cda1743b92d_add_request_filters.py deleted file mode 100644 index b03f1aa..0000000 --- a/migrations/versions/5cda1743b92d_add_request_filters.py +++ /dev/null @@ -1,79 +0,0 @@ -"""Add request filters - -Revision ID: 5cda1743b92d -Revises: dd0766afb7d2 -Create Date: 2024-09-27 12:13:16.845981 - -""" - -# revision identifiers, used by Alembic. 
-revision = '5cda1743b92d' -down_revision = 'dd0766afb7d2' - -from alembic import op -import sqlalchemy as sa -from sqlalchemy.ext.automap import automap_base -from sqlalchemy.orm import Session -from flask import current_app -import ipaddress - -Base = automap_base() - - -def upgrade(): - op.create_table('request_filter', - sa.Column('id', sa.Integer(), nullable=False), - sa.Column('type', sa.String(length=20), nullable=False), - sa.Column('comment', sa.UnicodeText(), nullable=True), - sa.Column('addr', sa.LargeBinary(length=16), - nullable=True), - sa.Column('net', sa.Text(), nullable=True), - sa.Column('regex', sa.UnicodeText(), nullable=True), - sa.PrimaryKeyConstraint('id'), - sa.UniqueConstraint('addr')) - - with op.batch_alter_table('request_filter', schema=None) as batch_op: - batch_op.create_index(batch_op.f('ix_request_filter_type'), ['type'], - unique=False) - - bind = op.get_bind() - Base.prepare(autoload_with=bind) - RequestFilter = Base.classes.request_filter - session = Session(bind=bind) - - blp = current_app.config.get("FHOST_UPLOAD_BLACKLIST") - if blp: - with current_app.open_instance_resource(blp, "r") as bl: - for line in bl.readlines(): - if not line.startswith("#"): - line = line.strip() - if line.endswith(":"): - # old implementation uses str.startswith, - # which does not translate to networks - current_app.logger.warning( - f"Ignored address: {line}") - continue - - addr = ipaddress.ip_address(line).packed - flt = RequestFilter(type="addr", addr=addr) - session.add(flt) - - for mime in current_app.config.get("FHOST_MIME_BLACKLIST", []): - flt = RequestFilter(type="mime", regex=mime) - session.add(flt) - - session.commit() - - w = "Entries in your host and MIME blacklists have been migrated to " \ - "request filters and stored in the databaes, where possible. " \ - "The corresponding files and config options may now be deleted. " \ - "Note that you may have to manually restore them if you wish to " \ - "revert this with a db downgrade operation." - current_app.logger.warning(w) - - -def downgrade(): - with op.batch_alter_table('request_filter', schema=None) as batch_op: - batch_op.drop_index(batch_op.f('ix_request_filter_type')) - - op.drop_table('request_filter') diff --git a/migrations/versions/5cee97aab219_.py b/migrations/versions/5cee97aab219_.py index 4ebee5f..6c1a16b 100644 --- a/migrations/versions/5cee97aab219_.py +++ b/migrations/versions/5cee97aab219_.py @@ -15,9 +15,12 @@ import sqlalchemy as sa def upgrade(): - op.add_column('file', sa.Column('last_vscan', sa.DateTime(), - nullable=True)) + # ### commands auto generated by Alembic - please adjust! ### + op.add_column('file', sa.Column('last_vscan', sa.DateTime(), nullable=True)) + # ### end Alembic commands ### def downgrade(): + # ### commands auto generated by Alembic - please adjust! ### op.drop_column('file', 'last_vscan') + # ### end Alembic commands ### diff --git a/migrations/versions/7e246705da6a_.py b/migrations/versions/7e246705da6a_.py index e8617b3..33dbf79 100644 --- a/migrations/versions/7e246705da6a_.py +++ b/migrations/versions/7e246705da6a_.py @@ -15,8 +15,12 @@ import sqlalchemy as sa def upgrade(): + # ### commands auto generated by Alembic - please adjust! ### op.add_column('file', sa.Column('nsfw_score', sa.Float(), nullable=True)) + # ### end Alembic commands ### def downgrade(): + # ### commands auto generated by Alembic - please adjust! 
### op.drop_column('file', 'nsfw_score') + # ### end Alembic commands ### diff --git a/migrations/versions/939a08e1d6e5_.py b/migrations/versions/939a08e1d6e5_.py index 9e69758..e389b41 100644 --- a/migrations/versions/939a08e1d6e5_.py +++ b/migrations/versions/939a08e1d6e5_.py @@ -21,29 +21,24 @@ from sqlalchemy.orm import Session import os import time +""" For a file of a given size, determine the largest allowed lifespan of that file -""" -For a file of a given size, determine the largest allowed lifespan of that file +Based on the current app's configuration: Specifically, the MAX_CONTENT_LENGTH, as well +as FHOST_{MIN,MAX}_EXPIRATION. -Based on the current app's configuration: -Specifically, the MAX_CONTENT_LENGTH, as well as FHOST_{MIN,MAX}_EXPIRATION. - -This lifespan may be shortened by a user's request, but no files should be -allowed to expire at a point after this number. +This lifespan may be shortened by a user's request, but no files should be allowed to +expire at a point after this number. Value returned is a duration in milliseconds. """ def get_max_lifespan(filesize: int) -> int: - cfg = current_app.config - min_exp = cfg.get("FHOST_MIN_EXPIRATION", 30 * 24 * 60 * 60 * 1000) - max_exp = cfg.get("FHOST_MAX_EXPIRATION", 365 * 24 * 60 * 60 * 1000) - max_size = cfg.get("MAX_CONTENT_LENGTH", 256 * 1024 * 1024) + min_exp = current_app.config.get("FHOST_MIN_EXPIRATION", 30 * 24 * 60 * 60 * 1000) + max_exp = current_app.config.get("FHOST_MAX_EXPIRATION", 365 * 24 * 60 * 60 * 1000) + max_size = current_app.config.get("MAX_CONTENT_LENGTH", 256 * 1024 * 1024) return min_exp + int((-max_exp + min_exp) * (filesize / max_size - 1) ** 3) - Base = automap_base() - def upgrade(): op.add_column('file', sa.Column('expiration', sa.BigInteger())) @@ -53,14 +48,14 @@ def upgrade(): session = Session(bind=bind) storage = Path(current_app.config["FHOST_STORAGE_PATH"]) - current_time = time.time() * 1000 + current_time = time.time() * 1000; # List of file hashes which have not expired yet # This could get really big for some servers try: unexpired_files = os.listdir(storage) except FileNotFoundError: - return # There are no currently unexpired files + return # There are no currently unexpired files # Calculate an expiration date for all existing files @@ -70,7 +65,7 @@ def upgrade(): sa.not_(File.removed) ) ) - updates = [] # We coalesce updates to the database here + updates = [] # We coalesce updates to the database here # SQLite has a hard limit on the number of variables so we # need to do this the slow way @@ -79,18 +74,13 @@ def upgrade(): for file in files: file_path = storage / file.sha256 stat = os.stat(file_path) - # How long the file is allowed to live, in ms - max_age = get_max_lifespan(stat.st_size) - # When the file was created, in ms - file_birth = stat.st_mtime * 1000 - updates.append({ - 'id': file.id, - 'expiration': int(file_birth + max_age)}) + max_age = get_max_lifespan(stat.st_size) # How long the file is allowed to live, in ms + file_birth = stat.st_mtime * 1000 # When the file was created, in ms + updates.append({'id': file.id, 'expiration': int(file_birth + max_age)}) # Apply coalesced updates session.bulk_update_mappings(File, updates) session.commit() - def downgrade(): op.drop_column('file', 'expiration') diff --git a/migrations/versions/d9a53a28ba54_change_file_addr_to_ipaddress_type.py b/migrations/versions/d9a53a28ba54_change_file_addr_to_ipaddress_type.py deleted file mode 100644 index 2a46a27..0000000 --- 
a/migrations/versions/d9a53a28ba54_change_file_addr_to_ipaddress_type.py +++ /dev/null @@ -1,78 +0,0 @@ -"""Change File.addr to IPAddress type - -Revision ID: d9a53a28ba54 -Revises: 5cda1743b92d -Create Date: 2024-09-27 14:03:06.764764 - -""" - -# revision identifiers, used by Alembic. -revision = 'd9a53a28ba54' -down_revision = '5cda1743b92d' - -from alembic import op -import sqlalchemy as sa -from sqlalchemy.ext.automap import automap_base -from sqlalchemy.orm import Session -from flask import current_app -import ipaddress - -Base = automap_base() - - -def upgrade(): - with op.batch_alter_table('file', schema=None) as batch_op: - batch_op.add_column(sa.Column('addr_tmp', sa.LargeBinary(16), - nullable=True)) - - bind = op.get_bind() - Base.prepare(autoload_with=bind) - File = Base.classes.file - session = Session(bind=bind) - - updates = [] - stmt = sa.select(File).where(sa.not_(File.addr == None)) - for f in session.scalars(stmt.execution_options(yield_per=1000)): - addr = ipaddress.ip_address(f.addr) - if type(addr) is ipaddress.IPv6Address: - addr = addr.ipv4_mapped or addr - - updates.append({ - "id": f.id, - "addr_tmp": addr.packed - }) - session.execute(sa.update(File), updates) - - with op.batch_alter_table('file', schema=None) as batch_op: - batch_op.drop_column('addr') - batch_op.alter_column('addr_tmp', new_column_name='addr') - - -def downgrade(): - with op.batch_alter_table('file', schema=None) as batch_op: - batch_op.add_column(sa.Column('addr_tmp', sa.UnicodeText, - nullable=True)) - - bind = op.get_bind() - Base.prepare(autoload_with=bind) - File = Base.classes.file - session = Session(bind=bind) - - updates = [] - stmt = sa.select(File).where(sa.not_(File.addr == None)) - for f in session.scalars(stmt.execution_options(yield_per=1000)): - addr = ipaddress.ip_address(f.addr) - if type(addr) is ipaddress.IPv6Address: - addr = addr.ipv4_mapped or addr - - updates.append({ - "id": f.id, - "addr_tmp": addr.compressed - }) - - session.execute(sa.update(File), updates) - - with op.batch_alter_table('file', schema=None) as batch_op: - batch_op.drop_column('addr') - batch_op.alter_column('addr_tmp', new_column_name='addr') - diff --git a/migrations/versions/dd0766afb7d2_store_user_agent_string_with_files.py b/migrations/versions/dd0766afb7d2_store_user_agent_string_with_files.py index 914d31a..4af7680 100644 --- a/migrations/versions/dd0766afb7d2_store_user_agent_string_with_files.py +++ b/migrations/versions/dd0766afb7d2_store_user_agent_string_with_files.py @@ -15,10 +15,16 @@ import sqlalchemy as sa def upgrade(): + # ### commands auto generated by Alembic - please adjust! ### with op.batch_alter_table('file', schema=None) as batch_op: batch_op.add_column(sa.Column('ua', sa.UnicodeText(), nullable=True)) + # ### end Alembic commands ### + def downgrade(): + # ### commands auto generated by Alembic - please adjust! ### with op.batch_alter_table('file', schema=None) as batch_op: batch_op.drop_column('ua') + + # ### end Alembic commands ### diff --git a/migrations/versions/e2e816056589_.py b/migrations/versions/e2e816056589_.py index b803ede..7c31ba9 100644 --- a/migrations/versions/e2e816056589_.py +++ b/migrations/versions/e2e816056589_.py @@ -15,8 +15,12 @@ import sqlalchemy as sa def upgrade(): + # ### commands auto generated by Alembic - please adjust! ### op.add_column('file', sa.Column('secret', sa.String(), nullable=True)) + # ### end Alembic commands ### def downgrade(): + # ### commands auto generated by Alembic - please adjust! 
### op.drop_column('file', 'secret') + # ### end Alembic commands ### diff --git a/mod.css b/mod.css index 5563832..c001ef1 100644 --- a/mod.css +++ b/mod.css @@ -1,6 +1,5 @@ #ftable { width: 1fr; - height: 100%; } #infopane { @@ -11,7 +10,7 @@ #finfo { background: $boost; - height: 14; + height: 12; width: 1fr; box-sizing: content-box; } @@ -28,9 +27,20 @@ width: 1fr; } +#filter_container { + height: auto; + display: none; +} + +#filter_label { + content-align: right middle; + height: 1fr; + width: 20%; + margin: 0 1 0 2; +} + #filter_input { width: 1fr; - display: none; } Notification { diff --git a/mod.py b/mod.py index 7445c3b..0748a42 100755 --- a/mod.py +++ b/mod.py @@ -5,30 +5,28 @@ from sys import stdout import time from textual.app import App, ComposeResult -from textual.widgets import DataTable, Header, Footer, RichLog, Static, Input +from textual.widgets import DataTable, Header, Footer, TextLog, Static, Input from textual.containers import Horizontal, Vertical from textual.screen import Screen from textual import log from rich.text import Text from jinja2.filters import do_filesizeformat -import ipaddress -from fhost import db, File, AddrFilter, su, app as fhost_app -from modui import FileTable, mime, MpvWidget, Notification +from fhost import db, File, su, app as fhost_app, in_upload_bl +from modui import * fhost_app.app_context().push() - class NullptrMod(Screen): BINDINGS = [ ("q", "quit_app", "Quit"), - ("f1", "filter(1, 'Name')", "Lookup name"), - ("f2", "filter(2, 'IP address')", "Filter IP"), - ("f3", "filter(3, 'MIME Type')", "Filter MIME"), - ("f4", "filter(4, 'Extension')", "Filter Ext."), + ("f1", "filter(1, 'Lookup name:')", "Lookup name"), + ("f2", "filter(2, 'Filter IP address:')", "Filter IP"), + ("f3", "filter(3, 'Filter MIME Type:')", "Filter MIME"), + ("f4", "filter(4, 'Filter extension:')", "Filter Ext."), ("f5", "refresh", "Refresh"), ("f6", "filter_clear", "Clear filter"), - ("f7", "filter(5, 'User agent')", "Filter UA"), + ("f7", "filter(5, 'Filter user agent:')", "Filter UA"), ("r", "remove_file(False)", "Remove file"), ("ctrl+r", "remove_file(True)", "Ban file"), ("p", "ban_ip(False)", "Ban IP"), @@ -44,82 +42,74 @@ class NullptrMod(Screen): ftable.watch_query(None, None) def action_filter_clear(self): - self.finput.display = False + self.query_one("#filter_container").display = False ftable = self.query_one("#ftable") ftable.focus() ftable.query = ftable.base_query def action_filter(self, fcol: int, label: str): - self.finput.placeholder = label - self.finput.display = True - self.finput.focus() + self.query_one("#filter_label").update(label) + finput = self.query_one("#filter_input") self.filter_col = fcol + self.query_one("#filter_container").display = True + finput.focus() self._refresh_layout() if self.current_file: match fcol: - case 1: self.finput.value = "" - case 2: self.finput.value = self.current_file.addr.compressed - case 3: self.finput.value = self.current_file.mime - case 4: self.finput.value = self.current_file.ext - case 5: self.finput.value = self.current_file.ua or "" + case 1: finput.value = "" + case 2: finput.value = self.current_file.addr + case 3: finput.value = self.current_file.mime + case 4: finput.value = self.current_file.ext + case 5: finput.value = self.current_file.ua or "" def on_input_submitted(self, message: Input.Submitted) -> None: - self.finput.display = False + self.query_one("#filter_container").display = False ftable = self.query_one("#ftable") ftable.focus() - q = ftable.base_query if len(message.value): match 
self.filter_col: case 1: - try: - q = q.filter(File.id == su.debase(message.value)) - except ValueError: - return - case 2: - try: - addr = ipaddress.ip_address(message.value) - if type(addr) is ipaddress.IPv6Address: - addr = addr.ipv4_mapped or addr - q = q.filter(File.addr == addr) - except ValueError: - return - case 3: q = q.filter(File.mime.like(message.value)) - case 4: q = q.filter(File.ext.like(message.value)) - case 5: q = q.filter(File.ua.like(message.value)) - - ftable.query = q + try: ftable.query = ftable.base_query.filter(File.id == su.debase(message.value)) + except ValueError: pass + case 2: ftable.query = ftable.base_query.filter(File.addr.like(message.value)) + case 3: ftable.query = ftable.base_query.filter(File.mime.like(message.value)) + case 4: ftable.query = ftable.base_query.filter(File.ext.like(message.value)) + case 5: ftable.query = ftable.base_query.filter(File.ua.like(message.value)) + else: + ftable.query = ftable.base_query def action_remove_file(self, permanent: bool) -> None: if self.current_file: self.current_file.delete(permanent) db.session.commit() - self.mount(Notification(f"{'Banned' if permanent else 'Removed'}" - f"file {self.current_file.getname()}")) + self.mount(Notification(f"{'Banned' if permanent else 'Removed'} file {self.current_file.getname()}")) self.action_refresh() def action_ban_ip(self, nuke: bool) -> None: if self.current_file: - addr = self.current_file.addr - if AddrFilter.query.filter(AddrFilter.addr == addr).scalar(): - txt = f"{addr.compressed} is already banned" + if not fhost_app.config["FHOST_UPLOAD_BLACKLIST"]: + self.mount(Notification("Failed: FHOST_UPLOAD_BLACKLIST not set!")) + return else: - db.session.add(AddrFilter(addr)) - db.session.commit() - txt = f"Banned {addr.compressed}" + if in_upload_bl(self.current_file.addr): + txt = f"{self.current_file.addr} is already banned" + else: + with fhost_app.open_instance_resource(fhost_app.config["FHOST_UPLOAD_BLACKLIST"], "a") as bl: + print(self.current_file.addr.lstrip("::ffff:"), file=bl) + txt = f"Banned {self.current_file.addr}" - if nuke: - tsize = 0 - trm = 0 - for f in File.query.filter(File.addr == addr): - if f.getpath().is_file(): - tsize += f.size or f.getpath().stat().st_size - trm += 1 - f.delete(True) - db.session.commit() - txt += f", removed {trm} {'files' if trm != 1 else 'file'} " \ - f"totaling {do_filesizeformat(tsize, True)}" + if nuke: + tsize = 0 + trm = 0 + for f in File.query.filter(File.addr == self.current_file.addr): + if f.getpath().is_file(): + tsize += f.size or f.getpath().stat().st_size + trm += 1 + f.delete(True) + db.session.commit() + txt += f", removed {trm} {'files' if trm != 1 else 'file'} totaling {do_filesizeformat(tsize, True)}" self.mount(Notification(txt)) self._refresh_layout() ftable = self.query_one("#ftable") @@ -132,13 +122,13 @@ class NullptrMod(Screen): def compose(self) -> ComposeResult: yield Header() yield Horizontal( - FileTable(id="ftable", zebra_stripes=True, cursor_type="row"), + FileTable(id="ftable", zebra_stripes=True), Vertical( - DataTable(id="finfo", show_header=False, cursor_type="none"), + DataTable(id="finfo", show_header=False), MpvWidget(id="mpv"), - RichLog(id="ftextlog", auto_scroll=False), - id="infopane")) - yield Input(id="filter_input") + TextLog(id="ftextlog"), + id="infopane")) + yield Horizontal(Static("Filter:", id="filter_label"), Input(id="filter_input"), id="filter_container") yield Footer() def on_mount(self) -> None: @@ -153,17 +143,12 @@ class NullptrMod(Screen): self.mpvw = 
self.query_one("#mpv") self.ftlog = self.query_one("#ftextlog") - self.finput = self.query_one("#filter_input") - self.mimehandler = mime.MIMEHandler() - self.mimehandler.register(mime.MIMECategory.Archive, - self.handle_libarchive) + self.mimehandler.register(mime.MIMECategory.Archive, self.handle_libarchive) self.mimehandler.register(mime.MIMECategory.Text, self.handle_text) self.mimehandler.register(mime.MIMECategory.AV, self.handle_mpv) - self.mimehandler.register(mime.MIMECategory.Document, - self.handle_mupdf) - self.mimehandler.register(mime.MIMECategory.Fallback, - self.handle_libarchive) + self.mimehandler.register(mime.MIMECategory.Document, self.handle_mupdf) + self.mimehandler.register(mime.MIMECategory.Fallback, self.handle_libarchive) self.mimehandler.register(mime.MIMECategory.Fallback, self.handle_mpv) self.mimehandler.register(mime.MIMECategory.Fallback, self.handle_raw) @@ -175,7 +160,7 @@ class NullptrMod(Screen): def handle_text(self, cat): with open(self.current_file.getpath(), "r") as sf: - data = sf.read(1000000).replace("\033", "") + data = sf.read(1000000).replace("\033","") self.ftlog.write(data) return True @@ -190,8 +175,7 @@ class NullptrMod(Screen): self.mpvw.styles.height = "40%" self.mpvw.start_mpv("hex://" + imgdata, 0) - self.ftlog.write( - Text.from_markup(f"[bold]Pages:[/bold] {doc.page_count}")) + self.ftlog.write(Text.from_markup(f"[bold]Pages:[/bold] {doc.page_count}")) self.ftlog.write(Text.from_markup("[bold]Metadata:[/bold]")) for k, v in doc.metadata.items(): self.ftlog.write(Text.from_markup(f" [bold]{k}:[/bold] {v}")) @@ -216,8 +200,7 @@ class NullptrMod(Screen): for k, v in c.metadata.items(): self.ftlog.write(f" {k}: {v}") for s in c.streams: - self.ftlog.write( - Text(f"Stream {s.index}:", style="bold")) + self.ftlog.write(Text(f"Stream {s.index}:", style="bold")) self.ftlog.write(f" Type: {s.type}") if s.base_rate: self.ftlog.write(f" Frame rate: {s.base_rate}") @@ -236,31 +219,24 @@ class NullptrMod(Screen): else: c = chr(s) s = c - if c.isalpha(): - return f"\0[chartreuse1]{s}\0[/chartreuse1]" - if c.isdigit(): - return f"\0[gold1]{s}\0[/gold1]" + if c.isalpha(): return f"\0[chartreuse1]{s}\0[/chartreuse1]" + if c.isdigit(): return f"\0[gold1]{s}\0[/gold1]" if not c.isprintable(): g = "grey50" if c == "\0" else "cadet_blue" return f"\0[{g}]{s if len(s) == 2 else '.'}\0[/{g}]" return s - - return Text.from_markup( - "\n".join(' '.join( - map(fmt, map(''.join, zip(*[iter(c.hex())] * 2)))) + - f"{' ' * (16 - len(c))} {''.join(map(fmt, c))}" - for c in - map(lambda x: bytes([n for n in x if n is not None]), - zip_longest( - *[iter(binf.read(min(length, 16 * 10)))] * 16)))) + return Text.from_markup("\n".join(f"{' '.join(map(fmt, map(''.join, zip(*[iter(c.hex())] * 2))))}" + f"{' ' * (16 - len(c))}" + f" {''.join(map(fmt, c))}" + for c in map(lambda x: bytes([n for n in x if n != None]), + zip_longest(*[iter(binf.read(min(length, 16 * 10)))] * 16)))) with open(self.current_file.getpath(), "rb") as binf: self.ftlog.write(hexdump(binf, self.current_file.size)) if self.current_file.size > 16*10*2: binf.seek(self.current_file.size-16*10) self.ftlog.write(" [...] 
".center(64, '─')) - self.ftlog.write(hexdump(binf, - self.current_file.size - binf.tell())) + self.ftlog.write(hexdump(binf, self.current_file.size - binf.tell())) return True @@ -271,33 +247,27 @@ class NullptrMod(Screen): self.finfo.add_rows([ ("ID:", str(f.id)), ("File name:", f.getname()), - ("URL:", f.geturl() - if fhost_app.config["SERVER_NAME"] - else "⚠ Set SERVER_NAME in config.py to display"), + ("URL:", f.geturl() if fhost_app.config["SERVER_NAME"] else "⚠ Set SERVER_NAME in config.py to display"), ("File size:", do_filesizeformat(f.size, True)), ("MIME type:", f.mime), ("SHA256 checksum:", f.sha256), - ("Uploaded by:", Text(f.addr.compressed)), + ("Uploaded by:", Text(f.addr)), ("User agent:", Text(f.ua or "")), ("Management token:", f.mgmt_token), ("Secret:", f.secret), - ("Is NSFW:", ("Yes" if f.is_nsfw else "No") + - (f" (Score: {f.nsfw_score:0.4f})" - if f.nsfw_score else " (Not scanned)")), + ("Is NSFW:", ("Yes" if f.is_nsfw else "No") + (f" (Score: {f.nsfw_score:0.4f})" if f.nsfw_score else " (Not scanned)")), ("Is banned:", "Yes" if f.removed else "No"), - ("Expires:", - time.strftime("%Y-%m-%d %H:%M:%S", - time.gmtime(File.get_expiration(f.expiration, - f.size)/1000))) + ("Expires:", time.strftime("%Y-%m-%d %H:%M:%S", time.gmtime(File.get_expiration(f.expiration, f.size)/1000))) ]) self.mpvw.stop_mpv(True) - self.ftlog.clear() + self.ftlog.remove() + self.query_one("#infopane").mount(TextLog(id="ftextlog")) + self.ftlog = self.query_one("#ftextlog") if f.getpath().is_file(): self.mimehandler.handle(f.mime, f.ext) - self.ftlog.scroll_to(x=0, y=0, animate=False) - + self.ftlog.scroll_home(animate=False) class NullptrModApp(App): CSS_PATH = "mod.css" @@ -308,7 +278,6 @@ class NullptrModApp(App): self.install_screen(self.main_screen, name="main") self.push_screen("main") - if __name__ == "__main__": app = NullptrModApp() app.run() diff --git a/modui/filetable.py b/modui/filetable.py index 4345227..7be0f1b 100644 --- a/modui/filetable.py +++ b/modui/filetable.py @@ -1,20 +1,18 @@ from textual.widgets import DataTable, Static from textual.reactive import Reactive -from textual.message import Message +from textual.message import Message, MessageTarget from textual import events, log from jinja2.filters import do_filesizeformat from fhost import File from modui import mime - class FileTable(DataTable): query = Reactive(None) order_col = Reactive(0) order_desc = Reactive(True) limit = 10000 - colmap = [File.id, File.removed, File.nsfw_score, None, File.ext, - File.size, File.mime] + colmap = [File.id, File.removed, File.nsfw_score, None, File.ext, File.size, File.mime] def __init__(self, **kwargs): super().__init__(**kwargs) @@ -23,9 +21,9 @@ class FileTable(DataTable): self.query = self.base_query class Selected(Message): - def __init__(self, f: File) -> None: + def __init__(self, sender: MessageTarget, f: File) -> None: self.file = f - super().__init__() + super().__init__(sender) def watch_order_col(self, old, value) -> None: self.watch_query(None, None) @@ -35,8 +33,6 @@ class FileTable(DataTable): def watch_query(self, old, value) -> None: def fmt_file(f: File) -> tuple: - mimemoji = mime.mimemoji.get(f.mime.split('/')[0], - mime.mimemoji.get(f.mime)) or ' ' return ( str(f.id), "🔴" if f.removed else " ", @@ -44,44 +40,29 @@ class FileTable(DataTable): "👻" if not f.getpath().is_file() else " ", f.getname(), do_filesizeformat(f.size, True), - f"{mimemoji} {f.mime}", + f"{mime.mimemoji.get(f.mime.split('/')[0], mime.mimemoji.get(f.mime)) or ' '} " + f.mime, ) if (self.query): 
+        self.clear()
         order = FileTable.colmap[self.order_col]
         q = self.query
-        if order:
-            q = q.order_by(order.desc() if self.order_desc
-                           else order, File.id)
-        qres = list(map(fmt_file, q.limit(self.limit)))
+        if order: q = q.order_by(order.desc() if self.order_desc else order, File.id)
+        self.add_rows(map(fmt_file, q.limit(self.limit)))
 
-        ri = 0
-        row = self.cursor_coordinate.row
-        if row < self.row_count and row >= 0:
-            ri = int(self.get_row_at(row)[0])
+    def _scroll_cursor_in_to_view(self, animate: bool = False) -> None:
+        region = self._get_cell_region(self.cursor_row, 0)
+        spacing = self._get_cell_border()
+        self.scroll_to_region(region, animate=animate, spacing=spacing)
 
-        self.clear()
-        self.add_rows(qres)
-
-        for i, v in enumerate(qres):
-            if int(v[0]) == ri:
-                self.move_cursor(row=i)
-                break
-
-        self.on_selected()
-
-    def on_selected(self) -> Selected:
-        row = self.cursor_coordinate.row
-        if row < self.row_count and row >= 0:
-            f = File.query.get(int(self.get_row_at(row)[0]))
-            self.post_message(self.Selected(f))
-
-    def watch_cursor_coordinate(self, old, value) -> None:
-        super().watch_cursor_coordinate(old, value)
-        if old != value:
-            self.on_selected()
+    async def watch_cursor_cell(self, old, value) -> None:
+        super().watch_cursor_cell(old, value)
+        if value[0] < len(self.data) and value[0] >= 0:
+            f = File.query.get(int(self.data[value[0]][0]))
+            await self.emit(self.Selected(self, f))
 
     def on_click(self, event: events.Click) -> None:
+        super().on_click(event)
         meta = self.get_style_at(event.x, event.y).meta
         if meta:
             if meta["row"] == -1:
diff --git a/modui/mime.py b/modui/mime.py
index e693ebe..fae51af 100644
--- a/modui/mime.py
+++ b/modui/mime.py
@@ -2,46 +2,46 @@ from enum import Enum
 from textual import log
 
 mimemoji = {
-    "audio": "🔈",
-    "video": "🎞",
-    "text": "📄",
-    "image": "🖼",
-    "application/zip": "🗜️",
-    "application/x-zip-compressed": "🗜️",
-    "application/x-tar": "🗄",
-    "application/x-cpio": "🗄",
-    "application/x-xz": "🗜️",
-    "application/x-7z-compressed": "🗜️",
-    "application/gzip": "🗜️",
-    "application/zstd": "🗜️",
-    "application/x-rar": "🗜️",
-    "application/x-rar-compressed": "🗜️",
-    "application/vnd.ms-cab-compressed": "🗜️",
-    "application/x-bzip2": "🗜️",
-    "application/x-lzip": "🗜️",
-    "application/x-iso9660-image": "💿",
-    "application/pdf": "📕",
-    "application/epub+zip": "📕",
-    "application/mxf": "🎞",
-    "application/vnd.android.package-archive": "📦",
-    "application/vnd.debian.binary-package": "📦",
-    "application/x-rpm": "📦",
-    "application/x-dosexec": "⚙",
-    "application/x-execuftable": "⚙",
-    "application/x-sharedlib": "⚙",
-    "application/java-archive": "☕",
-    "application/x-qemu-disk": "🖴",
-    "application/pgp-encrypted": "🔏",
+    "audio" : "🔈",
+    "video" : "🎞",
+    "text" : "📄",
+    "image" : "🖼",
+    "application/zip" : "🗜️",
+    "application/x-zip-compressed" : "🗜️",
+    "application/x-tar" : "🗄",
+    "application/x-cpio" : "🗄",
+    "application/x-xz" : "🗜️",
+    "application/x-7z-compressed" : "🗜️",
+    "application/gzip" : "🗜️",
+    "application/zstd" : "🗜️",
+    "application/x-rar" : "🗜️",
+    "application/x-rar-compressed" : "🗜️",
+    "application/vnd.ms-cab-compressed" : "🗜️",
+    "application/x-bzip2" : "🗜️",
+    "application/x-lzip" : "🗜️",
+    "application/x-iso9660-image" : "💿",
+    "application/pdf" : "📕",
+    "application/epub+zip" : "📕",
+    "application/mxf" : "🎞",
+    "application/vnd.android.package-archive" : "📦",
+    "application/vnd.debian.binary-package" : "📦",
+    "application/x-rpm" : "📦",
+    "application/x-dosexec" : "⚙",
+    "application/x-execuftable" : "⚙",
+    "application/x-sharedlib" : "⚙",
+    "application/java-archive" : "☕",
+    "application/x-qemu-disk" : "🖴",
+    "application/pgp-encrypted" : "🔏",
 }
 
-MIMECategory = Enum("MIMECategory", ["Archive", "Text", "AV", "Document",
-                                     "Fallback"])
-
+MIMECategory = Enum("MIMECategory",
+    ["Archive", "Text", "AV", "Document", "Fallback"]
+)
 
 class MIMEHandler:
     def __init__(self):
         self.handlers = {
-            MIMECategory.Archive: [[
+            MIMECategory.Archive : [[
                 "application/zip",
                 "application/x-zip-compressed",
                 "application/x-tar",
@@ -62,31 +62,31 @@ class MIMEHandler:
                 "application/java-archive",
                 "application/vnd.openxmlformats"
             ], []],
-            MIMECategory.Text: [[
+            MIMECategory.Text : [[
                 "text",
                 "application/json",
                 "application/xml",
             ], []],
-            MIMECategory.AV: [[
+            MIMECategory.AV : [[
                 "audio",
                 "video",
                 "image",
                 "application/mxf"
             ], []],
-            MIMECategory.Document: [[
+            MIMECategory.Document : [[
                 "application/pdf",
                 "application/epub",
                 "application/x-mobipocket-ebook",
             ], []],
-            MIMECategory.Fallback: [[], []]
+            MIMECategory.Fallback : [[], []]
         }
         self.exceptions = {
-            MIMECategory.Archive: {
-                ".cbz": MIMECategory.Document,
-                ".xps": MIMECategory.Document,
-                ".epub": MIMECategory.Document,
+            MIMECategory.Archive : {
+                ".cbz" : MIMECategory.Document,
+                ".xps" : MIMECategory.Document,
+                ".epub" : MIMECategory.Document,
             },
-            MIMECategory.Text: {
-                ".fb2": MIMECategory.Document,
+            MIMECategory.Text : {
+                ".fb2" : MIMECategory.Document,
             }
         }
@@ -115,14 +115,12 @@ class MIMEHandler:
         cat = getcat(mime)
         for handler in self.handlers[cat][1]:
             try:
-                if handler(cat):
-                    return
+                if handler(cat): return
             except: pass
 
         for handler in self.handlers[MIMECategory.Fallback][1]:
             try:
-                if handler(None):
-                    return
+                if handler(None): return
             except: pass
 
         raise RuntimeError(f"Unhandled MIME type category: {cat}")
diff --git a/modui/mpvwidget.py b/modui/mpvwidget.py
index 80c162f..50e5859 100644
--- a/modui/mpvwidget.py
+++ b/modui/mpvwidget.py
@@ -1,9 +1,5 @@
 import time
-
-import fcntl
-import struct
-import termios
-
+import fcntl, struct, termios
 from sys import stdout
 
 from textual import events, log
@@ -11,7 +7,6 @@ from textual.widgets import Static
 
 from fhost import app as fhost_app
 
-
 class MpvWidget(Static):
     def __init__(self, **kwargs):
         super().__init__(**kwargs)
@@ -19,10 +14,8 @@ class MpvWidget(Static):
         self.mpv = None
         self.vo = fhost_app.config.get("MOD_PREVIEW_PROTO")
 
-        if self.vo not in ["sixel", "kitty"]:
-            self.update("⚠ Previews not enabled. \n\nSet MOD_PREVIEW_PROTO "
-                        "to 'sixel' or 'kitty' in config.py,\nwhichever is "
-                        "supported by your terminal.")
+        if not self.vo in ["sixel", "kitty"]:
+            self.update("⚠ Previews not enabled. \n\nSet MOD_PREVIEW_PROTO to 'sixel' or 'kitty' in config.py,\nwhichever is supported by your terminal.")
         else:
             try:
                 import mpv
@@ -34,35 +27,28 @@ class MpvWidget(Static):
                 self.mpv[f"vo-sixel-buffered"] = True
                 self.mpv["audio"] = False
                 self.mpv["loop-file"] = "inf"
-                self.mpv["image-display-duration"] = 0.5 \
-                    if self.vo == "sixel" else "inf"
+                self.mpv["image-display-duration"] = 0.5 if self.vo == "sixel" else "inf"
             except Exception as e:
                 self.mpv = None
-                self.update("⚠ Previews require python-mpv with libmpv "
-                            "0.36.0 or later \n\nError was:\n"
-                            f"{type(e).__name__}: {e}")
+                self.update(f"⚠ Previews require python-mpv with libmpv 0.36.0 or later \n\nError was:\n{type(e).__name__}: {e}")
 
-    def start_mpv(self, f: str | None = None,
-                  pos: float | str | None = None) -> None:
+    def start_mpv(self, f: str|None = None, pos: float|str|None = None) -> None:
         self.display = True
         self.screen._refresh_layout()
 
         if self.mpv:
             if self.content_region.x:
-                winsz = fcntl.ioctl(0, termios.TIOCGWINSZ, '12345678')
-                r, c, w, h = struct.unpack('hhhh', winsz)
+                r, c, w, h = struct.unpack('hhhh', fcntl.ioctl(0, termios.TIOCGWINSZ, '12345678'))
                 width = int((w / c) * self.content_region.width)
-                height = int((h / r) * (self.content_region.height +
-                                        (1 if self.vo == "sixel" else 0)))
+                height = int((h / r) * (self.content_region.height + (1 if self.vo == "sixel" else 0)))
                 self.mpv[f"vo-{self.vo}-left"] = self.content_region.x + 1
                 self.mpv[f"vo-{self.vo}-top"] = self.content_region.y + 1
-                self.mpv[f"vo-{self.vo}-rows"] = self.content_region.height + \
-                    (1 if self.vo == "sixel" else 0)
+                self.mpv[f"vo-{self.vo}-rows"] = self.content_region.height + (1 if self.vo == "sixel" else 0)
                 self.mpv[f"vo-{self.vo}-cols"] = self.content_region.width
                 self.mpv[f"vo-{self.vo}-width"] = width
                 self.mpv[f"vo-{self.vo}-height"] = height
 
-            if pos is not None:
+            if pos != None:
                 self.mpv["start"] = pos
 
             if f:
diff --git a/modui/notification.py b/modui/notification.py
index 4866130..ecae6e4 100644
--- a/modui/notification.py
+++ b/modui/notification.py
@@ -1,6 +1,5 @@
 from textual.widgets import Static
 
-
 class Notification(Static):
     def on_mount(self) -> None:
         self.set_timer(3, self.remove)
diff --git a/nsfw_detect.py b/nsfw_detect.py
index 96fa9a2..032f7e4 100755
--- a/nsfw_detect.py
+++ b/nsfw_detect.py
@@ -1,7 +1,7 @@
 #!/usr/bin/env python3
 
 """
-    Copyright © 2024 Mia Herkt
+    Copyright © 2020 Mia Herkt
     Licensed under the EUPL, Version 1.2 or - as soon as approved by
     the European Commission - subsequent versions of the EUPL (the "License");
@@ -18,32 +18,81 @@ and limitations under the License.
 """
 
+import numpy as np
+import os
 import sys
-import av
-from transformers import pipeline
+from io import BytesIO
+from pathlib import Path
+os.environ["GLOG_minloglevel"] = "2"  # seriously :|
+import caffe
+import av
+av.logging.set_level(av.logging.PANIC)
 
 class NSFWDetector:
     def __init__(self):
-        self.classifier = pipeline("image-classification",
-                                   model="giacomoarienti/nsfw-classifier")
+        npath = Path(__file__).parent / "nsfw_model"
+        self.nsfw_net = caffe.Net(
+            str(npath / "deploy.prototxt"),
+            caffe.TEST,
+            weights = str(npath / "resnet_50_1by2_nsfw.caffemodel")
+        )
+        self.caffe_transformer = caffe.io.Transformer({
+            'data': self.nsfw_net.blobs['data'].data.shape
+        })
+        # move image channels to outermost
+        self.caffe_transformer.set_transpose('data', (2, 0, 1))
+        # subtract the dataset-mean value in each channel
+        self.caffe_transformer.set_mean('data', np.array([104, 117, 123]))
+        # rescale from [0, 1] to [0, 255]
+        self.caffe_transformer.set_raw_scale('data', 255)
+        # swap channels from RGB to BGR
+        self.caffe_transformer.set_channel_swap('data', (2, 1, 0))
+
+    def _compute(self, img):
+        image = caffe.io.load_image(img)
+
+        H, W, _ = image.shape
+        _, _, h, w = self.nsfw_net.blobs["data"].data.shape
+        h_off = int(max((H - h) / 2, 0))
+        w_off = int(max((W - w) / 2, 0))
+        crop = image[h_off:h_off + h, w_off:w_off + w, :]
+
+        transformed_image = self.caffe_transformer.preprocess('data', crop)
+        transformed_image.shape = (1,) + transformed_image.shape
+
+        input_name = self.nsfw_net.inputs[0]
+        output_layers = ["prob"]
+        all_outputs = self.nsfw_net.forward_all(
+            blobs=output_layers, **{input_name: transformed_image})
+
+        outputs = all_outputs[output_layers[0]][0].astype(float)
+
+        return outputs
 
     def detect(self, fpath):
         try:
             with av.open(fpath) as container:
-                try:
-                    container.seek(int(container.duration / 2))
+                try: container.seek(int(container.duration / 2))
                 except: container.seek(0)
 
                 frame = next(container.decode(video=0))
-                img = frame.to_image()
-                res = self.classifier(img)
-                return max([x["score"] for x in res
-                            if x["label"] not in ["neutral", "drawings"]])
-        except: pass
+                if frame.width >= frame.height:
+                    w = 256
+                    h = int(frame.height * (256 / frame.width))
+                else:
+                    w = int(frame.width * (256 / frame.height))
+                    h = 256
+                frame = frame.reformat(width=w, height=h, format="rgb24")
+                img = BytesIO()
+                frame.to_image().save(img, format="ppm")
 
-        return -1.0
+                scores = self._compute(img)
+        except:
+            return -1.0
+
+        return scores[1]
 
 
 if __name__ == "__main__":
diff --git a/nsfw_model/LICENSE.md b/nsfw_model/LICENSE.md
new file mode 100644
index 0000000..d1124b0
--- /dev/null
+++ b/nsfw_model/LICENSE.md
@@ -0,0 +1,11 @@
+
+Copyright 2016, Yahoo Inc.
+
+Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:
+
+1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.
+
+2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + diff --git a/nsfw_model/deploy.prototxt b/nsfw_model/deploy.prototxt new file mode 100644 index 0000000..16fb53e --- /dev/null +++ b/nsfw_model/deploy.prototxt @@ -0,0 +1,3488 @@ +name: "ResNet_50_1by2_nsfw" +layer { + name: "data" + type: "Input" + top: "data" + input_param { shape: { dim: 1 dim: 3 dim: 224 dim: 224 } } +} +layer { + name: "conv_1" + type: "Convolution" + bottom: "data" + top: "conv_1" + param { + lr_mult: 1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } + convolution_param { + num_output: 64 + pad: 3 + kernel_size: 7 + stride: 2 + weight_filler { + type: "xavier" + } + bias_filler { + type: "constant" + value: 0 + } + } +} +layer { + name: "bn_1" + type: "BatchNorm" + bottom: "conv_1" + top: "conv_1" + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + batch_norm_param { + use_global_stats: true + } +} +layer { + name: "scale_1" + type: "Scale" + bottom: "conv_1" + top: "conv_1" + scale_param { + bias_term: true + } +} +layer { + name: "relu_1" + type: "ReLU" + bottom: "conv_1" + top: "conv_1" +} +layer { + name: "pool1" + type: "Pooling" + bottom: "conv_1" + top: "pool1" + pooling_param { + pool: MAX + kernel_size: 3 + stride: 2 + } +} +layer { + name: "conv_stage0_block0_proj_shortcut" + type: "Convolution" + bottom: "pool1" + top: "conv_stage0_block0_proj_shortcut" + param { + lr_mult: 1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } + convolution_param { + num_output: 128 + pad: 0 + kernel_size: 1 + stride: 1 + weight_filler { + type: "xavier" + } + bias_filler { + type: "constant" + value: 0 + } + } +} +layer { + name: "bn_stage0_block0_proj_shortcut" + type: "BatchNorm" + bottom: "conv_stage0_block0_proj_shortcut" + top: "conv_stage0_block0_proj_shortcut" + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + batch_norm_param { + use_global_stats: true + } +} +layer { + name: "scale_stage0_block0_proj_shortcut" + type: "Scale" + bottom: "conv_stage0_block0_proj_shortcut" + top: "conv_stage0_block0_proj_shortcut" + scale_param { + bias_term: true + } +} +layer { + name: "conv_stage0_block0_branch2a" + type: "Convolution" + bottom: "pool1" + top: "conv_stage0_block0_branch2a" + param { + lr_mult: 1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } + convolution_param { + num_output: 32 + pad: 0 + kernel_size: 1 + stride: 1 + weight_filler { + type: "xavier" + } + bias_filler { + type: "constant" + value: 0 + } + } +} +layer { + name: "bn_stage0_block0_branch2a" + type: "BatchNorm" + bottom: "conv_stage0_block0_branch2a" + top: "conv_stage0_block0_branch2a" + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + batch_norm_param { + use_global_stats: true + } +} +layer { + name: "scale_stage0_block0_branch2a" + type: "Scale" + bottom: "conv_stage0_block0_branch2a" + top: 
"conv_stage0_block0_branch2a" + scale_param { + bias_term: true + } +} +layer { + name: "relu_stage0_block0_branch2a" + type: "ReLU" + bottom: "conv_stage0_block0_branch2a" + top: "conv_stage0_block0_branch2a" +} +layer { + name: "conv_stage0_block0_branch2b" + type: "Convolution" + bottom: "conv_stage0_block0_branch2a" + top: "conv_stage0_block0_branch2b" + param { + lr_mult: 1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } + convolution_param { + num_output: 32 + pad: 1 + kernel_size: 3 + stride: 1 + weight_filler { + type: "xavier" + } + bias_filler { + type: "constant" + value: 0 + } + } +} +layer { + name: "bn_stage0_block0_branch2b" + type: "BatchNorm" + bottom: "conv_stage0_block0_branch2b" + top: "conv_stage0_block0_branch2b" + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + batch_norm_param { + use_global_stats: true + } +} +layer { + name: "scale_stage0_block0_branch2b" + type: "Scale" + bottom: "conv_stage0_block0_branch2b" + top: "conv_stage0_block0_branch2b" + scale_param { + bias_term: true + } +} +layer { + name: "relu_stage0_block0_branch2b" + type: "ReLU" + bottom: "conv_stage0_block0_branch2b" + top: "conv_stage0_block0_branch2b" +} +layer { + name: "conv_stage0_block0_branch2c" + type: "Convolution" + bottom: "conv_stage0_block0_branch2b" + top: "conv_stage0_block0_branch2c" + param { + lr_mult: 1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } + convolution_param { + num_output: 128 + pad: 0 + kernel_size: 1 + stride: 1 + weight_filler { + type: "xavier" + } + bias_filler { + type: "constant" + value: 0 + } + } +} +layer { + name: "bn_stage0_block0_branch2c" + type: "BatchNorm" + bottom: "conv_stage0_block0_branch2c" + top: "conv_stage0_block0_branch2c" + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + batch_norm_param { + use_global_stats: true + } +} +layer { + name: "scale_stage0_block0_branch2c" + type: "Scale" + bottom: "conv_stage0_block0_branch2c" + top: "conv_stage0_block0_branch2c" + scale_param { + bias_term: true + } +} +layer { + name: "eltwise_stage0_block0" + type: "Eltwise" + bottom: "conv_stage0_block0_proj_shortcut" + bottom: "conv_stage0_block0_branch2c" + top: "eltwise_stage0_block0" +} +layer { + name: "relu_stage0_block0" + type: "ReLU" + bottom: "eltwise_stage0_block0" + top: "eltwise_stage0_block0" +} +layer { + name: "conv_stage0_block1_branch2a" + type: "Convolution" + bottom: "eltwise_stage0_block0" + top: "conv_stage0_block1_branch2a" + param { + lr_mult: 1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } + convolution_param { + num_output: 32 + pad: 0 + kernel_size: 1 + stride: 1 + weight_filler { + type: "xavier" + } + bias_filler { + type: "constant" + value: 0 + } + } +} +layer { + name: "bn_stage0_block1_branch2a" + type: "BatchNorm" + bottom: "conv_stage0_block1_branch2a" + top: "conv_stage0_block1_branch2a" + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + batch_norm_param { + use_global_stats: true + } +} +layer { + name: "scale_stage0_block1_branch2a" + type: "Scale" + bottom: "conv_stage0_block1_branch2a" + top: "conv_stage0_block1_branch2a" + scale_param { + bias_term: true + } +} +layer { + name: "relu_stage0_block1_branch2a" + type: "ReLU" + bottom: "conv_stage0_block1_branch2a" + top: "conv_stage0_block1_branch2a" +} +layer { + name: 
"conv_stage0_block1_branch2b" + type: "Convolution" + bottom: "conv_stage0_block1_branch2a" + top: "conv_stage0_block1_branch2b" + param { + lr_mult: 1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } + convolution_param { + num_output: 32 + pad: 1 + kernel_size: 3 + stride: 1 + weight_filler { + type: "xavier" + } + bias_filler { + type: "constant" + value: 0 + } + } +} +layer { + name: "bn_stage0_block1_branch2b" + type: "BatchNorm" + bottom: "conv_stage0_block1_branch2b" + top: "conv_stage0_block1_branch2b" + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + batch_norm_param { + use_global_stats: true + } +} +layer { + name: "scale_stage0_block1_branch2b" + type: "Scale" + bottom: "conv_stage0_block1_branch2b" + top: "conv_stage0_block1_branch2b" + scale_param { + bias_term: true + } +} +layer { + name: "relu_stage0_block1_branch2b" + type: "ReLU" + bottom: "conv_stage0_block1_branch2b" + top: "conv_stage0_block1_branch2b" +} +layer { + name: "conv_stage0_block1_branch2c" + type: "Convolution" + bottom: "conv_stage0_block1_branch2b" + top: "conv_stage0_block1_branch2c" + param { + lr_mult: 1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } + convolution_param { + num_output: 128 + pad: 0 + kernel_size: 1 + stride: 1 + weight_filler { + type: "xavier" + } + bias_filler { + type: "constant" + value: 0 + } + } +} +layer { + name: "bn_stage0_block1_branch2c" + type: "BatchNorm" + bottom: "conv_stage0_block1_branch2c" + top: "conv_stage0_block1_branch2c" + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + batch_norm_param { + use_global_stats: true + } +} +layer { + name: "scale_stage0_block1_branch2c" + type: "Scale" + bottom: "conv_stage0_block1_branch2c" + top: "conv_stage0_block1_branch2c" + scale_param { + bias_term: true + } +} +layer { + name: "eltwise_stage0_block1" + type: "Eltwise" + bottom: "eltwise_stage0_block0" + bottom: "conv_stage0_block1_branch2c" + top: "eltwise_stage0_block1" +} +layer { + name: "relu_stage0_block1" + type: "ReLU" + bottom: "eltwise_stage0_block1" + top: "eltwise_stage0_block1" +} +layer { + name: "conv_stage0_block2_branch2a" + type: "Convolution" + bottom: "eltwise_stage0_block1" + top: "conv_stage0_block2_branch2a" + param { + lr_mult: 1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } + convolution_param { + num_output: 32 + pad: 0 + kernel_size: 1 + stride: 1 + weight_filler { + type: "xavier" + } + bias_filler { + type: "constant" + value: 0 + } + } +} +layer { + name: "bn_stage0_block2_branch2a" + type: "BatchNorm" + bottom: "conv_stage0_block2_branch2a" + top: "conv_stage0_block2_branch2a" + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + batch_norm_param { + use_global_stats: true + } +} +layer { + name: "scale_stage0_block2_branch2a" + type: "Scale" + bottom: "conv_stage0_block2_branch2a" + top: "conv_stage0_block2_branch2a" + scale_param { + bias_term: true + } +} +layer { + name: "relu_stage0_block2_branch2a" + type: "ReLU" + bottom: "conv_stage0_block2_branch2a" + top: "conv_stage0_block2_branch2a" +} +layer { + name: "conv_stage0_block2_branch2b" + type: "Convolution" + bottom: "conv_stage0_block2_branch2a" + top: "conv_stage0_block2_branch2b" + param { + lr_mult: 1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } + convolution_param { + num_output: 32 
+ pad: 1 + kernel_size: 3 + stride: 1 + weight_filler { + type: "xavier" + } + bias_filler { + type: "constant" + value: 0 + } + } +} +layer { + name: "bn_stage0_block2_branch2b" + type: "BatchNorm" + bottom: "conv_stage0_block2_branch2b" + top: "conv_stage0_block2_branch2b" + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + batch_norm_param { + use_global_stats: true + } +} +layer { + name: "scale_stage0_block2_branch2b" + type: "Scale" + bottom: "conv_stage0_block2_branch2b" + top: "conv_stage0_block2_branch2b" + scale_param { + bias_term: true + } +} +layer { + name: "relu_stage0_block2_branch2b" + type: "ReLU" + bottom: "conv_stage0_block2_branch2b" + top: "conv_stage0_block2_branch2b" +} +layer { + name: "conv_stage0_block2_branch2c" + type: "Convolution" + bottom: "conv_stage0_block2_branch2b" + top: "conv_stage0_block2_branch2c" + param { + lr_mult: 1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } + convolution_param { + num_output: 128 + pad: 0 + kernel_size: 1 + stride: 1 + weight_filler { + type: "xavier" + } + bias_filler { + type: "constant" + value: 0 + } + } +} +layer { + name: "bn_stage0_block2_branch2c" + type: "BatchNorm" + bottom: "conv_stage0_block2_branch2c" + top: "conv_stage0_block2_branch2c" + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + batch_norm_param { + use_global_stats: true + } +} +layer { + name: "scale_stage0_block2_branch2c" + type: "Scale" + bottom: "conv_stage0_block2_branch2c" + top: "conv_stage0_block2_branch2c" + scale_param { + bias_term: true + } +} +layer { + name: "eltwise_stage0_block2" + type: "Eltwise" + bottom: "eltwise_stage0_block1" + bottom: "conv_stage0_block2_branch2c" + top: "eltwise_stage0_block2" +} +layer { + name: "relu_stage0_block2" + type: "ReLU" + bottom: "eltwise_stage0_block2" + top: "eltwise_stage0_block2" +} +layer { + name: "conv_stage1_block0_proj_shortcut" + type: "Convolution" + bottom: "eltwise_stage0_block2" + top: "conv_stage1_block0_proj_shortcut" + param { + lr_mult: 1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } + convolution_param { + num_output: 256 + pad: 0 + kernel_size: 1 + stride: 2 + weight_filler { + type: "xavier" + } + bias_filler { + type: "constant" + value: 0 + } + } +} +layer { + name: "bn_stage1_block0_proj_shortcut" + type: "BatchNorm" + bottom: "conv_stage1_block0_proj_shortcut" + top: "conv_stage1_block0_proj_shortcut" + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + batch_norm_param { + use_global_stats: true + } +} +layer { + name: "scale_stage1_block0_proj_shortcut" + type: "Scale" + bottom: "conv_stage1_block0_proj_shortcut" + top: "conv_stage1_block0_proj_shortcut" + scale_param { + bias_term: true + } +} +layer { + name: "conv_stage1_block0_branch2a" + type: "Convolution" + bottom: "eltwise_stage0_block2" + top: "conv_stage1_block0_branch2a" + param { + lr_mult: 1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } + convolution_param { + num_output: 64 + pad: 0 + kernel_size: 1 + stride: 2 + weight_filler { + type: "xavier" + } + bias_filler { + type: "constant" + value: 0 + } + } +} +layer { + name: "bn_stage1_block0_branch2a" + type: "BatchNorm" + bottom: "conv_stage1_block0_branch2a" + top: "conv_stage1_block0_branch2a" + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } 
+ param { + lr_mult: 0 + decay_mult: 0 + } + batch_norm_param { + use_global_stats: true + } +} +layer { + name: "scale_stage1_block0_branch2a" + type: "Scale" + bottom: "conv_stage1_block0_branch2a" + top: "conv_stage1_block0_branch2a" + scale_param { + bias_term: true + } +} +layer { + name: "relu_stage1_block0_branch2a" + type: "ReLU" + bottom: "conv_stage1_block0_branch2a" + top: "conv_stage1_block0_branch2a" +} +layer { + name: "conv_stage1_block0_branch2b" + type: "Convolution" + bottom: "conv_stage1_block0_branch2a" + top: "conv_stage1_block0_branch2b" + param { + lr_mult: 1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } + convolution_param { + num_output: 64 + pad: 1 + kernel_size: 3 + stride: 1 + weight_filler { + type: "xavier" + } + bias_filler { + type: "constant" + value: 0 + } + } +} +layer { + name: "bn_stage1_block0_branch2b" + type: "BatchNorm" + bottom: "conv_stage1_block0_branch2b" + top: "conv_stage1_block0_branch2b" + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + batch_norm_param { + use_global_stats: true + } +} +layer { + name: "scale_stage1_block0_branch2b" + type: "Scale" + bottom: "conv_stage1_block0_branch2b" + top: "conv_stage1_block0_branch2b" + scale_param { + bias_term: true + } +} +layer { + name: "relu_stage1_block0_branch2b" + type: "ReLU" + bottom: "conv_stage1_block0_branch2b" + top: "conv_stage1_block0_branch2b" +} +layer { + name: "conv_stage1_block0_branch2c" + type: "Convolution" + bottom: "conv_stage1_block0_branch2b" + top: "conv_stage1_block0_branch2c" + param { + lr_mult: 1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } + convolution_param { + num_output: 256 + pad: 0 + kernel_size: 1 + stride: 1 + weight_filler { + type: "xavier" + } + bias_filler { + type: "constant" + value: 0 + } + } +} +layer { + name: "bn_stage1_block0_branch2c" + type: "BatchNorm" + bottom: "conv_stage1_block0_branch2c" + top: "conv_stage1_block0_branch2c" + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + batch_norm_param { + use_global_stats: true + } +} +layer { + name: "scale_stage1_block0_branch2c" + type: "Scale" + bottom: "conv_stage1_block0_branch2c" + top: "conv_stage1_block0_branch2c" + scale_param { + bias_term: true + } +} +layer { + name: "eltwise_stage1_block0" + type: "Eltwise" + bottom: "conv_stage1_block0_proj_shortcut" + bottom: "conv_stage1_block0_branch2c" + top: "eltwise_stage1_block0" +} +layer { + name: "relu_stage1_block0" + type: "ReLU" + bottom: "eltwise_stage1_block0" + top: "eltwise_stage1_block0" +} +layer { + name: "conv_stage1_block1_branch2a" + type: "Convolution" + bottom: "eltwise_stage1_block0" + top: "conv_stage1_block1_branch2a" + param { + lr_mult: 1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } + convolution_param { + num_output: 64 + pad: 0 + kernel_size: 1 + stride: 1 + weight_filler { + type: "xavier" + } + bias_filler { + type: "constant" + value: 0 + } + } +} +layer { + name: "bn_stage1_block1_branch2a" + type: "BatchNorm" + bottom: "conv_stage1_block1_branch2a" + top: "conv_stage1_block1_branch2a" + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + batch_norm_param { + use_global_stats: true + } +} +layer { + name: "scale_stage1_block1_branch2a" + type: "Scale" + bottom: "conv_stage1_block1_branch2a" + top: "conv_stage1_block1_branch2a" + 
scale_param { + bias_term: true + } +} +layer { + name: "relu_stage1_block1_branch2a" + type: "ReLU" + bottom: "conv_stage1_block1_branch2a" + top: "conv_stage1_block1_branch2a" +} +layer { + name: "conv_stage1_block1_branch2b" + type: "Convolution" + bottom: "conv_stage1_block1_branch2a" + top: "conv_stage1_block1_branch2b" + param { + lr_mult: 1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } + convolution_param { + num_output: 64 + pad: 1 + kernel_size: 3 + stride: 1 + weight_filler { + type: "xavier" + } + bias_filler { + type: "constant" + value: 0 + } + } +} +layer { + name: "bn_stage1_block1_branch2b" + type: "BatchNorm" + bottom: "conv_stage1_block1_branch2b" + top: "conv_stage1_block1_branch2b" + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + batch_norm_param { + use_global_stats: true + } +} +layer { + name: "scale_stage1_block1_branch2b" + type: "Scale" + bottom: "conv_stage1_block1_branch2b" + top: "conv_stage1_block1_branch2b" + scale_param { + bias_term: true + } +} +layer { + name: "relu_stage1_block1_branch2b" + type: "ReLU" + bottom: "conv_stage1_block1_branch2b" + top: "conv_stage1_block1_branch2b" +} +layer { + name: "conv_stage1_block1_branch2c" + type: "Convolution" + bottom: "conv_stage1_block1_branch2b" + top: "conv_stage1_block1_branch2c" + param { + lr_mult: 1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } + convolution_param { + num_output: 256 + pad: 0 + kernel_size: 1 + stride: 1 + weight_filler { + type: "xavier" + } + bias_filler { + type: "constant" + value: 0 + } + } +} +layer { + name: "bn_stage1_block1_branch2c" + type: "BatchNorm" + bottom: "conv_stage1_block1_branch2c" + top: "conv_stage1_block1_branch2c" + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + batch_norm_param { + use_global_stats: true + } +} +layer { + name: "scale_stage1_block1_branch2c" + type: "Scale" + bottom: "conv_stage1_block1_branch2c" + top: "conv_stage1_block1_branch2c" + scale_param { + bias_term: true + } +} +layer { + name: "eltwise_stage1_block1" + type: "Eltwise" + bottom: "eltwise_stage1_block0" + bottom: "conv_stage1_block1_branch2c" + top: "eltwise_stage1_block1" +} +layer { + name: "relu_stage1_block1" + type: "ReLU" + bottom: "eltwise_stage1_block1" + top: "eltwise_stage1_block1" +} +layer { + name: "conv_stage1_block2_branch2a" + type: "Convolution" + bottom: "eltwise_stage1_block1" + top: "conv_stage1_block2_branch2a" + param { + lr_mult: 1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } + convolution_param { + num_output: 64 + pad: 0 + kernel_size: 1 + stride: 1 + weight_filler { + type: "xavier" + } + bias_filler { + type: "constant" + value: 0 + } + } +} +layer { + name: "bn_stage1_block2_branch2a" + type: "BatchNorm" + bottom: "conv_stage1_block2_branch2a" + top: "conv_stage1_block2_branch2a" + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + batch_norm_param { + use_global_stats: true + } +} +layer { + name: "scale_stage1_block2_branch2a" + type: "Scale" + bottom: "conv_stage1_block2_branch2a" + top: "conv_stage1_block2_branch2a" + scale_param { + bias_term: true + } +} +layer { + name: "relu_stage1_block2_branch2a" + type: "ReLU" + bottom: "conv_stage1_block2_branch2a" + top: "conv_stage1_block2_branch2a" +} +layer { + name: "conv_stage1_block2_branch2b" + type: "Convolution" + 
bottom: "conv_stage1_block2_branch2a" + top: "conv_stage1_block2_branch2b" + param { + lr_mult: 1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } + convolution_param { + num_output: 64 + pad: 1 + kernel_size: 3 + stride: 1 + weight_filler { + type: "xavier" + } + bias_filler { + type: "constant" + value: 0 + } + } +} +layer { + name: "bn_stage1_block2_branch2b" + type: "BatchNorm" + bottom: "conv_stage1_block2_branch2b" + top: "conv_stage1_block2_branch2b" + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + batch_norm_param { + use_global_stats: true + } +} +layer { + name: "scale_stage1_block2_branch2b" + type: "Scale" + bottom: "conv_stage1_block2_branch2b" + top: "conv_stage1_block2_branch2b" + scale_param { + bias_term: true + } +} +layer { + name: "relu_stage1_block2_branch2b" + type: "ReLU" + bottom: "conv_stage1_block2_branch2b" + top: "conv_stage1_block2_branch2b" +} +layer { + name: "conv_stage1_block2_branch2c" + type: "Convolution" + bottom: "conv_stage1_block2_branch2b" + top: "conv_stage1_block2_branch2c" + param { + lr_mult: 1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } + convolution_param { + num_output: 256 + pad: 0 + kernel_size: 1 + stride: 1 + weight_filler { + type: "xavier" + } + bias_filler { + type: "constant" + value: 0 + } + } +} +layer { + name: "bn_stage1_block2_branch2c" + type: "BatchNorm" + bottom: "conv_stage1_block2_branch2c" + top: "conv_stage1_block2_branch2c" + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + batch_norm_param { + use_global_stats: true + } +} +layer { + name: "scale_stage1_block2_branch2c" + type: "Scale" + bottom: "conv_stage1_block2_branch2c" + top: "conv_stage1_block2_branch2c" + scale_param { + bias_term: true + } +} +layer { + name: "eltwise_stage1_block2" + type: "Eltwise" + bottom: "eltwise_stage1_block1" + bottom: "conv_stage1_block2_branch2c" + top: "eltwise_stage1_block2" +} +layer { + name: "relu_stage1_block2" + type: "ReLU" + bottom: "eltwise_stage1_block2" + top: "eltwise_stage1_block2" +} +layer { + name: "conv_stage1_block3_branch2a" + type: "Convolution" + bottom: "eltwise_stage1_block2" + top: "conv_stage1_block3_branch2a" + param { + lr_mult: 1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } + convolution_param { + num_output: 64 + pad: 0 + kernel_size: 1 + stride: 1 + weight_filler { + type: "xavier" + } + bias_filler { + type: "constant" + value: 0 + } + } +} +layer { + name: "bn_stage1_block3_branch2a" + type: "BatchNorm" + bottom: "conv_stage1_block3_branch2a" + top: "conv_stage1_block3_branch2a" + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + batch_norm_param { + use_global_stats: true + } +} +layer { + name: "scale_stage1_block3_branch2a" + type: "Scale" + bottom: "conv_stage1_block3_branch2a" + top: "conv_stage1_block3_branch2a" + scale_param { + bias_term: true + } +} +layer { + name: "relu_stage1_block3_branch2a" + type: "ReLU" + bottom: "conv_stage1_block3_branch2a" + top: "conv_stage1_block3_branch2a" +} +layer { + name: "conv_stage1_block3_branch2b" + type: "Convolution" + bottom: "conv_stage1_block3_branch2a" + top: "conv_stage1_block3_branch2b" + param { + lr_mult: 1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } + convolution_param { + num_output: 64 + pad: 1 + kernel_size: 3 + stride: 1 + weight_filler 
{ + type: "xavier" + } + bias_filler { + type: "constant" + value: 0 + } + } +} +layer { + name: "bn_stage1_block3_branch2b" + type: "BatchNorm" + bottom: "conv_stage1_block3_branch2b" + top: "conv_stage1_block3_branch2b" + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + batch_norm_param { + use_global_stats: true + } +} +layer { + name: "scale_stage1_block3_branch2b" + type: "Scale" + bottom: "conv_stage1_block3_branch2b" + top: "conv_stage1_block3_branch2b" + scale_param { + bias_term: true + } +} +layer { + name: "relu_stage1_block3_branch2b" + type: "ReLU" + bottom: "conv_stage1_block3_branch2b" + top: "conv_stage1_block3_branch2b" +} +layer { + name: "conv_stage1_block3_branch2c" + type: "Convolution" + bottom: "conv_stage1_block3_branch2b" + top: "conv_stage1_block3_branch2c" + param { + lr_mult: 1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } + convolution_param { + num_output: 256 + pad: 0 + kernel_size: 1 + stride: 1 + weight_filler { + type: "xavier" + } + bias_filler { + type: "constant" + value: 0 + } + } +} +layer { + name: "bn_stage1_block3_branch2c" + type: "BatchNorm" + bottom: "conv_stage1_block3_branch2c" + top: "conv_stage1_block3_branch2c" + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + batch_norm_param { + use_global_stats: true + } +} +layer { + name: "scale_stage1_block3_branch2c" + type: "Scale" + bottom: "conv_stage1_block3_branch2c" + top: "conv_stage1_block3_branch2c" + scale_param { + bias_term: true + } +} +layer { + name: "eltwise_stage1_block3" + type: "Eltwise" + bottom: "eltwise_stage1_block2" + bottom: "conv_stage1_block3_branch2c" + top: "eltwise_stage1_block3" +} +layer { + name: "relu_stage1_block3" + type: "ReLU" + bottom: "eltwise_stage1_block3" + top: "eltwise_stage1_block3" +} +layer { + name: "conv_stage2_block0_proj_shortcut" + type: "Convolution" + bottom: "eltwise_stage1_block3" + top: "conv_stage2_block0_proj_shortcut" + param { + lr_mult: 1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } + convolution_param { + num_output: 512 + pad: 0 + kernel_size: 1 + stride: 2 + weight_filler { + type: "xavier" + } + bias_filler { + type: "constant" + value: 0 + } + } +} +layer { + name: "bn_stage2_block0_proj_shortcut" + type: "BatchNorm" + bottom: "conv_stage2_block0_proj_shortcut" + top: "conv_stage2_block0_proj_shortcut" + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + batch_norm_param { + use_global_stats: true + } +} +layer { + name: "scale_stage2_block0_proj_shortcut" + type: "Scale" + bottom: "conv_stage2_block0_proj_shortcut" + top: "conv_stage2_block0_proj_shortcut" + scale_param { + bias_term: true + } +} +layer { + name: "conv_stage2_block0_branch2a" + type: "Convolution" + bottom: "eltwise_stage1_block3" + top: "conv_stage2_block0_branch2a" + param { + lr_mult: 1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } + convolution_param { + num_output: 128 + pad: 0 + kernel_size: 1 + stride: 2 + weight_filler { + type: "xavier" + } + bias_filler { + type: "constant" + value: 0 + } + } +} +layer { + name: "bn_stage2_block0_branch2a" + type: "BatchNorm" + bottom: "conv_stage2_block0_branch2a" + top: "conv_stage2_block0_branch2a" + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + 
batch_norm_param { + use_global_stats: true + } +} +layer { + name: "scale_stage2_block0_branch2a" + type: "Scale" + bottom: "conv_stage2_block0_branch2a" + top: "conv_stage2_block0_branch2a" + scale_param { + bias_term: true + } +} +layer { + name: "relu_stage2_block0_branch2a" + type: "ReLU" + bottom: "conv_stage2_block0_branch2a" + top: "conv_stage2_block0_branch2a" +} +layer { + name: "conv_stage2_block0_branch2b" + type: "Convolution" + bottom: "conv_stage2_block0_branch2a" + top: "conv_stage2_block0_branch2b" + param { + lr_mult: 1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } + convolution_param { + num_output: 128 + pad: 1 + kernel_size: 3 + stride: 1 + weight_filler { + type: "xavier" + } + bias_filler { + type: "constant" + value: 0 + } + } +} +layer { + name: "bn_stage2_block0_branch2b" + type: "BatchNorm" + bottom: "conv_stage2_block0_branch2b" + top: "conv_stage2_block0_branch2b" + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + batch_norm_param { + use_global_stats: true + } +} +layer { + name: "scale_stage2_block0_branch2b" + type: "Scale" + bottom: "conv_stage2_block0_branch2b" + top: "conv_stage2_block0_branch2b" + scale_param { + bias_term: true + } +} +layer { + name: "relu_stage2_block0_branch2b" + type: "ReLU" + bottom: "conv_stage2_block0_branch2b" + top: "conv_stage2_block0_branch2b" +} +layer { + name: "conv_stage2_block0_branch2c" + type: "Convolution" + bottom: "conv_stage2_block0_branch2b" + top: "conv_stage2_block0_branch2c" + param { + lr_mult: 1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } + convolution_param { + num_output: 512 + pad: 0 + kernel_size: 1 + stride: 1 + weight_filler { + type: "xavier" + } + bias_filler { + type: "constant" + value: 0 + } + } +} +layer { + name: "bn_stage2_block0_branch2c" + type: "BatchNorm" + bottom: "conv_stage2_block0_branch2c" + top: "conv_stage2_block0_branch2c" + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + batch_norm_param { + use_global_stats: true + } +} +layer { + name: "scale_stage2_block0_branch2c" + type: "Scale" + bottom: "conv_stage2_block0_branch2c" + top: "conv_stage2_block0_branch2c" + scale_param { + bias_term: true + } +} +layer { + name: "eltwise_stage2_block0" + type: "Eltwise" + bottom: "conv_stage2_block0_proj_shortcut" + bottom: "conv_stage2_block0_branch2c" + top: "eltwise_stage2_block0" +} +layer { + name: "relu_stage2_block0" + type: "ReLU" + bottom: "eltwise_stage2_block0" + top: "eltwise_stage2_block0" +} +layer { + name: "conv_stage2_block1_branch2a" + type: "Convolution" + bottom: "eltwise_stage2_block0" + top: "conv_stage2_block1_branch2a" + param { + lr_mult: 1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } + convolution_param { + num_output: 128 + pad: 0 + kernel_size: 1 + stride: 1 + weight_filler { + type: "xavier" + } + bias_filler { + type: "constant" + value: 0 + } + } +} +layer { + name: "bn_stage2_block1_branch2a" + type: "BatchNorm" + bottom: "conv_stage2_block1_branch2a" + top: "conv_stage2_block1_branch2a" + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + batch_norm_param { + use_global_stats: true + } +} +layer { + name: "scale_stage2_block1_branch2a" + type: "Scale" + bottom: "conv_stage2_block1_branch2a" + top: "conv_stage2_block1_branch2a" + scale_param { + bias_term: true + } +} +layer { 
+ name: "relu_stage2_block1_branch2a" + type: "ReLU" + bottom: "conv_stage2_block1_branch2a" + top: "conv_stage2_block1_branch2a" +} +layer { + name: "conv_stage2_block1_branch2b" + type: "Convolution" + bottom: "conv_stage2_block1_branch2a" + top: "conv_stage2_block1_branch2b" + param { + lr_mult: 1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } + convolution_param { + num_output: 128 + pad: 1 + kernel_size: 3 + stride: 1 + weight_filler { + type: "xavier" + } + bias_filler { + type: "constant" + value: 0 + } + } +} +layer { + name: "bn_stage2_block1_branch2b" + type: "BatchNorm" + bottom: "conv_stage2_block1_branch2b" + top: "conv_stage2_block1_branch2b" + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + batch_norm_param { + use_global_stats: true + } +} +layer { + name: "scale_stage2_block1_branch2b" + type: "Scale" + bottom: "conv_stage2_block1_branch2b" + top: "conv_stage2_block1_branch2b" + scale_param { + bias_term: true + } +} +layer { + name: "relu_stage2_block1_branch2b" + type: "ReLU" + bottom: "conv_stage2_block1_branch2b" + top: "conv_stage2_block1_branch2b" +} +layer { + name: "conv_stage2_block1_branch2c" + type: "Convolution" + bottom: "conv_stage2_block1_branch2b" + top: "conv_stage2_block1_branch2c" + param { + lr_mult: 1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } + convolution_param { + num_output: 512 + pad: 0 + kernel_size: 1 + stride: 1 + weight_filler { + type: "xavier" + } + bias_filler { + type: "constant" + value: 0 + } + } +} +layer { + name: "bn_stage2_block1_branch2c" + type: "BatchNorm" + bottom: "conv_stage2_block1_branch2c" + top: "conv_stage2_block1_branch2c" + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + batch_norm_param { + use_global_stats: true + } +} +layer { + name: "scale_stage2_block1_branch2c" + type: "Scale" + bottom: "conv_stage2_block1_branch2c" + top: "conv_stage2_block1_branch2c" + scale_param { + bias_term: true + } +} +layer { + name: "eltwise_stage2_block1" + type: "Eltwise" + bottom: "eltwise_stage2_block0" + bottom: "conv_stage2_block1_branch2c" + top: "eltwise_stage2_block1" +} +layer { + name: "relu_stage2_block1" + type: "ReLU" + bottom: "eltwise_stage2_block1" + top: "eltwise_stage2_block1" +} +layer { + name: "conv_stage2_block2_branch2a" + type: "Convolution" + bottom: "eltwise_stage2_block1" + top: "conv_stage2_block2_branch2a" + param { + lr_mult: 1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } + convolution_param { + num_output: 128 + pad: 0 + kernel_size: 1 + stride: 1 + weight_filler { + type: "xavier" + } + bias_filler { + type: "constant" + value: 0 + } + } +} +layer { + name: "bn_stage2_block2_branch2a" + type: "BatchNorm" + bottom: "conv_stage2_block2_branch2a" + top: "conv_stage2_block2_branch2a" + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + batch_norm_param { + use_global_stats: true + } +} +layer { + name: "scale_stage2_block2_branch2a" + type: "Scale" + bottom: "conv_stage2_block2_branch2a" + top: "conv_stage2_block2_branch2a" + scale_param { + bias_term: true + } +} +layer { + name: "relu_stage2_block2_branch2a" + type: "ReLU" + bottom: "conv_stage2_block2_branch2a" + top: "conv_stage2_block2_branch2a" +} +layer { + name: "conv_stage2_block2_branch2b" + type: "Convolution" + bottom: "conv_stage2_block2_branch2a" + top: 
"conv_stage2_block2_branch2b" + param { + lr_mult: 1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } + convolution_param { + num_output: 128 + pad: 1 + kernel_size: 3 + stride: 1 + weight_filler { + type: "xavier" + } + bias_filler { + type: "constant" + value: 0 + } + } +} +layer { + name: "bn_stage2_block2_branch2b" + type: "BatchNorm" + bottom: "conv_stage2_block2_branch2b" + top: "conv_stage2_block2_branch2b" + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + batch_norm_param { + use_global_stats: true + } +} +layer { + name: "scale_stage2_block2_branch2b" + type: "Scale" + bottom: "conv_stage2_block2_branch2b" + top: "conv_stage2_block2_branch2b" + scale_param { + bias_term: true + } +} +layer { + name: "relu_stage2_block2_branch2b" + type: "ReLU" + bottom: "conv_stage2_block2_branch2b" + top: "conv_stage2_block2_branch2b" +} +layer { + name: "conv_stage2_block2_branch2c" + type: "Convolution" + bottom: "conv_stage2_block2_branch2b" + top: "conv_stage2_block2_branch2c" + param { + lr_mult: 1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } + convolution_param { + num_output: 512 + pad: 0 + kernel_size: 1 + stride: 1 + weight_filler { + type: "xavier" + } + bias_filler { + type: "constant" + value: 0 + } + } +} +layer { + name: "bn_stage2_block2_branch2c" + type: "BatchNorm" + bottom: "conv_stage2_block2_branch2c" + top: "conv_stage2_block2_branch2c" + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + batch_norm_param { + use_global_stats: true + } +} +layer { + name: "scale_stage2_block2_branch2c" + type: "Scale" + bottom: "conv_stage2_block2_branch2c" + top: "conv_stage2_block2_branch2c" + scale_param { + bias_term: true + } +} +layer { + name: "eltwise_stage2_block2" + type: "Eltwise" + bottom: "eltwise_stage2_block1" + bottom: "conv_stage2_block2_branch2c" + top: "eltwise_stage2_block2" +} +layer { + name: "relu_stage2_block2" + type: "ReLU" + bottom: "eltwise_stage2_block2" + top: "eltwise_stage2_block2" +} +layer { + name: "conv_stage2_block3_branch2a" + type: "Convolution" + bottom: "eltwise_stage2_block2" + top: "conv_stage2_block3_branch2a" + param { + lr_mult: 1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } + convolution_param { + num_output: 128 + pad: 0 + kernel_size: 1 + stride: 1 + weight_filler { + type: "xavier" + } + bias_filler { + type: "constant" + value: 0 + } + } +} +layer { + name: "bn_stage2_block3_branch2a" + type: "BatchNorm" + bottom: "conv_stage2_block3_branch2a" + top: "conv_stage2_block3_branch2a" + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + batch_norm_param { + use_global_stats: true + } +} +layer { + name: "scale_stage2_block3_branch2a" + type: "Scale" + bottom: "conv_stage2_block3_branch2a" + top: "conv_stage2_block3_branch2a" + scale_param { + bias_term: true + } +} +layer { + name: "relu_stage2_block3_branch2a" + type: "ReLU" + bottom: "conv_stage2_block3_branch2a" + top: "conv_stage2_block3_branch2a" +} +layer { + name: "conv_stage2_block3_branch2b" + type: "Convolution" + bottom: "conv_stage2_block3_branch2a" + top: "conv_stage2_block3_branch2b" + param { + lr_mult: 1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } + convolution_param { + num_output: 128 + pad: 1 + kernel_size: 3 + stride: 1 + weight_filler { + type: "xavier" + } + bias_filler { + 
type: "constant" + value: 0 + } + } +} +layer { + name: "bn_stage2_block3_branch2b" + type: "BatchNorm" + bottom: "conv_stage2_block3_branch2b" + top: "conv_stage2_block3_branch2b" + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + batch_norm_param { + use_global_stats: true + } +} +layer { + name: "scale_stage2_block3_branch2b" + type: "Scale" + bottom: "conv_stage2_block3_branch2b" + top: "conv_stage2_block3_branch2b" + scale_param { + bias_term: true + } +} +layer { + name: "relu_stage2_block3_branch2b" + type: "ReLU" + bottom: "conv_stage2_block3_branch2b" + top: "conv_stage2_block3_branch2b" +} +layer { + name: "conv_stage2_block3_branch2c" + type: "Convolution" + bottom: "conv_stage2_block3_branch2b" + top: "conv_stage2_block3_branch2c" + param { + lr_mult: 1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } + convolution_param { + num_output: 512 + pad: 0 + kernel_size: 1 + stride: 1 + weight_filler { + type: "xavier" + } + bias_filler { + type: "constant" + value: 0 + } + } +} +layer { + name: "bn_stage2_block3_branch2c" + type: "BatchNorm" + bottom: "conv_stage2_block3_branch2c" + top: "conv_stage2_block3_branch2c" + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + batch_norm_param { + use_global_stats: true + } +} +layer { + name: "scale_stage2_block3_branch2c" + type: "Scale" + bottom: "conv_stage2_block3_branch2c" + top: "conv_stage2_block3_branch2c" + scale_param { + bias_term: true + } +} +layer { + name: "eltwise_stage2_block3" + type: "Eltwise" + bottom: "eltwise_stage2_block2" + bottom: "conv_stage2_block3_branch2c" + top: "eltwise_stage2_block3" +} +layer { + name: "relu_stage2_block3" + type: "ReLU" + bottom: "eltwise_stage2_block3" + top: "eltwise_stage2_block3" +} +layer { + name: "conv_stage2_block4_branch2a" + type: "Convolution" + bottom: "eltwise_stage2_block3" + top: "conv_stage2_block4_branch2a" + param { + lr_mult: 1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } + convolution_param { + num_output: 128 + pad: 0 + kernel_size: 1 + stride: 1 + weight_filler { + type: "xavier" + } + bias_filler { + type: "constant" + value: 0 + } + } +} +layer { + name: "bn_stage2_block4_branch2a" + type: "BatchNorm" + bottom: "conv_stage2_block4_branch2a" + top: "conv_stage2_block4_branch2a" + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + batch_norm_param { + use_global_stats: true + } +} +layer { + name: "scale_stage2_block4_branch2a" + type: "Scale" + bottom: "conv_stage2_block4_branch2a" + top: "conv_stage2_block4_branch2a" + scale_param { + bias_term: true + } +} +layer { + name: "relu_stage2_block4_branch2a" + type: "ReLU" + bottom: "conv_stage2_block4_branch2a" + top: "conv_stage2_block4_branch2a" +} +layer { + name: "conv_stage2_block4_branch2b" + type: "Convolution" + bottom: "conv_stage2_block4_branch2a" + top: "conv_stage2_block4_branch2b" + param { + lr_mult: 1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } + convolution_param { + num_output: 128 + pad: 1 + kernel_size: 3 + stride: 1 + weight_filler { + type: "xavier" + } + bias_filler { + type: "constant" + value: 0 + } + } +} +layer { + name: "bn_stage2_block4_branch2b" + type: "BatchNorm" + bottom: "conv_stage2_block4_branch2b" + top: "conv_stage2_block4_branch2b" + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + 
decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + batch_norm_param { + use_global_stats: true + } +} +layer { + name: "scale_stage2_block4_branch2b" + type: "Scale" + bottom: "conv_stage2_block4_branch2b" + top: "conv_stage2_block4_branch2b" + scale_param { + bias_term: true + } +} +layer { + name: "relu_stage2_block4_branch2b" + type: "ReLU" + bottom: "conv_stage2_block4_branch2b" + top: "conv_stage2_block4_branch2b" +} +layer { + name: "conv_stage2_block4_branch2c" + type: "Convolution" + bottom: "conv_stage2_block4_branch2b" + top: "conv_stage2_block4_branch2c" + param { + lr_mult: 1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } + convolution_param { + num_output: 512 + pad: 0 + kernel_size: 1 + stride: 1 + weight_filler { + type: "xavier" + } + bias_filler { + type: "constant" + value: 0 + } + } +} +layer { + name: "bn_stage2_block4_branch2c" + type: "BatchNorm" + bottom: "conv_stage2_block4_branch2c" + top: "conv_stage2_block4_branch2c" + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + batch_norm_param { + use_global_stats: true + } +} +layer { + name: "scale_stage2_block4_branch2c" + type: "Scale" + bottom: "conv_stage2_block4_branch2c" + top: "conv_stage2_block4_branch2c" + scale_param { + bias_term: true + } +} +layer { + name: "eltwise_stage2_block4" + type: "Eltwise" + bottom: "eltwise_stage2_block3" + bottom: "conv_stage2_block4_branch2c" + top: "eltwise_stage2_block4" +} +layer { + name: "relu_stage2_block4" + type: "ReLU" + bottom: "eltwise_stage2_block4" + top: "eltwise_stage2_block4" +} +layer { + name: "conv_stage2_block5_branch2a" + type: "Convolution" + bottom: "eltwise_stage2_block4" + top: "conv_stage2_block5_branch2a" + param { + lr_mult: 1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } + convolution_param { + num_output: 128 + pad: 0 + kernel_size: 1 + stride: 1 + weight_filler { + type: "xavier" + } + bias_filler { + type: "constant" + value: 0 + } + } +} +layer { + name: "bn_stage2_block5_branch2a" + type: "BatchNorm" + bottom: "conv_stage2_block5_branch2a" + top: "conv_stage2_block5_branch2a" + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + batch_norm_param { + use_global_stats: true + } +} +layer { + name: "scale_stage2_block5_branch2a" + type: "Scale" + bottom: "conv_stage2_block5_branch2a" + top: "conv_stage2_block5_branch2a" + scale_param { + bias_term: true + } +} +layer { + name: "relu_stage2_block5_branch2a" + type: "ReLU" + bottom: "conv_stage2_block5_branch2a" + top: "conv_stage2_block5_branch2a" +} +layer { + name: "conv_stage2_block5_branch2b" + type: "Convolution" + bottom: "conv_stage2_block5_branch2a" + top: "conv_stage2_block5_branch2b" + param { + lr_mult: 1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } + convolution_param { + num_output: 128 + pad: 1 + kernel_size: 3 + stride: 1 + weight_filler { + type: "xavier" + } + bias_filler { + type: "constant" + value: 0 + } + } +} +layer { + name: "bn_stage2_block5_branch2b" + type: "BatchNorm" + bottom: "conv_stage2_block5_branch2b" + top: "conv_stage2_block5_branch2b" + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + batch_norm_param { + use_global_stats: true + } +} +layer { + name: "scale_stage2_block5_branch2b" + type: "Scale" + bottom: "conv_stage2_block5_branch2b" + top: 
"conv_stage2_block5_branch2b" + scale_param { + bias_term: true + } +} +layer { + name: "relu_stage2_block5_branch2b" + type: "ReLU" + bottom: "conv_stage2_block5_branch2b" + top: "conv_stage2_block5_branch2b" +} +layer { + name: "conv_stage2_block5_branch2c" + type: "Convolution" + bottom: "conv_stage2_block5_branch2b" + top: "conv_stage2_block5_branch2c" + param { + lr_mult: 1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } + convolution_param { + num_output: 512 + pad: 0 + kernel_size: 1 + stride: 1 + weight_filler { + type: "xavier" + } + bias_filler { + type: "constant" + value: 0 + } + } +} +layer { + name: "bn_stage2_block5_branch2c" + type: "BatchNorm" + bottom: "conv_stage2_block5_branch2c" + top: "conv_stage2_block5_branch2c" + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + batch_norm_param { + use_global_stats: true + } +} +layer { + name: "scale_stage2_block5_branch2c" + type: "Scale" + bottom: "conv_stage2_block5_branch2c" + top: "conv_stage2_block5_branch2c" + scale_param { + bias_term: true + } +} +layer { + name: "eltwise_stage2_block5" + type: "Eltwise" + bottom: "eltwise_stage2_block4" + bottom: "conv_stage2_block5_branch2c" + top: "eltwise_stage2_block5" +} +layer { + name: "relu_stage2_block5" + type: "ReLU" + bottom: "eltwise_stage2_block5" + top: "eltwise_stage2_block5" +} +layer { + name: "conv_stage3_block0_proj_shortcut" + type: "Convolution" + bottom: "eltwise_stage2_block5" + top: "conv_stage3_block0_proj_shortcut" + param { + lr_mult: 1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } + convolution_param { + num_output: 1024 + pad: 0 + kernel_size: 1 + stride: 2 + weight_filler { + type: "xavier" + } + bias_filler { + type: "constant" + value: 0 + } + } +} +layer { + name: "bn_stage3_block0_proj_shortcut" + type: "BatchNorm" + bottom: "conv_stage3_block0_proj_shortcut" + top: "conv_stage3_block0_proj_shortcut" + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + batch_norm_param { + use_global_stats: true + } +} +layer { + name: "scale_stage3_block0_proj_shortcut" + type: "Scale" + bottom: "conv_stage3_block0_proj_shortcut" + top: "conv_stage3_block0_proj_shortcut" + scale_param { + bias_term: true + } +} +layer { + name: "conv_stage3_block0_branch2a" + type: "Convolution" + bottom: "eltwise_stage2_block5" + top: "conv_stage3_block0_branch2a" + param { + lr_mult: 1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } + convolution_param { + num_output: 256 + pad: 0 + kernel_size: 1 + stride: 2 + weight_filler { + type: "xavier" + } + bias_filler { + type: "constant" + value: 0 + } + } +} +layer { + name: "bn_stage3_block0_branch2a" + type: "BatchNorm" + bottom: "conv_stage3_block0_branch2a" + top: "conv_stage3_block0_branch2a" + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + batch_norm_param { + use_global_stats: true + } +} +layer { + name: "scale_stage3_block0_branch2a" + type: "Scale" + bottom: "conv_stage3_block0_branch2a" + top: "conv_stage3_block0_branch2a" + scale_param { + bias_term: true + } +} +layer { + name: "relu_stage3_block0_branch2a" + type: "ReLU" + bottom: "conv_stage3_block0_branch2a" + top: "conv_stage3_block0_branch2a" +} +layer { + name: "conv_stage3_block0_branch2b" + type: "Convolution" + bottom: "conv_stage3_block0_branch2a" + top: "conv_stage3_block0_branch2b" 
+ param { + lr_mult: 1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } + convolution_param { + num_output: 256 + pad: 1 + kernel_size: 3 + stride: 1 + weight_filler { + type: "xavier" + } + bias_filler { + type: "constant" + value: 0 + } + } +} +layer { + name: "bn_stage3_block0_branch2b" + type: "BatchNorm" + bottom: "conv_stage3_block0_branch2b" + top: "conv_stage3_block0_branch2b" + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + batch_norm_param { + use_global_stats: true + } +} +layer { + name: "scale_stage3_block0_branch2b" + type: "Scale" + bottom: "conv_stage3_block0_branch2b" + top: "conv_stage3_block0_branch2b" + scale_param { + bias_term: true + } +} +layer { + name: "relu_stage3_block0_branch2b" + type: "ReLU" + bottom: "conv_stage3_block0_branch2b" + top: "conv_stage3_block0_branch2b" +} +layer { + name: "conv_stage3_block0_branch2c" + type: "Convolution" + bottom: "conv_stage3_block0_branch2b" + top: "conv_stage3_block0_branch2c" + param { + lr_mult: 1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } + convolution_param { + num_output: 1024 + pad: 0 + kernel_size: 1 + stride: 1 + weight_filler { + type: "xavier" + } + bias_filler { + type: "constant" + value: 0 + } + } +} +layer { + name: "bn_stage3_block0_branch2c" + type: "BatchNorm" + bottom: "conv_stage3_block0_branch2c" + top: "conv_stage3_block0_branch2c" + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + batch_norm_param { + use_global_stats: true + } +} +layer { + name: "scale_stage3_block0_branch2c" + type: "Scale" + bottom: "conv_stage3_block0_branch2c" + top: "conv_stage3_block0_branch2c" + scale_param { + bias_term: true + } +} +layer { + name: "eltwise_stage3_block0" + type: "Eltwise" + bottom: "conv_stage3_block0_proj_shortcut" + bottom: "conv_stage3_block0_branch2c" + top: "eltwise_stage3_block0" +} +layer { + name: "relu_stage3_block0" + type: "ReLU" + bottom: "eltwise_stage3_block0" + top: "eltwise_stage3_block0" +} +layer { + name: "conv_stage3_block1_branch2a" + type: "Convolution" + bottom: "eltwise_stage3_block0" + top: "conv_stage3_block1_branch2a" + param { + lr_mult: 1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } + convolution_param { + num_output: 256 + pad: 0 + kernel_size: 1 + stride: 1 + weight_filler { + type: "xavier" + } + bias_filler { + type: "constant" + value: 0 + } + } +} +layer { + name: "bn_stage3_block1_branch2a" + type: "BatchNorm" + bottom: "conv_stage3_block1_branch2a" + top: "conv_stage3_block1_branch2a" + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + batch_norm_param { + use_global_stats: true + } +} +layer { + name: "scale_stage3_block1_branch2a" + type: "Scale" + bottom: "conv_stage3_block1_branch2a" + top: "conv_stage3_block1_branch2a" + scale_param { + bias_term: true + } +} +layer { + name: "relu_stage3_block1_branch2a" + type: "ReLU" + bottom: "conv_stage3_block1_branch2a" + top: "conv_stage3_block1_branch2a" +} +layer { + name: "conv_stage3_block1_branch2b" + type: "Convolution" + bottom: "conv_stage3_block1_branch2a" + top: "conv_stage3_block1_branch2b" + param { + lr_mult: 1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } + convolution_param { + num_output: 256 + pad: 1 + kernel_size: 3 + stride: 1 + weight_filler { + type: "xavier" + } + bias_filler { + type: "constant" + 
value: 0 + } + } +} +layer { + name: "bn_stage3_block1_branch2b" + type: "BatchNorm" + bottom: "conv_stage3_block1_branch2b" + top: "conv_stage3_block1_branch2b" + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + batch_norm_param { + use_global_stats: true + } +} +layer { + name: "scale_stage3_block1_branch2b" + type: "Scale" + bottom: "conv_stage3_block1_branch2b" + top: "conv_stage3_block1_branch2b" + scale_param { + bias_term: true + } +} +layer { + name: "relu_stage3_block1_branch2b" + type: "ReLU" + bottom: "conv_stage3_block1_branch2b" + top: "conv_stage3_block1_branch2b" +} +layer { + name: "conv_stage3_block1_branch2c" + type: "Convolution" + bottom: "conv_stage3_block1_branch2b" + top: "conv_stage3_block1_branch2c" + param { + lr_mult: 1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } + convolution_param { + num_output: 1024 + pad: 0 + kernel_size: 1 + stride: 1 + weight_filler { + type: "xavier" + } + bias_filler { + type: "constant" + value: 0 + } + } +} +layer { + name: "bn_stage3_block1_branch2c" + type: "BatchNorm" + bottom: "conv_stage3_block1_branch2c" + top: "conv_stage3_block1_branch2c" + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + batch_norm_param { + use_global_stats: true + } +} +layer { + name: "scale_stage3_block1_branch2c" + type: "Scale" + bottom: "conv_stage3_block1_branch2c" + top: "conv_stage3_block1_branch2c" + scale_param { + bias_term: true + } +} +layer { + name: "eltwise_stage3_block1" + type: "Eltwise" + bottom: "eltwise_stage3_block0" + bottom: "conv_stage3_block1_branch2c" + top: "eltwise_stage3_block1" +} +layer { + name: "relu_stage3_block1" + type: "ReLU" + bottom: "eltwise_stage3_block1" + top: "eltwise_stage3_block1" +} +layer { + name: "conv_stage3_block2_branch2a" + type: "Convolution" + bottom: "eltwise_stage3_block1" + top: "conv_stage3_block2_branch2a" + param { + lr_mult: 1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } + convolution_param { + num_output: 256 + pad: 0 + kernel_size: 1 + stride: 1 + weight_filler { + type: "xavier" + } + bias_filler { + type: "constant" + value: 0 + } + } +} +layer { + name: "bn_stage3_block2_branch2a" + type: "BatchNorm" + bottom: "conv_stage3_block2_branch2a" + top: "conv_stage3_block2_branch2a" + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + batch_norm_param { + use_global_stats: true + } +} +layer { + name: "scale_stage3_block2_branch2a" + type: "Scale" + bottom: "conv_stage3_block2_branch2a" + top: "conv_stage3_block2_branch2a" + scale_param { + bias_term: true + } +} +layer { + name: "relu_stage3_block2_branch2a" + type: "ReLU" + bottom: "conv_stage3_block2_branch2a" + top: "conv_stage3_block2_branch2a" +} +layer { + name: "conv_stage3_block2_branch2b" + type: "Convolution" + bottom: "conv_stage3_block2_branch2a" + top: "conv_stage3_block2_branch2b" + param { + lr_mult: 1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } + convolution_param { + num_output: 256 + pad: 1 + kernel_size: 3 + stride: 1 + weight_filler { + type: "xavier" + } + bias_filler { + type: "constant" + value: 0 + } + } +} +layer { + name: "bn_stage3_block2_branch2b" + type: "BatchNorm" + bottom: "conv_stage3_block2_branch2b" + top: "conv_stage3_block2_branch2b" + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + 
param { + lr_mult: 0 + decay_mult: 0 + } + batch_norm_param { + use_global_stats: true + } +} +layer { + name: "scale_stage3_block2_branch2b" + type: "Scale" + bottom: "conv_stage3_block2_branch2b" + top: "conv_stage3_block2_branch2b" + scale_param { + bias_term: true + } +} +layer { + name: "relu_stage3_block2_branch2b" + type: "ReLU" + bottom: "conv_stage3_block2_branch2b" + top: "conv_stage3_block2_branch2b" +} +layer { + name: "conv_stage3_block2_branch2c" + type: "Convolution" + bottom: "conv_stage3_block2_branch2b" + top: "conv_stage3_block2_branch2c" + param { + lr_mult: 1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } + convolution_param { + num_output: 1024 + pad: 0 + kernel_size: 1 + stride: 1 + weight_filler { + type: "xavier" + } + bias_filler { + type: "constant" + value: 0 + } + } +} +layer { + name: "bn_stage3_block2_branch2c" + type: "BatchNorm" + bottom: "conv_stage3_block2_branch2c" + top: "conv_stage3_block2_branch2c" + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + batch_norm_param { + use_global_stats: true + } +} +layer { + name: "scale_stage3_block2_branch2c" + type: "Scale" + bottom: "conv_stage3_block2_branch2c" + top: "conv_stage3_block2_branch2c" + scale_param { + bias_term: true + } +} +layer { + name: "eltwise_stage3_block2" + type: "Eltwise" + bottom: "eltwise_stage3_block1" + bottom: "conv_stage3_block2_branch2c" + top: "eltwise_stage3_block2" +} +layer { + name: "relu_stage3_block2" + type: "ReLU" + bottom: "eltwise_stage3_block2" + top: "eltwise_stage3_block2" +} +layer { + name: "pool" + type: "Pooling" + bottom: "eltwise_stage3_block2" + top: "pool" + pooling_param { + pool: AVE + kernel_size: 7 + stride: 1 + } +} +layer { + name: "fc_nsfw" + type: "InnerProduct" + bottom: "pool" + top: "fc_nsfw" + param { + lr_mult: 5 + decay_mult: 1 + } + param { + lr_mult: 10 + decay_mult: 0 + } + inner_product_param{ + num_output: 2 + weight_filler { + type: "xavier" + std: 0.01 + } + bias_filler { + type: "xavier" + value: 0 + } + } +} +layer { + name: "prob" + type: "Softmax" + bottom: "fc_nsfw" + top: "prob" +} + diff --git a/nsfw_model/resnet_50_1by2_nsfw.caffemodel b/nsfw_model/resnet_50_1by2_nsfw.caffemodel new file mode 100644 index 0000000..c4f3105 Binary files /dev/null and b/nsfw_model/resnet_50_1by2_nsfw.caffemodel differ diff --git a/requirements.txt b/requirements.txt index 8924245..b765697 100644 --- a/requirements.txt +++ b/requirements.txt @@ -7,15 +7,12 @@ Jinja2 Flask flask_sqlalchemy python_magic -ipaddress # vscan clamd # nsfw detection -torch -transformers -pillow +numpy # mod ui av diff --git a/templates/403.html b/templates/403.html deleted file mode 100644 index fcffa28..0000000 --- a/templates/403.html +++ /dev/null @@ -1 +0,0 @@ -{{ description if description else "Your host is banned." 
}}
diff --git a/templates/index.html b/templates/index.html
index 257dd49..a2add39 100644
--- a/templates/index.html
+++ b/templates/index.html
@@ -37,6 +37,7 @@ To change the expiration date (see above):
 
 {% set max_size = config["MAX_CONTENT_LENGTH"]|filesizeformat(True) %}
 Maximum file size: {{ max_size }}
+Not allowed: {{ config["FHOST_MIME_BLACKLIST"]|join(", ") }}
 
 FILE RETENTION PERIOD
 
diff --git a/tests/__init__.py b/tests/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/tests/test_client.py b/tests/test_client.py
new file mode 100644
index 0000000..0b29e00
--- /dev/null
+++ b/tests/test_client.py
@@ -0,0 +1,81 @@
+import pytest
+import tempfile
+import os
+from flask_migrate import upgrade as db_upgrade
+from io import BytesIO
+
+from fhost import app, db, url_for, File, URL
+
+@pytest.fixture
+def client():
+    with tempfile.TemporaryDirectory() as tmpdir:
+        app.config["SQLALCHEMY_DATABASE_URI"] = f"sqlite:///{tmpdir}/db.sqlite"
+        app.config["FHOST_STORAGE_PATH"] = os.path.join(tmpdir, "up")
+        app.config["TESTING"] = True
+
+        with app.test_client() as client:
+            with app.app_context():
+                db_upgrade()
+            yield client
+
+def test_client(client):
+    payloads = [
+        ({ "file" : (BytesIO(b"hello"), "hello.txt") }, 200, b"https://localhost/E.txt\n"),
+        ({ "file" : (BytesIO(b"hello"), "hello.ignorethis") }, 200, b"https://localhost/E.txt\n"),
+        ({ "file" : (BytesIO(b"bye"), "bye.truncatethis") }, 200, b"https://localhost/Q.truncate\n"),
+        ({ "file" : (BytesIO(b"hi"), "hi.tar.gz") }, 200, b"https://localhost/h.tar.gz\n"),
+        ({ "file" : (BytesIO(b"lea!"), "lea!") }, 200, b"https://localhost/d.txt\n"),
+        ({ "file" : (BytesIO(b"why?"), "balls", "application/x-dosexec") }, 415, None),
+        ({ "shorten" : "https://0x0.st" }, 200, b"https://localhost/E\n"),
+        ({ "shorten" : "https://localhost" }, 400, None),
+        ({}, 400, None),
+    ]
+
+    for p, s, r in payloads:
+        rv = client.post("/", buffered=True,
+                         content_type="multipart/form-data",
+                         data=p)
+        assert rv.status_code == s
+        if r:
+            assert rv.data == r
+
+    f = File.query.get(2)
+    f.removed = True
+    db.session.add(f)
+    db.session.commit()
+
+    rq = [
+        (200, [
+            "/",
+            "robots.txt",
+            "E.txt",
+            "E.txt/test",
+            "E.txt/test.py",
+            "d.txt",
+            "h.tar.gz",
+        ]),
+        (302, [
+            "E",
+        ]),
+        (404, [
+            "test.bin",
+            "test.bin/test",
+            "test.bin/test.py",
+            "test",
+            "test/test",
+            "test.bin/test.py",
+            "E.bin",
+            "E/test",
+            "E/test.bin",
+        ]),
+        (451, [
+            "Q.truncate",
+        ]),
+    ]
+
+    for code, paths in rq:
+        for p in paths:
+            app.logger.info(f"GET {p}")
+            rv = client.get(p)
+            assert rv.status_code == code
+
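
The prototxt added above ends in a two-class Softmax layer named "prob", and
resnet_50_1by2_nsfw.caffemodel supplies its weights. For orientation only, the
following is a minimal pycaffe sketch of how such a model is typically scored;
it is not part of this patch. The helper name nsfw_score, the model file paths,
the mean values and the assumption that class index 1 carries the NSFW
probability follow common open_nsfw usage rather than anything defined here::

    # Hypothetical helper -- illustrative only, not included in this patch.
    # Assumes the Caffe Python module is importable and that the model files
    # sit under nsfw_model/ (paths assumed).
    import numpy as np
    import caffe

    def nsfw_score(image_path,
                   proto="nsfw_model/deploy.prototxt",
                   weights="nsfw_model/resnet_50_1by2_nsfw.caffemodel"):
        net = caffe.Net(proto, weights, caffe.TEST)

        # Standard pycaffe preprocessing: HWC RGB floats in [0, 1] become
        # CHW BGR with mean subtraction (mean values assumed, as in open_nsfw).
        t = caffe.io.Transformer({"data": net.blobs["data"].data.shape})
        t.set_transpose("data", (2, 0, 1))
        t.set_mean("data", np.array([104, 117, 123]))
        t.set_raw_scale("data", 255)
        t.set_channel_swap("data", (2, 1, 0))

        img = caffe.io.load_image(image_path)  # RGB float32 in [0, 1]
        net.blobs["data"].data[...] = t.preprocess("data", img)
        out = net.forward()

        # "prob" is the Softmax output declared at the end of the prototxt;
        # index 1 is taken to be the NSFW class.
        return float(out["prob"][0][1])

With pytest and the project requirements installed, the new test module above
can be run from the repository root with "pytest tests/"; the fixture
configures the Flask app and a temporary SQLite database directly, so no
FLASK_APP environment variable is needed.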