├── .gitignore
├── Procfile
├── README.md
├── illegal_tokens.json
├── main.py
├── nsfw_words.json
├── requirements.txt
├── sql_db
│   ├── __init__.py
│   ├── downloads.py
│   ├── images.py
│   ├── rankings.py
│   ├── user_score.py
│   └── users.py
├── static
│   ├── introjs-modern.css
│   ├── normalize.css
│   └── style.css
├── templates
│   └── index.html
└── utils
    ├── __init__.py
    └── logging_utils.py
/.gitignore:
--------------------------------------------------------------------------------
1 | # Byte-compiled / optimized / DLL files
2 | __pycache__/
3 | *.py[cod]
4 | **/__pycache__
5 |
6 | # C extensions
7 | *.so
8 |
9 | # Distribution / packaging
10 | bin/
11 | build/
12 | develop-eggs/
13 | dist/
14 | eggs/
15 | lib/
16 | lib64/
17 | parts/
18 | sdist/
19 | var/
20 | *.egg-info/
21 | .installed.cfg
22 | *.egg
23 |
24 | # Installer logs
25 | pip-log.txt
26 | pip-delete-this-directory.txt
27 |
28 | # Unit test / coverage reports
29 | .tox/
30 | .coverage
31 | .cache
32 | nosetests.xml
33 | coverage.xml
34 |
35 | # Translations
36 | *.mo
37 |
38 | # Mr Developer
39 | .mr.developer.cfg
40 | .project
41 | .pydevproject
42 |
43 | # Rope
44 | .ropeproject
45 |
46 | # Django stuff:
47 | *.log
48 | *.pot
49 |
50 | # Sphinx documentation
51 | docs/_build/
52 |
53 | # jupyter
54 | .ipynb_checkpoints/
55 |
56 | # env
57 | .env
--------------------------------------------------------------------------------
/Procfile:
--------------------------------------------------------------------------------
1 | web: gunicorn -w 1 -t 50 -k uvicorn.workers.UvicornWorker main:app
2 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # Installation
2 |
3 | Create an env and run:
4 | ```bash
5 | pip install -r requirements.txt
6 | ```
7 |
8 | # Run locally
9 | ```bash
10 | uvicorn main:app --reload --port 8080
11 | ```
12 |
13 | # Run on Heroku
14 | Simply go to the heroku app and press deploy.
15 |
16 | # Pull Requests
17 | Please send us pull requests so we can improve the app!
--------------------------------------------------------------------------------
/illegal_tokens.json:
--------------------------------------------------------------------------------
1 | [
2 | "(",
3 | ")",
4 | "--",
5 | "{",
6 | "}",
7 | "_",
8 | "/",
9 | "::"
10 | ]
--------------------------------------------------------------------------------
/main.py:
--------------------------------------------------------------------------------
1 | import asyncio
2 | import base64
3 | import collections
4 | import json
5 | import os
6 | import random
7 | import re
8 | import string
9 | import time
10 | import traceback
11 | import uuid
12 | from io import BytesIO
13 | from typing import List, Union, Tuple, Optional
14 | from urllib.parse import urlparse
15 | from fastapi_utils.tasks import repeat_every
16 | import aiohttp
17 | import boto3
18 | import requests
19 | from PIL import Image
20 | from pydantic import BaseModel, Field
21 | from apscheduler.schedulers.background import BackgroundScheduler
22 |
23 | from sql_db.user_score import get_user_score, increment_user_score, create_user_score_table
24 | from sql_db.users import create_user_table, add_user, get_all_users, get_num_users
25 | from sql_db.downloads import add_download, create_downloads_table, DownloadData, get_all_downloads, get_num_downloads
26 | from sql_db.rankings import add_ranking, create_rankings_table, get_all_rankings, RankingData, get_num_rankings
27 | from sql_db.images import add_image, create_image_table, get_all_images, ImageData, get_num_images, \
28 | get_num_images_per_user_last_week
29 | from utils.logging_utils import logger
30 | from authlib.integrations.base_client import OAuthError
31 | from fastapi import FastAPI, BackgroundTasks, Form, HTTPException, WebSocket, Cookie
32 | from starlette.middleware.sessions import SessionMiddleware
33 | from starlette.responses import HTMLResponse, RedirectResponse
34 | from starlette.requests import Request
35 | from authlib.integrations.starlette_client import OAuth
36 | from starlette.config import Config
37 | from starlette.staticfiles import StaticFiles
38 | from starlette.templating import Jinja2Templates
39 | from aiocache import Cache
40 | from aiocache.serializers import PickleSerializer
41 | from aiocache.lock import RedLock
42 | import tweepy
43 | from starlette_discord import DiscordOAuthClient
44 |
# DUMMY_IMG_URL = f"https://loremflickr.com/512/512"

# --- FastAPI application: sessions, static assets, HTML templates ---
app = FastAPI()
app.add_middleware(SessionMiddleware, secret_key="!secret")
app.mount("/static", StaticFiles(directory="static"), name="static")
templates = Jinja2Templates(directory="templates")

# Google OAuth configured from the .env file via authlib.
config = Config('.env')
oauth = OAuth(config)

CONF_URL = 'https://accounts.google.com/.well-known/openid-configuration'
oauth.register(
    name='google',
    server_metadata_url=CONF_URL,
    client_kwargs={
        'scope': 'openid email profile'
    }
)

# Self-hosted generation backends: a JSON list of URLs in the environment.
BACKEND_URLS = json.loads(os.environ["BACKEND_URLS"])
app.backend_urls = BACKEND_URLS[:]
num_urls = len(app.backend_urls)
# NOTE(review): the next line overrides the computed backend count with a
# hard-coded 100, so the queue/concurrency limits below no longer track the
# real number of backends — confirm this override is intentional.
num_urls = 100
MAX_SIZE_IN_QUEUE = num_urls * 1          # max jobs waiting in the queue
MAX_SIZE_CONCURRENT = num_urls // 2       # max jobs generating at once
logger.debug(f"{MAX_SIZE_IN_QUEUE=} {MAX_SIZE_CONCURRENT=}")

# S3 bucket used for persisting generated images and CSV exports.
AWS_ACCESS_KEY = os.environ["AWS_ACCESS_KEY"]
AWS_SECRET_KEY = os.environ["AWS_SECRET_KEY"]
BUCKET_NAME = "text-to-image-human-preferences"
S3_EXTRA_ARGS = {'ACL': 'public-read'}

# Redis backs the aiocache queue/locks (URL parsed for its components).
REDIS_URL = os.environ.get("REDIS_URL")
url = urlparse(REDIS_URL)

# Twitter credentials for the /tweet endpoint.
consumer_key = os.environ['TWITTER_CONSUMER_KEY']
consumer_secret_key = os.environ['TWITTER_CONSUMER_SECRET_KEY']
access_token = os.environ['TWITTER_ACCESS_TOKEN']
access_token_secret = os.environ['TWITTER_ACCESS_TOKEN_SECRET']

# Stability REST API configuration.
STABILITY_API_KEY = os.environ['STABILITY_API_KEY']
STABILITY_API_HOST = os.environ['STABILITY_API_HOST']
STABILITY_API_HOST2 = os.environ['STABILITY_API_HOST2']
STABILITY_ENGINE_ID_1 = "stable-diffusion-xl-beta-v2-2-2"
STABILITY_ENGINE_ID_2 = "stable-diffusion-xl-beta-v2-2-2"


# One generation task is spawned per entry, so repetition here controls how
# many images each engine contributes per request.
STABILITY_ENGINE_IDS = [
    STABILITY_ENGINE_ID_1,
    STABILITY_ENGINE_ID_1,
    STABILITY_ENGINE_ID_2,
    STABILITY_ENGINE_ID_2,
    STABILITY_ENGINE_ID_2,
    STABILITY_ENGINE_ID_2
]

twitter_auth = tweepy.OAuthHandler(consumer_key, consumer_secret_key)
twitter_auth.set_access_token(access_token, access_token_secret)
twitter_api = tweepy.API(twitter_auth)

# Discord OAuth client; the redirect must match the registered app settings.
discord_client_id = os.environ['DISCORD_CLIENT_ID']
discord_client_secret = os.environ['DISCORD_CLIENT_SECRET']
redirect_uri = "https://pickapic.io/discord_auth"

discord_client = DiscordOAuthClient(discord_client_id, discord_client_secret, redirect_uri)

# In-process stores for finished-but-not-yet-persisted jobs
# (job_id -> images / ImageData / image uids). Drained by clean_jobs().
job_id2images = {}
job_id2images_data = {}
finished_job_id2uids = {}
# NOTE(review): this module-level scheduler appears unused —
# create_background_tasks() builds its own instance.
scheduler = BackgroundScheduler()
# User ids and client IPs that are throttled / served error responses.
BLOCKED_IDS = [
    280, 331, 437, 641, 718, 729, 783, 984, 1023, 1040, 1059, 1149, 1187, 1177, 1202, 1203, 1220,
    1230, 1227, 1279, 1405, 1460, 1623, 1627, 1758, 1801, 1907, 1917, 1922, 2071, 2215, 2239, 2286, 2322, 2357, 2452,
    2459, 2481, 2513, 2515, 2520, 2545, 2596, 2603, 2617, 2638, 2709, 2783, 2842, 2266, 2899, 3084, 3138, 3243, 3264,
    3265, 3267, 3251, 3292, 3268, 3271, 1961, 3302, 3318, 1689, 3278, 1382, 3542, 3446, 3633, 1526, 4710, 4748, 4762,
    4444, 4870, 4733, 4878, 4928, 4939, 4926, 4942, 5019, 4946, 5006, 4241, 5027, 5015, 5041, 5032, 5047, 5054, 5064,
    5023, 5137, 5281, 4115, 5273, 4347, 3523, 5403, 3589, 5697, 6574, 6573, 6822, 7037, 7277, 8078, 7995, 3604,
    7947, 7277, 8079, 4565, 7931, 4597, 8118, 8176, 8313, 8285, 6032
]
BLOCKED_IPS = ["159.138.50.118", "42.2.119.97", "5.28.184.13", "190.167.37.23", "62.102.148.166", "89.209.53.165",
               "217.84.145.197", "88.224.35.22"]

MAX_IMAGES_PER_USER_PER_WEEK = 2000

# A known-good reference image occasionally injected into result batches for
# quality control. NOTE: this performs a blocking HTTP GET at import time.
CONTROL_IMAGE_UID = "767c0473-32cf-4d8b-9d82-25412a5f7f6d"
CONTROL_URL = f"https://text-to-image-human-preferences.s3.us-east-2.amazonaws.com/images/{CONTROL_IMAGE_UID}.png"
control_image_bytes = BytesIO()
Image.open(BytesIO(requests.get(CONTROL_URL).content)).save(control_image_bytes, format="PNG")
control_image_bytes = base64.b64encode(control_image_bytes.getvalue())

# Word/token blocklists used by invalid_prompt().
nsfw_words = json.load(open("./nsfw_words.json", "r"))
illegal_tokens = json.load(open("./illegal_tokens.json", "r"))
137 |
class UpdateImageRequest(BaseModel):
    """Payload shared by the click-ranking and download endpoints."""
    image_uid: str  # the image acted upon (best-clicked or downloaded)
    prompt: str  # prompt the batch was generated from
    image_uids: List[str]  # all image uids shown in the batch, in display order
142 |
143 |
class TweetRequest(BaseModel):
    """Payload for the /tweet/ endpoint."""
    image_uid: str  # uid of the image to post
    prompt: str  # prompt text included in the tweet body
    image_data: str  # base64-encoded PNG bytes
    user_id: str  # client-reported user id (see note in tweet_images)
150 |
class Job(BaseModel):
    """State of one image-generation request as it moves through the queue.

    Instances are stored in the shared cache keyed by ``job_id`` so any
    worker can observe and advance them. ``status`` is one of "queued",
    "running", "finished" or "failed".
    """
    prompt: str
    job_id: str = Field(default_factory=lambda: str(uuid.uuid4()))
    status: str = "queued"
    # time.time() returns a float; the previous `int` annotation made
    # pydantic truncate the factory value, dropping sub-second precision
    # from the elapsed/estimated-time computations.
    start_time: float = Field(default_factory=lambda: time.time())
    image_uids: list = []  # pydantic copies mutable defaults per instance
    progress: int = 0  # 0-100 estimate streamed to the client
    user_id: Optional[str] = None  # None for anonymous requests

    def __str__(self):
        return f"Job(job_id={self.job_id}, status={self.status}, start_time={self.start_time}, image_uids={self.image_uids}, progress={self.progress}, user_id={self.user_id})"
162 |
163 |
async def get_job(job_id: str) -> Job:
    """Fetch the Job stored under *job_id* from the shared cache."""
    return await app.cache.get(job_id)
167 |
168 |
async def set_job(job_id: str, job: Job):
    """Store *job* in the shared cache under *job_id*."""
    await app.cache.set(job_id, job)
171 |
172 |
async def clean_job(job_id):
    """Remove the Job stored under *job_id* from the shared cache."""
    await app.cache.delete(job_id)
175 |
176 |
def is_user_logged(request):
    """Return True when the session already carries a 'user_id' entry."""
    session = request.session
    return "user_id" in session
179 |
180 |
@app.get('/')
async def homepage(request: Request):
    """Render the landing page, registering/looking up the logged-in user.

    Blocked IPs are treated as anonymous; blocked user ids are only logged
    here (throttling happens in the action endpoints). The unused
    ``start = time.time()`` local from the original was removed.
    """
    user = request.session.get('user')
    ip = request.client.host

    if ip in BLOCKED_IPS:
        logger.info(f"Blocking {ip=} {user=}")
        user = None

    user_id = "null"
    user_score = 0

    if user is not None:
        # add_user returns the (possibly pre-existing) id for this email.
        user_id = add_user(user["email"], user["name"])
        if user_id in BLOCKED_IDS:
            logger.info(f"IP of blocked user {user_id}: {ip=}")

        user_score = get_user_score(user_id)
        print(f"user {user_id} logged in {ip=}")
        request.session['user_id'] = user_id

    return templates.TemplateResponse(
        "index.html",
        {
            "request": request,
            "is_authenticated": is_user_logged(request),
            "user_id": user_id,
            "user_score": user_score,
            "nsfw_words": nsfw_words,
            "illegal_tokens": illegal_tokens,
        }
    )
214 |
215 |
@app.get('/login')
async def login(request: Request):
    """Begin the Google OAuth flow by redirecting to the provider."""
    callback_url = request.url_for('auth')
    return await oauth.google.authorize_redirect(request, callback_url)
220 |
221 |
@app.get('/discord_login')
async def discord_login(request: Request):
    """Begin the Discord OAuth flow.

    Renamed from ``login``: the previous definition reused the Google
    handler's name, shadowing it at module level and making Starlette's
    endpoint-name lookup (``request.url_for('login')``) ambiguous.
    """
    redirect_uri = request.url_for('discord_auth')
    print(f"Discord {redirect_uri=}")
    discord_client.redirect_uri = redirect_uri
    return discord_client.redirect()
228 |
229 |
@app.get('/auth')
async def auth(request: Request):
    """Google OAuth callback: store the user in the session and redirect home.

    On an OAuth error a minimal error page is returned instead (the markup
    was mangled in the source; reconstructed per the authlib Starlette
    example).
    """
    try:
        token = await oauth.google.authorize_access_token(request)
    except OAuthError as error:
        return HTMLResponse(f'<h1>{error.error}</h1>')
    user = token.get('userinfo')
    if user:
        request.session['user'] = dict(user)
        add_user(user["email"], user["name"])
    return RedirectResponse(url='/')
241 |
242 |
@app.get('/discord_auth')
async def discord_auth(code: str, request: Request):
    """Discord OAuth callback: resolve the user and store it in the session.

    The email computation was moved inside the ``if user:`` guard — the
    original dereferenced ``user.email`` before checking truthiness, which
    would raise AttributeError on a falsy login result.
    """
    try:
        user = await discord_client.login(code)
    except OAuthError as error:
        return HTMLResponse(f'<h1>{error.error}</h1>')
    print(f"Discord {user=}")
    if user:
        # Discord accounts may hide the email; fall back to the numeric id.
        email = str(user.email if user.email else user.id)
        request.session['user'] = {"email": email, "name": user.username}
        add_user(email, user.username)
    return RedirectResponse(url='/')
255 |
256 |
@app.get('/logout')
async def logout(request: Request):
    """Clear the login session and send the visitor back to the homepage."""
    for session_key in ("user", "user_id"):
        request.session.pop(session_key, None)
    return RedirectResponse(url='/')
262 |
263 |
264 | # async def get_random_images(job):
265 | # job.status = "running"
266 | # logger.debug(f"Getting random images for {job.job_id} with prompt {job.prompt}")
267 | # await asyncio.sleep(10)
268 | # images = []
269 | # for _ in range(4):
270 | # response = requests.get(DUMMY_IMG_URL)
271 | # image = Image.open(BytesIO(response.content))
272 | # buf = BytesIO()
273 | # image.save(buf, format='JPEG')
274 | # # Encode the image data as a base64-encoded string
275 | # image_data = base64.b64encode(buf.getvalue()).decode('utf-8')
276 | # images.append(image_data)
277 | # logger.debug(f"Got random images for {job.job_id} with prompt {job.prompt}")
278 | # job.status = "finished"
279 | # job.images = images
280 | # job.image_uids = [str(uuid.uuid4()) for _ in range(4)]
281 |
282 |
def upload_images(images, image_uids):
    """Persist base64-encoded images to S3 under images/<uid>.png.

    Each image is decoded, written to a local staging file, uploaded with a
    public-read ACL, and the local file is removed afterwards. Failures are
    logged and skipped so one bad image doesn't block the batch. The
    ``os.makedirs`` call was hoisted out of the loop — the directory only
    needs to be created once.
    """
    s3_client = boto3.client(
        's3',
        aws_access_key_id=AWS_ACCESS_KEY,
        aws_secret_access_key=AWS_SECRET_KEY,
    )
    image_dir = "images"
    os.makedirs(image_dir, exist_ok=True)
    for image, image_uid in zip(images, image_uids):
        pil_image = Image.open(BytesIO(base64.b64decode(image)))
        path = f"{image_dir}/{image_uid}.png"
        pil_image.save(path)
        try:
            if os.path.exists(path):
                s3_client.upload_file(path,
                                      BUCKET_NAME,
                                      path,
                                      ExtraArgs=S3_EXTRA_ARGS)
            else:
                logger.warning(f"Couldn't upload image {image_uid} - path does not exists={os.path.exists(path)}")
        except Exception as e:
            logger.error(f"Couldn't upload image {image_uid} - {e}")
        # Always clean up the staging file, uploaded or not.
        if os.path.exists(path):
            os.remove(path)
307 |
308 |
def extract_image_data(response_json, image_uids):
    """Build one ImageData record per generated image.

    *response_json* maps field names to lists aligned by image index;
    *image_uids* supplies the uid for each position.
    """
    num_images = len(response_json["prompt"])
    return [
        ImageData(
            image_uid=image_uids[idx],
            user_id=response_json["user_id"][idx],
            prompt=response_json["prompt"][idx],
            negative_prompt=response_json["negative_prompt"][idx],
            seed=response_json["seed"][idx],
            gs=response_json["gs"][idx],
            steps=response_json["steps"][idx],
            idx=response_json["idx"][idx],
            num_generated=response_json["num_generated"][idx],
            scheduler_cls=response_json["scheduler_cls"][idx],
            model_id=response_json["model_id"][idx]
        )
        for idx in range(num_images)
    ]
328 |
329 |
async def get_backend_url_idx():
    """Atomically fetch-and-increment the round-robin backend cursor.

    Returns the cursor value modulo the current pool size; the distributed
    lock keeps the counter consistent across workers.
    """
    async with RedLock(app.cache, "backend_url_idx", 1000):
        cursor = await app.cache.get("backend_url_idx")
        await app.cache.set("backend_url_idx", cursor + 1)
        return cursor % len(app.backend_urls)
335 |
336 |
async def get_verified_backend_url(prompt):
    """Round-robin over self-hosted backends until one answers a health check.

    NOTE(review): ``requests.get`` is a blocking call inside an async
    function, stalling the event loop for up to 1.5s per probe — consider
    aiohttp. Failing URLs are removed from ``app.backend_urls``; if every URL
    fails the pool empties and ``get_backend_url_idx``'s modulo would raise —
    confirm that is acceptable (this helper's only caller is currently
    commented out).
    """
    verified = False
    backend_url = None
    while not verified:
        backend_url_idx = await get_backend_url_idx()
        backend_url = app.backend_urls[backend_url_idx]
        try:
            # Probe the backend's root endpoint (URL minus "generate").
            response = requests.get(backend_url.replace("generate", ""), timeout=1.5)
            if response.status_code == 200:
                verified = True
        except Exception as e:
            # Drop the dead backend from the rotation and try the next one.
            app.backend_urls.remove(backend_url)
            logger.debug(f"{backend_url=} {prompt=} failed with exception {e}")
            continue
    return backend_url
352 |
353 |
def remove_square_brackets(prompt: str) -> Tuple[str, Optional[str]]:
    """Split an optional [negative prompt] segment out of *prompt*.

    Returns ``(prompt_without_brackets, inner_text)``; ``inner_text`` is
    ``None`` when no bracketed segment exists. The remaining prompt is now
    stripped of surrounding whitespace in both branches — previously only
    the no-match branch stripped, leaving stray spaces where the bracketed
    segment had been removed.
    """
    match = re.search(r'\[(.+?)\]', prompt)
    if match:
        return prompt.replace(match.group(), "").strip(), match.group(1)
    return prompt.strip(), None
359 |
360 |
async def generate_images(prompt, negative_prompt, num_samples, user_id, backend_url):
    """POST one generation request to a self-hosted backend, with retries.

    Retries once per second on any error and gives up after more than five
    failures. Returns the backend's JSON payload, or ``None`` on give-up.
    """
    start_time = time.time()  # kept for the commented-out timing log below
    async with aiohttp.ClientSession() as session:
        has_generated = False
        num_tries = 0
        while not has_generated:
            try:
                # logger.debug(f"calling {backend_url} with prompt {prompt}")
                async with session.post(backend_url,
                                        json={
                                            "prompt": prompt,
                                            "negative_prompt": negative_prompt,
                                            "num_samples": num_samples,
                                            "user_id": user_id
                                        }) as response:
                    response_json = await response.json()
                    has_generated = True
            except Exception as e:
                await asyncio.sleep(1)
                num_tries += 1
                logger.error(f"Error #{num_tries} creating images for prompt {prompt} with exception {e}")
                logger.error(traceback.format_exc())
                if num_tries > 5:
                    return None
    # logger.info(
    #     f"Generated {num_samples} images with {backend_url} for prompt {prompt} in {time.time() - start_time:.2f} seconds")
    return response_json
388 |
389 |
async def generate_images_via_api(prompt, negative_prompt, user_id, engine_id):
    """Generate a single image through the Stability REST API.

    Returns a dict shaped like the self-hosted backend response (list-valued
    fields align positionally for extract_image_data), or ``None`` when an
    error occurs after the API produced artifacts. Content-filtered results
    are kept but tagged with ``model_id="CONTENT_FILTERED"``.
    """
    start_time = time.time()  # kept for the commented-out timing log below
    async with aiohttp.ClientSession() as session:
        has_generated = False
        num_tries = 0
        # Randomized sampling parameters, fixed for the whole retry loop.
        seed = random.randint(0, 2147483647)
        gs = random.uniform(3, 12)
        asscore = 6.75  # aesthetic-score target passed to the xlfixer preset
        api_host = f"{STABILITY_API_HOST2}/v1/generation/{engine_id}/text-to-image"
        # Engine 1 renders at 512px, everything else at 1024px (square).
        if engine_id == STABILITY_ENGINE_ID_1:
            res = 512
        else:
            res = 1024
        height = res
        width = res
        n_steps = 50
        scheduler_cls = "DDIM"

        if "beta" in engine_id:
            # Inline preset directive understood by the beta engines.
            prompt = "$IPCinline:{" + "\"preset\": \"xlfixer\", " + "\"sdxl_ascore\"" + f":[{asscore},2.5]" + "}$ " + prompt
        data = {}
        while not has_generated:
            try:
                async with session.post(
                        api_host,
                        timeout=20,
                        headers={
                            "Content-Type": "application/json",
                            "Accept": "application/json",
                            "Authorization": f"Bearer {STABILITY_API_KEY}"
                        },
                        json={
                            "text_prompts": [
                                {
                                    "text": prompt,
                                    "weight": 1.0
                                },
                                {
                                    "text": negative_prompt,
                                    "weight": -1.0
                                },
                            ],
                            "cfg_scale": gs,
                            "height": height,
                            "width": width,
                            "sampler": scheduler_cls,
                            "samples": 1,
                            "steps": n_steps,
                            "seed": seed,
                        },
                ) as response:
                    data = await response.json(content_type=None)
                    image_bytes = [dp['base64'] for dp in data["artifacts"]]
                    was_filtered = any(dp['finishReason'] == "CONTENT_FILTERED" for dp in data["artifacts"])
                    if was_filtered:
                        logger.error(f"{user_id=} {prompt=} FILTERED!!!")
                    # Mirror the self-hosted backend's response schema.
                    response_json = {
                        "user_id": user_id,
                        "prompt": [prompt],
                        "negative_prompt": [negative_prompt],
                        "seed": seed,
                        "gs": [gs],
                        "steps": n_steps,
                        "idx": [0],
                        "num_generated": 1,
                        "scheduler_cls": scheduler_cls,
                        "model_id": engine_id if not was_filtered else "CONTENT_FILTERED",
                        "images": image_bytes
                    }
                    has_generated = True
            except Exception as e:
                await asyncio.sleep(1)
                num_tries += 1
                # if 'artifacts' not in data and num_tries < 5:
                #     logger.info(f"No artifacts in response {engine_id=} {prompt=}")
                #     continue
                logger.error(f"Error #{num_tries} creating images for prompt {prompt} with exception {e}")
                # Bail out only when the API did answer with artifacts but
                # processing still failed. NOTE(review): the non-artifact
                # path has no retry cap, so a persistently failing request
                # loops forever — confirm this is intended.
                if 'artifacts' in data:
                    logger.error(traceback.format_exc())
                    return None
    # logger.info(f"Generated 1 images with {engine_id} for prompt {prompt} in {time.time() - start_time:.2f} seconds")
    return response_json
472 |
473 |
async def create_images(prompt, user_id):
    """Fan one generation request out to every configured Stability engine.

    Returns ``(images, image_uids, image_data)`` with the three lists
    aligned, or ``None`` when every engine call failed — previously that
    case fell through to ``responses[0]`` and raised IndexError; returning
    ``None`` lets get_stable_images mark the job as failed, which it already
    checks for.
    """
    prompt, negative_prompt = remove_square_brackets(prompt)
    if negative_prompt is None:
        negative_prompt = "white border, wall view, ugly, deformed, noisy, blurry, distorted, grainy"

    start = time.time()

    tasks = [
        asyncio.create_task(generate_images_via_api(
            prompt=prompt,
            negative_prompt=negative_prompt,
            user_id=user_id,
            engine_id=stability_engine_id
        ))
        for stability_engine_id in STABILITY_ENGINE_IDS
    ]

    # gather preserves task order; generate_images_via_api yields None on
    # failure, which we filter out below.
    responses = await asyncio.gather(*tasks)
    responses = [response for response in responses if response is not None]
    if not responses:
        logger.error(f"All engines failed for prompt {prompt}")
        return None

    # Merge the per-engine responses. Scalar fields are broadcast to one
    # entry per generated image ("gs" is always list-valued per response).
    total_response_json = collections.defaultdict(list)
    for key in responses[0]:
        for response in responses:
            if isinstance(responses[0][key], list):
                total_response_json[key] += response[key]
            else:
                total_response_json[key] += [response[key]] * len(response["gs"])

    user_score = get_user_score(user_id)
    logger.info(
        f"Generation: {prompt=} | time={time.time() - start:.2f}(sec) | {user_id=} | {os.getpid()=} | {user_score=}")
    images = total_response_json.pop("images")
    image_uids = [str(uuid.uuid4()) for _ in range(len(images))]
    image_data = extract_image_data(total_response_json, image_uids)
    return images, image_uids, image_data
525 |
526 |
async def get_stable_images(job):
    """Run one generation job end to end and publish its outcome.

    Marks the job "running", generates the images, stashes results in the
    in-process dicts for delivery/cleanup, and writes the final status back
    to the shared cache.
    """
    job.status = "running"
    await set_job(job.job_id, job)
    result = await create_images(job.prompt, job.user_id)
    if result is None:
        job.status = "failed"
    else:
        images, uids, images_data = result
        job_id2images[job.job_id] = images
        job.image_uids = uids
        job_id2images_data[job.job_id] = images_data
        finished_job_id2uids[job.job_id] = uids
        job.status = "finished"
    await set_job(job.job_id, job)
542 |
543 |
async def consumer():
    """Pull one job off the shared queue and run it, honoring concurrency caps.

    Coordination happens through the shared cache: "num_running" counts jobs
    currently generating, "queue" holds pending job ids and "qsize" mirrors
    its length. RedLock serializes access across workers.
    """
    # Wait for a free generation slot, then claim it atomically.
    can_go_in = False
    while not can_go_in:
        async with RedLock(app.cache, "num_running", 1000):
            num_running = await app.cache.get("num_running")
            if num_running < MAX_SIZE_CONCURRENT:
                num_running += 1
                await app.cache.set("num_running", num_running)
                can_go_in = True
        await asyncio.sleep(0.5)
    # Pop the next job id (if any) and publish the new queue size.
    should_run = True
    async with RedLock(app.cache, "qsize", 1000):
        queue = await app.cache.get("queue")
        if len(queue) == 0:
            should_run = False
        else:
            job_id = queue.popleft()
        await app.cache.set("qsize", len(queue))
        await app.cache.set("queue", queue)

    # Run the job outside the locks so other consumers can proceed.
    if should_run:
        job = await get_job(job_id)
        job.start_time = time.time()
        await set_job(job_id, job)
        # await get_random_images(job)
        await get_stable_images(job)

    # Release the generation slot (clamped at zero for safety).
    async with RedLock(app.cache, "num_running", 1000):
        num_running = await app.cache.get("num_running")
        await app.cache.set("num_running", max(num_running - 1, 0))
578 |
579 |
async def handle_images_request(prompt: str, user_id: str):
    """Enqueue a new generation job; return its id, or None when the queue is full.

    The Job is stored (set_job) *before* its id is appended to the queue so
    a consumer can never pop an id whose Job object is missing.
    """
    async with RedLock(app.cache, f"qsize", 1000):
        qsize = await app.cache.get("qsize")
        if qsize >= MAX_SIZE_IN_QUEUE:
            return None
        job = Job(prompt=prompt, user_id=user_id)
        await set_job(job.job_id, job)
        queue = await app.cache.get("queue")
        queue.append(job.job_id)
        await app.cache.set("qsize", len(queue))
        await app.cache.set("queue", queue)
    return job.job_id
592 |
593 |
def remove_punctuation_and_white_spaces(prompt_word):
    """Strip punctuation characters and surrounding whitespace from one word."""
    punctuation_table = str.maketrans('', '', string.punctuation)
    return prompt_word.translate(punctuation_table).strip()
596 |
597 |
def invalid_prompt(prompt):
    """Return True when *prompt* contains an illegal token or an NSFW phrase.

    Multi-word NSFW entries are matched as substrings of the whole prompt;
    single-word entries must match an entire prompt word. Each word is
    compared both with punctuation stripped and verbatim — the second check
    matters when the NSFW entry itself contains punctuation.
    """
    prompt = prompt.lower().strip()
    if any(illegal_token in prompt for illegal_token in illegal_tokens):
        return True
    for nsfw_phrase in nsfw_words:
        if len(nsfw_phrase.split(" ")) > 1:
            # Phrase entry: substring match anywhere in the prompt.
            if nsfw_phrase.lower() in prompt:
                return True
        else:
            # Single-word entry: compare against each whitespace-split token.
            nsfw_word = nsfw_phrase.strip().lower()
            prompt_words = prompt.split(" ")
            for prompt_word in prompt_words:
                if remove_punctuation_and_white_spaces(prompt_word) == nsfw_word:
                    return True
                if prompt_word == nsfw_word:
                    return True
    return False
615 |
616 |
@app.websocket("/ws")
async def get_images(websocket: WebSocket):
    """WebSocket endpoint driving one generation request end to end.

    Receives ``{"user_id", "prompt"}``, enqueues a job, spawns consumer
    tasks, then streams progress updates until the job finishes, fails, or
    the socket drops. Roughly 10% of successful batches also carry a fixed
    control image used for quality monitoring.
    """
    await websocket.accept()
    json_data = await websocket.receive_json()
    user_id, prompt = json_data["user_id"], json_data["prompt"]
    user_num_generated = get_num_images_per_user_last_week(user_id)
    logger.info(f"Request: {prompt=} | {user_id=} | {user_num_generated=}")
    job_id = await handle_images_request(prompt, user_id)

    if job_id is None or user_id in BLOCKED_IDS or invalid_prompt(prompt):
        if user_id in BLOCKED_IDS:
            # Slow-walk blocked users instead of failing fast.
            await asyncio.sleep(60)
        await websocket.send_json({"status": "error"})
    elif user_num_generated >= MAX_IMAGES_PER_USER_PER_WEEK:
        await websocket.send_json({"status": "limit"})
    else:
        # Two consumers: one for this job, one to help drain the queue.
        asyncio.create_task(consumer())
        asyncio.create_task(consumer())
        is_finished = False
        num_queued = 0
        while not is_finished:
            job = await get_job(job_id)
            is_finished = job.status in ["finished", "failed"]
            elapsed_time = time.time() - job.start_time
            estimated_time = await app.cache.get("estimated_running_time")
            progress_text = f"Generating |"
            if job.status == "queued":
                queue = await app.cache.get("queue")
                if job_id not in queue:
                    # The job vanished from the queue; give it a few polls to
                    # be picked up before declaring it failed.
                    logger.warning(f"job {job} job_id {job_id} not in queue {queue}")
                    await asyncio.sleep(1)
                    num_queued += 1
                    if num_queued > 5:
                        job.status = "failed"
                        await set_job(job_id, job)
                    continue
                queue_idx = queue.index(job_id)
                queue_real_position = (queue_idx // MAX_SIZE_CONCURRENT) + 1
                estimated_time = estimated_time * queue_real_position
                progress_text = f"Queue position: {queue_idx + 1}/{len(queue)} |"
            reported_estimated_time = estimated_time * 1.5
            progress_text += f" {round(elapsed_time, 1)}/{round(reported_estimated_time, 1)}s"
            job.progress = int(elapsed_time * 100 / reported_estimated_time) % 101
            message = {"status": job.status, "progress": job.progress, "progress_text": progress_text}
            try:
                if job.status in ["running", "queued"]:
                    await websocket.send_json(message)
                    await asyncio.sleep(0.5)
                elif job.status == "failed" or job_id not in job_id2images:
                    logger.error(
                        f"Job {job} {job_id} failed - {job_id} in job_id2images = {job_id in job_id2images} | {os.getpid()=}")
                    await websocket.send_json({"status": "failed"})
                else:
                    # print(job)
                    await websocket.send_json(message)
                    message["images"] = job_id2images[job_id]
                    message["image_uids"] = job.image_uids
                    print(type(message["images"][0]), message["images"][0][:5])
                    if random.uniform(0, 1) < 0.1:
                        # ~10% of batches carry the known control image.
                        message["images"].append(control_image_bytes.decode("utf-8"))
                        message["image_uids"].append(CONTROL_IMAGE_UID)
                    await websocket.send_json(message)
                    await set_job(job_id, job)
                    # Exponential moving average of observed run time.
                    await app.cache.set("estimated_running_time", 0.5 * elapsed_time + 0.5 * estimated_time)
                    # logger.debug(f"estimated running time {0.5 * elapsed_time + 0.5 * estimated_time:.2f}")
            except Exception:
                # Was a bare `except:`, which also swallowed CancelledError
                # and masked task cancellation during shutdown.
                logger.error(f"Failed to send message {message}")
                logger.error(traceback.format_exc())
                break
    if job_id is not None:
        await clean_job(job_id)
    await websocket.close()
689 |
690 |
@app.post("/tweet/")
async def tweet_images(tweet: TweetRequest, request: Request):
    """Post a generated image to Twitter and return a pre-filled share text."""
    user_id = request.session.get('user_id')
    if not user_id:
        return RedirectResponse(url='/')

    if user_id in BLOCKED_IDS:
        # Slow-walk blocked users and pretend success.
        await asyncio.sleep(60)
        return "success"

    image_uid = tweet.image_uid
    prompt = tweet.prompt
    image_data = tweet.image_data
    # NOTE(review): this overrides the session user id with the
    # client-supplied one from the request body — verify this is intended,
    # as it lets a client attribute tweets to an arbitrary user id.
    user_id = tweet.user_id
    logger.debug(f"TWEET - inside tweet images")
    # Decode the base64 payload and stage it on disk for the media upload.
    image = Image.open(BytesIO(base64.b64decode(image_data)))
    os.makedirs(f"images", exist_ok=True)
    image.save(f"images/{image_uid}.png")
    tweet_text = f"""{prompt}
https://pickapic.io/
Generate cool images for free and contribute to open science!"""
    logger.debug(f"tweeting {tweet_text=}")
    logger.debug(f"TWEET - before tweeting {tweet_text=}")
    status = twitter_api.update_status_with_media(tweet_text, f"images/{image_uid}.png")
    logger.debug(f"TWEET - after tweeting")
    # Remove the staging file once the upload succeeded.
    image_path = f"images/{image_uid}.png"
    if os.path.exists(image_path):
        os.remove(image_path)
    # Build a web-intent style string (%23 = '#', %40 = '@', %0A = newline).
    tweet_text = f"{status.text}\n %23PickaPic\n %40PickaPicTweet"
    tweet_text = tweet_text.replace(' ', '+').replace('\n', '%0A')
    logger.debug(f"TWEET - returning text - {tweet_text=}")
    return {"status": "ok", "tweet_text": tweet_text}
723 |
724 |
@app.post("/update_clicked_image/")
async def update_clicked_image(data: UpdateImageRequest, background_tasks: BackgroundTasks, request: Request):
    """Persist the user's preference ranking and bump their score."""
    user_id = request.session.get('user_id')
    if not user_id:
        return RedirectResponse(url='/')
    if user_id in BLOCKED_IDS:
        # Throttle blocked users and drop the ranking silently.
        await asyncio.sleep(60)
        ip = request.client.host
        logger.info(f"IP of blocked user {user_id}: {ip=}")
        return "success"
    uids = data.image_uids
    # Slots 2 and 3 are None when fewer than four images were shown.
    ranking_data = RankingData(
        user_id=user_id,
        image_0_uid=uids[0],
        image_1_uid=uids[1],
        image_2_uid=uids[2] if len(uids) > 2 else None,
        image_3_uid=uids[3] if len(uids) > 3 else None,
        best_image_uid=data.image_uid,
        prompt=data.prompt,
    )
    # Database writes happen off the request path.
    background_tasks.add_task(add_ranking, ranking_data)
    background_tasks.add_task(increment_user_score, user_id)
    ip = request.client.host
    logger.debug(f"{user_id=} {ip=} clicked image {data.image_uid=}")
    return "success"
750 |
751 |
@app.post("/update_download_image/")
async def update_download_image(request: Request, data: UpdateImageRequest, background_tasks: BackgroundTasks):
    """Record that the user downloaded an image (persisted asynchronously)."""
    user_id = request.session.get('user_id')
    if not user_id:
        return RedirectResponse(url='/')
    if user_id in BLOCKED_IDS:
        # Throttle blocked users and drop the event silently.
        await asyncio.sleep(60)
        return "success"
    image_uid = data.image_uid
    background_tasks.add_task(add_download, DownloadData(user_id, image_uid, data.prompt))
    logger.debug(f"Downloaded {image_uid}")
    return "success"
765 |
766 |
def update_urls():
    """Probe every configured backend and keep only those answering HTTP 200.

    Runs periodically on the background scheduler; rewrites
    ``app.backend_urls`` in place and logs whenever the pool shrank.
    """
    working_urls = []
    bad_urls = []
    for candidate in BACKEND_URLS:
        try:
            health_check = requests.get(candidate.replace("generate", ""))
        except Exception:
            bad_urls.append(candidate)
            continue
        if health_check.status_code == 200:
            working_urls.append(candidate)
    app.backend_urls = working_urls
    if len(working_urls) < len(BACKEND_URLS):
        logger.debug(
            f"Updated: {len(app.backend_urls)}/{len(BACKEND_URLS)}\nWorking URLs: {app.backend_urls}\nBad URLs: {bad_urls}")
782 |
783 |
def clean_jobs():
    """Periodic task: upload finished jobs' images, persist metadata, free memory.

    Runs on the background scheduler thread. The repeated membership checks
    are deliberate: other coroutines mutate these dicts while this loop runs,
    so each job is re-validated before and after the slow S3 upload.
    """
    num_cleaned = 0
    # Snapshot only jobs whose images are still held in memory.
    job_ids = [job_id for job_id in finished_job_id2uids.keys() if job_id in job_id2images]
    # Grace period so jobs finishing right now can still be delivered to the
    # client before their images are dropped from memory.
    time.sleep(15)
    for job_id in job_ids:
        if job_id not in finished_job_id2uids or job_id not in job_id2images:
            logger.warning(
                f"Cleaning 1: in finished_job_id2uids={job_id in finished_job_id2uids} or in job_id2images={job_id in job_id2images} for {job_id}")
            continue
        uids = finished_job_id2uids[job_id]
        user_id = job_id2images_data[job_id][0].user_id
        images = job_id2images[job_id]
        # Anonymous jobs (user_id None) are discarded without persisting.
        if user_id is not None:
            upload_images(images, uids)
        if job_id not in finished_job_id2uids or job_id not in job_id2images:
            logger.warning(
                f"Cleaning 2: in finished_job_id2uids={job_id in finished_job_id2uids} or in job_id2images={job_id in job_id2images} for {job_id}")
            continue
        del job_id2images[job_id]
        if user_id is not None:
            for image_data in job_id2images_data[job_id]:
                add_image(image_data)
        del job_id2images_data[job_id]
        del finished_job_id2uids[job_id]
        num_cleaned += 1
    if 0 < len(job_ids) != num_cleaned:
        logger.warning(f"Cleaned {num_cleaned}/{len(job_ids)} jobs")
    else:
        logger.debug(f"Cleaned {num_cleaned}/{len(job_ids)} jobs")
813 |
814 |
def create_background_tasks():
    """Start the APScheduler jobs that keep the service healthy."""
    scheduler = BackgroundScheduler({'apscheduler.job_defaults.max_instances': 2})
    periodic_jobs = (
        (update_urls, 180),          # prune dead backends
        (clean_jobs, 120),           # flush finished jobs
        (update_csvs, 60 * 60 * 6),  # export CSV snapshots
    )
    for job_func, interval_seconds in periodic_jobs:
        scheduler.add_job(func=job_func, trigger="interval", seconds=interval_seconds)
    scheduler.start()
    logger.info("Started background tasks")
822 |
823 |
def upload_csv(name, df):
    """Serialize *df* to ``csvs/<name>.csv``, upload it to S3, then delete it.

    The local file is removed afterwards even when the upload fails, so
    repeated scheduler runs do not accumulate stale CSVs on disk.

    :param name: base name of the CSV (also used as the S3 key suffix).
    :param df: pandas DataFrame to export.
    """
    s3_client = boto3.client(
        's3',
        aws_access_key_id=AWS_ACCESS_KEY,
        aws_secret_access_key=AWS_SECRET_KEY,
    )
    csv_dir = "csvs"
    os.makedirs(csv_dir, exist_ok=True)
    path = f"{csv_dir}/{name}.csv"
    df.to_csv(path)
    try:
        if os.path.exists(path):
            s3_client.upload_file(path,
                                  BUCKET_NAME,
                                  path,
                                  ExtraArgs=S3_EXTRA_ARGS)
            logger.info(f"Uploaded {path}")
        else:
            # Previously said "images_df" regardless of which CSV failed.
            logger.warning(f"Couldn't upload {name} - path does not exist={os.path.exists(path)}")
    except Exception as e:
        logger.error(f"Couldn't upload path {path} - {e}")
    finally:
        # Always clean up the temporary local file.
        if os.path.exists(path):
            os.remove(path)
848 |
849 |
def update_csvs():
    """Export images and rankings created since 2023-05-05 to S3 as CSVs."""
    logger.info("Updating CSVs")
    start = "2023-05-05 00:00:00"
    for csv_name, fetch in (("images", get_all_images), ("rankings", get_all_rankings)):
        upload_csv(csv_name, fetch(start))
856 |
857 |
@app.on_event("startup")
# @repeat_every(seconds=60 * 15)
async def startapp():
    """One-time startup: wire the Redis cache, ensure DB tables exist,
    start the background schedulers, and reset queue bookkeeping.

    The nested RedLock guards on "qsize" and "num_running" serialize this
    section, presumably so concurrent workers do not initialize shared
    state at the same time — TODO confirm against the deployment setup.
    """
    print("Starting app")
    # Shared Redis-backed cache; PickleSerializer so arbitrary Python
    # objects (e.g. the deque stored below) can round-trip.
    app.cache = Cache(Cache.REDIS, serializer=PickleSerializer(), namespace="main", endpoint=url.hostname,
                      port=url.port,
                      password=url.password, timeout=0)
    async with RedLock(app.cache, "qsize", 1000):
        async with RedLock(app.cache, "num_running", 1000):
            # Idempotent: each create_* only creates its table if missing.
            create_user_table()
            create_image_table()
            create_rankings_table()
            create_downloads_table()
            create_user_score_table()
            create_background_tasks()
            update_csvs()
            # Per-process job bookkeeping dicts (not shared via the cache).
            global job_id2images, job_id2images_data, finished_job_id2uids
            job_id2images = {}
            job_id2images_data = {}
            finished_job_id2uids = {}
            # Reset shared counters/queue to a clean state on every boot.
            await app.cache.set("backend_url_idx", 0)
            await app.cache.set("num_running", 0)
            await app.cache.set("qsize", 0)
            await app.cache.set("queue", collections.deque())
            await app.cache.set("estimated_running_time", 30)
883 |
884 |
@app.get('/metrics')
async def downloads(request: Request):
    """Return simple row-count metrics; requires a logged-in session."""
    if not request.session.get('user_id'):
        # Unauthenticated: redirect to the landing page.
        return RedirectResponse(url='/')
    return {
        "num_downloads": get_num_downloads(),
        "num_rankings": get_num_rankings(),
        "num_users": get_num_users(),
        "num_images": get_num_images(),
    }
900 |
--------------------------------------------------------------------------------
/nsfw_words.json:
--------------------------------------------------------------------------------
1 | [
2 | "2g1c",
3 | "2 girls 1 cup",
4 | "acrotomophilia",
5 | "alabama hot pocket",
6 | "alaskan pipeline",
7 | "anal",
8 | "anilingus",
9 | "anus",
10 | "apeshit",
11 | "arsehole",
12 | "ass",
13 | "asshole",
14 | "assmunch",
15 | "auto erotic",
16 | "autoerotic",
17 | "babeland",
18 | "baby batter",
19 | "baby juice",
20 | "ball gag",
21 | "ball gravy",
22 | "ball kicking",
23 | "ball licking",
24 | "ball sack",
25 | "ball sucking",
26 | "bangbros",
27 | "bangbus",
28 | "bareback",
29 | "barely legal",
30 | "barenaked",
31 | "bastard",
32 | "bastardo",
33 | "bastinado",
34 | "bbw",
35 | "bdsm",
36 | "beaner",
37 | "beaners",
38 | "beaver cleaver",
39 | "beaver lips",
40 | "beastiality",
41 | "bestiality",
42 | "big black",
43 | "big breasts",
44 | "big knockers",
45 | "big tits",
46 | "bimbos",
47 | "birdlock",
48 | "bitch",
49 | "bitches",
50 | "black cock",
51 | "blonde action",
52 | "blonde on blonde action",
53 | "blowjob",
54 | "blow job",
55 | "blow your load",
56 | "blue waffle",
57 | "blumpkin",
58 | "bollocks",
59 | "bondage",
60 | "boner",
61 | "boob",
62 | "boobs",
63 | "booty call",
64 | "brown showers",
65 | "brunette action",
66 | "bukkake",
67 | "bulldyke",
68 | "bullet vibe",
69 | "bullshit",
70 | "bung hole",
71 | "bunghole",
72 | "busty",
73 | "butt",
74 | "buttcheeks",
75 | "butthole",
76 | "camel toe",
77 | "camgirl",
78 | "camslut",
79 | "camwhore",
80 | "carpet muncher",
81 | "carpetmuncher",
82 | "chocolate rosebuds",
83 | "cialis",
84 | "circlejerk",
85 | "cleveland steamer",
86 | "clit",
87 | "clitoris",
88 | "clover clamps",
89 | "clusterfuck",
90 | "cock",
91 | "cocks",
92 | "coprolagnia",
93 | "coprophilia",
94 | "cornhole",
95 | "coon",
96 | "coons",
97 | "creampie",
98 | "cum",
99 | "cumming",
100 | "cumshot",
101 | "cumshots",
102 | "cunnilingus",
103 | "cunt",
104 | "darkie",
105 | "date rape",
106 | "daterape",
107 | "deep throat",
108 | "deepthroat",
109 | "dendrophilia",
110 | "dick",
111 | "dildo",
112 | "dingleberry",
113 | "dingleberries",
114 | "dirty pillows",
115 | "dirty sanchez",
116 | "doggie style",
117 | "doggiestyle",
118 | "doggy style",
119 | "doggystyle",
120 | "dog style",
121 | "dolcett",
122 | "domination",
123 | "dominatrix",
124 | "dommes",
125 | "donkey punch",
126 | "double dong",
127 | "double penetration",
128 | "dp action",
129 | "dry hump",
130 | "dvda",
131 | "eat my ass",
132 | "ecchi",
133 | "ejaculation",
134 | "erotic",
135 | "erotism",
136 | "escort",
137 | "eunuch",
138 | "fag",
139 | "faggot",
140 | "fecal",
141 | "felch",
142 | "fellatio",
143 | "feltch",
144 | "female squirting",
145 | "femdom",
146 | "figging",
147 | "fingerbang",
148 | "fingering",
149 | "fisting",
150 | "foot fetish",
151 | "footjob",
152 | "frotting",
153 | "fuck",
154 | "fuck buttons",
155 | "fuckin",
156 | "fucking",
157 | "fucktards",
158 | "fudge packer",
159 | "fudgepacker",
160 | "futanari",
161 | "gangbang",
162 | "gang bang",
163 | "gay sex",
164 | "genitals",
165 | "giant cock",
166 | "girl on",
167 | "girl on top",
168 | "girls gone wild",
169 | "goatcx",
170 | "goatse",
171 | "god damn",
172 | "gokkun",
173 | "golden shower",
174 | "goodpoop",
175 | "goo girl",
176 | "goregasm",
177 | "grope",
178 | "group sex",
179 | "g-spot",
180 | "guro",
181 | "hand job",
182 | "handjob",
183 | "hard core",
184 | "hardcore",
185 | "hentai",
186 | "homoerotic",
187 | "honkey",
188 | "hooker",
189 | "horny",
190 | "hot carl",
191 | "hot chick",
192 | "how to kill",
193 | "how to murder",
194 | "huge fat",
195 | "humping",
196 | "incest",
197 | "intercourse",
198 | "jack off",
199 | "jail bait",
200 | "jailbait",
201 | "jelly donut",
202 | "jerk off",
203 | "jigaboo",
204 | "jiggaboo",
205 | "jiggerboo",
206 | "jizz",
207 | "juggs",
208 | "kike",
209 | "kinbaku",
210 | "kinkster",
211 | "kinky",
212 | "knobbing",
213 | "leather restraint",
214 | "leather straight jacket",
215 | "lemon party",
216 | "livesex",
217 | "lolita",
218 | "lovemaking",
219 | "make me come",
220 | "male squirting",
221 | "masturbate",
222 | "masturbating",
223 | "masturbation",
224 | "menage a trois",
225 | "milf",
226 | "missionary position",
227 | "mong",
228 | "motherfucker",
229 | "mound of venus",
230 | "mr hands",
231 | "muff diver",
232 | "muffdiving",
233 | "nambla",
234 | "nawashi",
235 | "negro",
236 | "neonazi",
237 | "nigga",
238 | "nigger",
239 | "nig nog",
240 | "nimphomania",
241 | "nipple",
242 | "nipples",
243 | "nsfw",
244 | "nsfw images",
245 | "nude",
246 | "nudity",
247 | "nutten",
248 | "nympho",
249 | "nymphomania",
250 | "octopussy",
251 | "omorashi",
252 | "one cup two girls",
253 | "one guy one jar",
254 | "orgasm",
255 | "orgy",
256 | "paedophile",
257 | "paki",
258 | "panties",
259 | "panty",
260 | "pedobear",
261 | "pedophile",
262 | "pegging",
263 | "penis",
264 | "phone sex",
265 | "piece of shit",
266 | "pikey",
267 | "pissing",
268 | "piss pig",
269 | "pisspig",
270 | "playboy",
271 | "pleasure chest",
272 | "pole smoker",
273 | "ponyplay",
274 | "poof",
275 | "poon",
276 | "poontang",
277 | "punany",
278 | "poop chute",
279 | "poopchute",
280 | "porn",
281 | "porno",
282 | "pornography",
283 | "prince albert piercing",
284 | "pthc",
285 | "pubes",
286 | "pussy",
287 | "queaf",
288 | "queef",
289 | "quim",
290 | "raghead",
291 | "raging boner",
292 | "rape",
293 | "raping",
294 | "rapist",
295 | "rectum",
296 | "reverse cowgirl",
297 | "rimjob",
298 | "rimming",
299 | "rosy palm",
300 | "rosy palm and her 5 sisters",
301 | "rusty trombone",
302 | "sadism",
303 | "santorum",
304 | "scat",
305 | "schlong",
306 | "scissoring",
307 | "semen",
308 | "sex",
309 | "sexcam",
310 | "sexo",
311 | "sexy",
312 | "sexual",
313 | "sexually",
314 | "sexuality",
315 | "shaved beaver",
316 | "shaved pussy",
317 | "shemale",
318 | "shibari",
319 | "shit",
320 | "shitblimp",
321 | "shitty",
322 | "shota",
323 | "shrimping",
324 | "skeet",
325 | "slanteye",
326 | "slut",
327 | "s&m",
328 | "smut",
329 | "snatch",
330 | "snowballing",
331 | "sodomize",
332 | "sodomy",
333 | "spastic",
334 | "spic",
335 | "splooge",
336 | "splooge moose",
337 | "spooge",
338 | "spread legs",
339 | "spunk",
340 | "strap on",
341 | "strapon",
342 | "strappado",
343 | "strip club",
344 | "style doggy",
345 | "suck",
346 | "sucks",
347 | "suicide girls",
348 | "sultry women",
349 | "swastika",
350 | "swinger",
351 | "tainted love",
352 | "taste my",
353 | "tea bagging",
354 | "threesome",
355 | "throating",
356 | "thumbzilla",
357 | "tied up",
358 | "tight white",
359 | "tit",
360 | "tits",
361 | "titties",
362 | "titty",
363 | "tongue in a",
364 | "topless",
365 | "tosser",
366 | "towelhead",
367 | "tranny",
368 | "tribadism",
369 | "tub girl",
370 | "tubgirl",
371 | "tushy",
372 | "twat",
373 | "twink",
374 | "twinkie",
375 | "two girls one cup",
376 | "undressing",
377 | "upskirt",
378 | "urethra play",
379 | "urophilia",
380 | "vagina",
381 | "venus mound",
382 | "viagra",
383 | "vibrator",
384 | "violet wand",
385 | "vorarephilia",
386 | "voyeur",
387 | "voyeurweb",
388 | "voyuer",
389 | "vulva",
390 | "wank",
391 | "wetback",
392 | "wet dream",
393 | "white power",
394 | "whore",
395 | "worldsex",
396 | "wrapping men",
397 | "wrinkled starfish",
398 | "xx",
399 | "xxx",
400 | "yaoi",
401 | "yellow showers",
402 | "yiffy",
403 | "zoophilia",
404 | "\ud83d\udd95",
405 | "seductive",
406 | "sexy",
407 | "nude",
408 | "naked",
409 | "nudity",
410 | "naked",
411 | "busty",
412 | "boobs",
413 | "shirtless",
414 | "nude",
415 | "revealing",
416 | "naked",
417 | "breast",
418 | "amouranth",
419 | "nigger",
420 | "sussy",
421 | "tits",
422 | "lingerie",
423 | "sex",
424 | "bikini",
425 | "nazi",
426 | "underwear",
427 | "stomach",
428 | "thong",
429 | "fuck",
430 | "lola myluv",
431 | "elsa jean",
432 | "porn",
433 | "courtesan",
434 | "b00bs",
435 | "undressed",
436 | "anal",
437 | "blowjo",
438 | "p0rn",
439 | "sxy",
440 | "blowjo",
441 | "cumshot",
442 | "vagina",
443 | "horny",
444 | "pussy",
445 | "onlyfans",
446 | "crotch",
447 | "gagg",
448 | "nudist",
449 | "lecherous",
450 | "voluptuous",
451 | "buxom",
452 | "lustful",
453 | "genitals",
454 | "nymph",
455 | "camgirl",
456 | "exposed",
457 | "babe",
458 | "hitler",
459 | "nakey",
460 | "suck",
461 | "blowjob",
462 | "unclothed",
463 | "wearing nothing",
464 | "breasted",
465 | "nsfw",
466 | "erotic",
467 | "hentai",
468 | "topless",
469 | "nipple",
470 | "without clothes",
471 | "playboy",
472 | "b\u00f8\u00f8bs",
473 | "booty",
474 | "leotard",
475 | "nudist",
476 | "scantily clad",
477 | "salacious",
  "minimal clothes",
479 | "see through",
480 | "bulge",
481 | "onahole",
482 | "loincloth",
483 | "man posing",
484 | "jockstrap",
485 | "bra",
486 | "shower",
487 | "femdom",
488 | "harvey weinstein",
489 | "emma watson",
490 | "curvaceous",
491 | "sensuous",
492 | "chest",
493 | "no clothes",
494 | "scantily",
495 | "undies",
496 | "bulging",
497 | "swimsuit",
498 | "belly",
499 | "sweatpants",
500 | "suntanning",
501 | "naughty",
502 | "jada stevens",
503 | "cleavage",
504 | "panty",
505 | "virgin",
506 | "pusy",
507 | "catgirl",
508 | "nue",
509 | "nue",
510 | "loli",
511 | "shota",
512 | "futanari",
513 | "hentai",
514 | "ecchi",
515 | "camboy",
516 | "catboy",
517 | "without cloth",
518 | "unclothes",
519 | "no clothing",
520 | "bare skin",
521 | "bareskinned",
522 | "hunk",
523 | "pubes",
524 | "bukkake",
525 | "1girl",
526 | "bondaged",
527 | "big bust",
528 | "under-wear",
529 | "intimating",
530 | "big boops",
531 | "nippies",
532 | "nudify",
533 | "onanie",
534 | "clothesless",
535 | "spread legs",
536 | "desnuda",
537 | "penetration",
538 | "BDSM",
539 | "manspreading",
540 | "transparent",
541 | "see-through",
542 | "skinny dipping",
543 | "bathing",
544 | "girl natural",
545 | "naturist",
546 | "wet t-shirt contest",
547 | "Busting",
548 | "bosom",
549 | "masterbating",
550 | "masterbate",
551 | "jacked",
552 | "no skirt",
553 | "legs spread",
554 | "bust",
555 | "seducing",
556 | "seethrough"
557 | ]
--------------------------------------------------------------------------------
/requirements.txt:
--------------------------------------------------------------------------------
1 | aiocache==0.11.1
2 | aiohttp==3.7.4.post0
3 | aiomcache==0.8.0
4 | aioredis==1.3.1
5 | aiosignal==1.3.1
6 | anyio==3.6.2
7 | APScheduler==3.9.1.post1
8 | async-timeout==3.0.1
9 | attrs==22.2.0
10 | Authlib==1.2.0
11 | boto3==1.18.16
12 | botocore==1.21.65
13 | cffi==1.15.1
14 | chardet==4.0.0
15 | charset-normalizer==2.1.1
16 | click==8.1.3
17 | cryptography==38.0.4
18 | discord.py==1.7.3
19 | dnspython==2.2.1
20 | email-validator==1.3.0
21 | fastapi==0.88.0
22 | fastapi-utils==0.2.1
23 | frozenlist==1.3.3
24 | gunicorn==20.1.0
25 | h11==0.14.0
26 | hiredis==2.1.0
27 | httpcore==0.16.3
28 | httptools==0.5.0
29 | httpx==0.23.1
30 | idna==3.4
31 | itsdangerous==2.1.2
32 | Jinja2==3.1.2
33 | jmespath==0.10.0
34 | MarkupSafe==2.1.1
35 | multidict==6.0.4
36 | numpy==1.24.0
37 | oauthlib==3.2.2
38 | orjson==3.8.3
39 | pandas==1.5.2
40 | Pillow==9.3.0
41 | psycopg2==2.9.5
42 | pycparser==2.21
43 | pydantic==1.10.2
44 | python-dateutil==2.8.2
45 | python-dotenv==0.21.0
46 | python-multipart==0.0.5
47 | pytz==2022.7
48 | pytz-deprecation-shim==0.1.0.post0
49 | PyYAML==6.0
50 | requests==2.28.1
51 | requests-oauthlib==1.3.1
52 | rfc3986==1.5.0
53 | s3transfer==0.5.2
54 | six==1.16.0
55 | sniffio==1.3.0
56 | SQLAlchemy==1.4.46
57 | starlette==0.22.0
58 | starlette-discord==0.2.1
59 | tweepy==4.12.1
60 | typing_extensions==4.4.0
61 | tzdata==2022.7
62 | tzlocal==4.2
63 | ujson==5.6.0
64 | urllib3==1.26.13
65 | uvicorn==0.20.0
66 | uvloop==0.17.0
67 | watchfiles==0.18.1
68 | websockets==10.4
69 | yarl==1.8.2
--------------------------------------------------------------------------------
/sql_db/__init__.py:
--------------------------------------------------------------------------------
1 | import os
2 | import psycopg2
3 |
4 | DATABASE_URL = os.environ['DATABASE_URL']
5 |
6 |
def get_num_rows(table_name: str) -> int:
    """Return the number of rows in *table_name*.

    NOTE: the table name is interpolated directly into the SQL string;
    only call this with trusted, hard-coded table names.
    """
    conn = psycopg2.connect(DATABASE_URL, sslmode='require')
    try:
        cursor = conn.cursor()
        cursor.execute(f"SELECT COUNT(*) FROM {table_name}")
        num_rows = cursor.fetchone()[0]
        cursor.close()
        return num_rows
    finally:
        # Release the connection even if the query raises.
        conn.close()
15 |
--------------------------------------------------------------------------------
/sql_db/downloads.py:
--------------------------------------------------------------------------------
1 | import psycopg2
2 | from dataclasses import dataclass
3 | import pandas as pd
4 |
5 | from sql_db import DATABASE_URL, get_num_rows
6 | from utils.logging_utils import logger
7 |
8 |
def create_downloads_table():
    """Create the ``downloads`` table if it does not already exist."""
    conn = psycopg2.connect(DATABASE_URL, sslmode='require')
    cursor = conn.cursor()
    cursor.execute("select exists(select * from information_schema.tables where table_name=%s)", ('downloads',))
    # Only create when the existence probe comes back False.
    if not cursor.fetchone()[0]:
        cursor.execute(
            '''
            CREATE TABLE downloads (download_id SERIAL PRIMARY KEY,
                                    created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP NOT NULL,
                                    user_id INTEGER,
                                    image_uid TEXT,
                                    prompt TEXT,
                                    FOREIGN KEY(user_id) REFERENCES users(user_id))
            ''')
        conn.commit()
        logger.info("Created table downloads")
    cursor.close()
    conn.close()
30 |
31 |
@dataclass
class DownloadSchema:
    """One full row of the ``downloads`` table, in column order."""
    download_id: str
    created_at: str
    user_id: int
    image_uid: str
    prompt: str
39 |
40 |
@dataclass
class DownloadData:
    """Payload for inserting a new download event (DB fills id/timestamp)."""
    user_id: int
    image_uid: str
    prompt: str
46 |
47 |
def add_download(download: DownloadData):
    """Insert a download event into the ``downloads`` table.

    Single quotes in the prompt are replaced with the literal token
    ``[single_quote]`` to match how prompts are stored elsewhere in the DB.
    """
    prompt = download.prompt.replace("'", "[single_quote]")
    conn = psycopg2.connect(DATABASE_URL, sslmode='require')
    try:
        cursor = conn.cursor()
        cursor.execute("INSERT INTO downloads (user_id, image_uid, prompt) VALUES (%s, %s, %s)",
                       (download.user_id, download.image_uid, prompt))
        conn.commit()
        cursor.close()
    finally:
        # Release the connection even if the insert fails.
        conn.close()
57 |
58 |
def get_all_downloads() -> pd.DataFrame:
    """Return every row of the ``downloads`` table as a DataFrame."""
    conn = psycopg2.connect(DATABASE_URL, sslmode='require')
    cursor = conn.cursor()
    cursor.execute("SELECT * FROM downloads")
    # Local was previously (misleadingly) named "rankings" — copy-paste slip.
    downloads = cursor.fetchall()
    cursor.close()
    conn.close()
    df = pd.DataFrame(downloads,
                      columns=['download_id', 'created_at', 'user_id', 'image_uid', 'prompt'])
    return df
69 |
70 |
def get_num_downloads() -> int:
    """Total number of rows in the ``downloads`` table."""
    return get_num_rows("downloads")
74 |
--------------------------------------------------------------------------------
/sql_db/images.py:
--------------------------------------------------------------------------------
1 | from dataclasses import dataclass
2 | import pandas as pd
3 | import psycopg2
4 |
5 | from sql_db import DATABASE_URL, get_num_rows
6 | from utils.logging_utils import logger
7 |
8 |
def create_image_table():
    """Create the ``images`` table if it does not already exist."""
    conn = psycopg2.connect(DATABASE_URL, sslmode='require')
    cursor = conn.cursor()
    cursor.execute("select exists(select * from information_schema.tables where table_name=%s)", ('images',))
    # Only create when the existence probe comes back False.
    if not cursor.fetchone()[0]:
        cursor.execute(
            '''
            CREATE TABLE images (image_id SERIAL PRIMARY KEY,
                                 created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP NOT NULL,
                                 image_uid TEXT UNIQUE,
                                 user_id INTEGER,
                                 prompt TEXT,
                                 negative_prompt TEXT,
                                 seed INTEGER,
                                 gs REAL,
                                 steps INTEGER,
                                 idx INTEGER,
                                 num_generated INTEGER,
                                 scheduler_cls TEXT,
                                 model_id TEXT,
                                 FOREIGN KEY(user_id) REFERENCES users(user_id))
            ''')
        conn.commit()
        logger.info("Created table images")
    cursor.close()
    conn.close()
37 |
38 |
@dataclass(frozen=True)
class ImageData:
    """Metadata for one generated image; frozen so instances are hashable."""
    image_uid: str
    user_id: int
    prompt: str
    negative_prompt: str
    seed: int
    gs: float
    steps: int
    idx: int
    num_generated: int
    scheduler_cls: str
    model_id: str
52 |
53 |
def add_image(image_data: ImageData):
    """Insert an image row, silently skipping uids that are already stored.

    Single quotes in the prompt are replaced with the literal token
    ``[single_quote]`` to match how prompts are stored elsewhere in the DB.
    """
    prompt = image_data.prompt.replace("'", "[single_quote]")
    image_uid = image_data.image_uid
    conn = psycopg2.connect(DATABASE_URL, sslmode='require')
    try:
        cursor = conn.cursor()
        cursor.execute("SELECT * FROM images WHERE image_uid=%s", (image_uid,))
        # image_uid is UNIQUE; only insert when no row exists yet.
        if cursor.fetchone() is None:
            cursor.execute(
                "INSERT INTO images (image_uid, user_id, prompt, negative_prompt, seed, gs, steps, idx, num_generated, scheduler_cls, model_id) VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)",
                (image_uid, image_data.user_id, prompt, image_data.negative_prompt, image_data.seed, image_data.gs,
                 image_data.steps, image_data.idx, image_data.num_generated, image_data.scheduler_cls, image_data.model_id))
        conn.commit()
        cursor.close()
    finally:
        # Release the connection even if the insert fails.
        conn.close()
74 |
75 |
def get_all_images(start_date=None) -> pd.DataFrame:
    """Return image rows as a DataFrame, optionally filtered to rows
    created on or after *start_date*."""
    conn = psycopg2.connect(DATABASE_URL, sslmode='require')
    cursor = conn.cursor()
    if start_date is None:
        cursor.execute("SELECT * FROM images")
    else:
        cursor.execute("SELECT * FROM images WHERE created_at >= %s", (start_date,))
    rows = cursor.fetchall()
    cursor.close()
    conn.close()
    columns = ['image_id', 'created_at', 'image_uid', 'user_id', 'prompt',
               'negative_prompt', 'seed', 'gs', 'steps', 'idx',
               'num_generated', 'scheduler_cls', 'model_id']
    return pd.DataFrame(rows, columns=columns)
100 |
101 |
def get_num_images() -> int:
    """Total number of rows in the ``images`` table."""
    return get_num_rows("images")
105 |
106 |
def get_num_images_per_user_last_week(user_id):
    """Count images generated by *user_id* in the last 7 days.

    Counts in SQL instead of fetching every matching row and building a
    DataFrame just to take its length.
    """
    conn = psycopg2.connect(DATABASE_URL, sslmode='require')
    cursor = conn.cursor()
    cursor.execute(
        "SELECT COUNT(*) FROM images WHERE user_id=%s AND created_at >= NOW() - INTERVAL '7 days'",
        (user_id,))
    count = cursor.fetchone()[0]
    cursor.close()
    conn.close()
    return count
128 |
--------------------------------------------------------------------------------
/sql_db/rankings.py:
--------------------------------------------------------------------------------
1 | import psycopg2
2 | from dataclasses import dataclass
3 | import pandas as pd
4 |
5 | from sql_db import DATABASE_URL, get_num_rows
6 | from utils.logging_utils import logger
7 |
8 |
def create_rankings_table():
    """Create the ``rankings`` table if it does not already exist."""
    conn = psycopg2.connect(DATABASE_URL, sslmode='require')
    cursor = conn.cursor()
    cursor.execute("select exists(select * from information_schema.tables where table_name=%s)", ('rankings',))
    # Only create when the existence probe comes back False.
    if not cursor.fetchone()[0]:
        cursor.execute(
            '''
            CREATE TABLE rankings (ranking_id SERIAL PRIMARY KEY,
                                   created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP NOT NULL,
                                   user_id INTEGER,
                                   image_0_uid TEXT,
                                   image_1_uid TEXT,
                                   image_2_uid TEXT,
                                   image_3_uid TEXT,
                                   best_image_uid TEXT,
                                   prompt TEXT,
                                   FOREIGN KEY(user_id) REFERENCES users(user_id))
            ''')
        conn.commit()
        logger.info("Created table rankings")
    cursor.close()
    conn.close()
34 |
35 |
@dataclass
class RankingSchema:
    """One full row of the ``rankings`` table, in column order."""
    ranking_id: str
    created_at: str
    user_id: int
    image_0_uid: str
    image_1_uid: str
    image_2_uid: str
    image_3_uid: str
    best_image_uid: str
    prompt: str
47 |
48 |
@dataclass
class RankingData:
    """Payload for inserting a new ranking (DB fills id/timestamp)."""
    user_id: int
    image_0_uid: str
    image_1_uid: str
    image_2_uid: str
    image_3_uid: str
    best_image_uid: str
    prompt: str
58 |
59 |
def add_ranking(ranking: RankingData):
    """Insert a ranking (four candidate uids plus the chosen best one).

    Single quotes in the prompt are replaced with the literal token
    ``[single_quote]`` to match how prompts are stored elsewhere in the DB.
    """
    prompt = ranking.prompt.replace("'", "[single_quote]")
    conn = psycopg2.connect(DATABASE_URL, sslmode='require')
    try:
        cursor = conn.cursor()
        cursor.execute(
            "INSERT INTO rankings (user_id, image_0_uid, image_1_uid, image_2_uid, image_3_uid, best_image_uid, prompt) VALUES (%s, %s, %s, %s, %s, %s, %s)",
            (ranking.user_id, ranking.image_0_uid, ranking.image_1_uid, ranking.image_2_uid, ranking.image_3_uid,
             ranking.best_image_uid, prompt))
        conn.commit()
        cursor.close()
    finally:
        # Release the connection even if the insert fails.
        conn.close()
71 |
72 |
def get_all_rankings(start_date=None) -> pd.DataFrame:
    """Return ranking rows as a DataFrame, optionally filtered to rows
    created on or after *start_date*."""
    conn = psycopg2.connect(DATABASE_URL, sslmode='require')
    cursor = conn.cursor()
    if start_date is not None:
        cursor.execute("SELECT * FROM rankings WHERE created_at >= %s", (start_date,))
    else:
        cursor.execute("SELECT * FROM rankings")
    rankings = cursor.fetchall()
    cursor.close()
    conn.close()
    # Column labels now match the table schema (image_0_uid..image_3_uid);
    # the previous labels (image_1_uid..image_4_uid) were shifted by one.
    df = pd.DataFrame(rankings,
                      columns=['ranking_id', 'created_at', 'user_id', 'image_0_uid', 'image_1_uid',
                               'image_2_uid', 'image_3_uid', 'best_image_uid', 'prompt'])
    return df
87 |
88 |
def get_num_rankings() -> int:
    """Total number of rows in the ``rankings`` table."""
    return get_num_rows("rankings")
92 |
--------------------------------------------------------------------------------
/sql_db/user_score.py:
--------------------------------------------------------------------------------
1 | from dataclasses import dataclass
2 | import pandas as pd
3 | import psycopg2
4 |
5 | from sql_db import DATABASE_URL, get_num_rows
6 | from utils.logging_utils import logger
7 |
8 |
def create_user_score_table():
    """Create the ``user_score`` table if it does not already exist."""
    conn = psycopg2.connect(DATABASE_URL, sslmode='require')
    cursor = conn.cursor()
    cursor.execute("select exists(select * from information_schema.tables where table_name=%s)", ('user_score',))
    # Only create when the existence probe comes back False.
    if not cursor.fetchone()[0]:
        cursor.execute(
            '''
            CREATE TABLE user_score (user_id SERIAL PRIMARY KEY,
                                     created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP NOT NULL,
                                     score INTEGER)
            ''')
        conn.commit()
        logger.info("Created table user score")
    cursor.close()
    conn.close()
26 |
27 |
@dataclass
class UserScoreSchema:
    """One full row of the ``user_score`` table, in column order."""
    user_id: str
    created_at: str
    score: int
33 |
34 |
def increment_user_score(user_id: int):
    """Atomically bump *user_id*'s score, creating the row at 1 if absent.

    Uses a single upsert so concurrent calls cannot race between the
    existence check and the insert (the previous SELECT-then-INSERT
    could raise on a duplicate primary key under concurrency).
    """
    conn = psycopg2.connect(DATABASE_URL, sslmode='require')
    cursor = conn.cursor()
    cursor.execute(
        "INSERT INTO user_score (user_id, score) VALUES (%s, 1) "
        "ON CONFLICT (user_id) DO UPDATE SET score = user_score.score + 1",
        (user_id,))
    conn.commit()
    cursor.close()
    conn.close()
    user_score = get_user_score(user_id)
    # Surface suspiciously high scores (possible abuse).
    if user_score > 5000:
        logger.warning(f"User {user_id} score incremented - {user_score=}.")
52 |
53 |
def get_user_score(user_id: int) -> int:
    """Return the stored score for *user_id*, or 0 when no row exists."""
    conn = psycopg2.connect(DATABASE_URL, sslmode='require')
    cursor = conn.cursor()
    cursor.execute("SELECT user_id, created_at, score FROM user_score WHERE user_id = %s", (user_id,))
    row = cursor.fetchone()
    cursor.close()
    conn.close()
    return UserScoreSchema(*row).score if row is not None else 0
65 |
66 |
--------------------------------------------------------------------------------
/sql_db/users.py:
--------------------------------------------------------------------------------
1 | from dataclasses import dataclass
2 | import pandas as pd
3 | import psycopg2
4 |
5 | from sql_db import DATABASE_URL, get_num_rows
6 | from utils.logging_utils import logger
7 |
8 |
def create_user_table():
    """Create the ``users`` table if it does not already exist."""
    conn = psycopg2.connect(DATABASE_URL, sslmode='require')
    cursor = conn.cursor()
    cursor.execute("select exists(select * from information_schema.tables where table_name=%s)", ('users',))
    # Only create when the existence probe comes back False.
    if not cursor.fetchone()[0]:
        cursor.execute(
            '''
            CREATE TABLE users (user_id SERIAL PRIMARY KEY,
                                created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP NOT NULL,
                                email TEXT UNIQUE,
                                name TEXT)
            ''')
        conn.commit()
        logger.info("Created table users")
    cursor.close()
    conn.close()
28 |
29 |
@dataclass
class UserSchema:
    """One full row of the ``users`` table, in column order."""
    user_id: str
    created_at: str
    email: str
    name: str
36 |
37 |
def add_user(email: str, name: str):
    """Return the user_id for *email*, inserting a new user when needed."""
    conn = psycopg2.connect(DATABASE_URL, sslmode='require')
    cursor = conn.cursor()
    user = get_user_by_email(email)
    if user is not None:
        logger.info(f"User {name} with email {email} already exists")
    else:
        logger.info(f"Adding user {name} with email {email}")
        cursor.execute("INSERT INTO users (email, name) VALUES (%s, %s)", (email, name))
        conn.commit()
        # Re-fetch so we return the DB-assigned user_id.
        user = get_user_by_email(email)
    cursor.close()
    conn.close()
    return user.user_id
52 |
53 |
def get_user_by_email(email: str):
    """Return the ``UserSchema`` for *email*, or ``None`` if no such user.

    The previous early ``return None`` skipped cursor/connection cleanup,
    leaking a DB connection on every miss; cleanup now always runs.
    """
    conn = psycopg2.connect(DATABASE_URL, sslmode='require')
    cursor = conn.cursor()
    cursor.execute("SELECT * FROM users WHERE email=%s", (email,))
    user = cursor.fetchone()
    cursor.close()
    conn.close()
    return UserSchema(*user) if user is not None else None
64 |
65 |
def get_users_by_name(name: str):
    """Return all raw user rows whose name equals *name*."""
    conn = psycopg2.connect(DATABASE_URL, sslmode='require')
    cursor = conn.cursor()
    cursor.execute("SELECT * FROM users WHERE name=%s", (name,))
    matches = cursor.fetchall()
    cursor.close()
    conn.close()
    return matches
74 |
75 |
def get_all_users() -> pd.DataFrame:
    """Return every row of the ``users`` table as a DataFrame."""
    conn = psycopg2.connect(DATABASE_URL, sslmode='require')
    cursor = conn.cursor()
    cursor.execute("SELECT * FROM users")
    rows = cursor.fetchall()
    cursor.close()
    conn.close()
    return pd.DataFrame(rows, columns=['user_id', 'created_at', 'email', 'name'])
85 |
86 |
def get_num_users() -> int:
    """Total number of rows in the ``users`` table."""
    return get_num_rows("users")
90 |
--------------------------------------------------------------------------------
/static/introjs-modern.css:
--------------------------------------------------------------------------------
1 | .introjs-tooltip {
2 | background-color: rgba(000, 0, 0, 0.5);
3 | color: #fff;
4 | }
5 |
.introjs-button,
.introjs-button:hover, .introjs-button:focus, .introjs-button:active,
.introjs-disabled, .introjs-disabled:focus, .introjs-disabled:hover {
    outline: none;
    background-image: none;
    background-color: transparent;
    color: #fff;
    border: 1px solid transparent;
    border-radius: 50px;
    box-shadow: none;
    /* removed invalid "border-shadow" property (not CSS; browsers ignored
       it) — box-shadow above already covers the intent */
    text-shadow: none;
}
19 |
20 | .introjs-button:hover, .introjs-button:focus, .introjs-button:active {
21 | border: 1px solid #fff
22 | }
23 | .introjs-disabled, .introjs-disabled:focus, .introjs-disabled:hover {
24 | color: #ccc;
25 | border: 1px solid transparent;
26 | }
27 |
28 | .introjs-arrow {
29 | border: 10px solid #fff;
30 | }
31 | .introjs-arrow.top, .introjs-arrow.top-middle, .introjs-arrow.top-right {
32 | border-color: transparent transparent rgba(000, 0, 0, 0.5);
33 | top: -20px;
34 | left: 20px;
35 | }
36 | .introjs-arrow.bottom, .introjs-arrow.bottom-middle, .introjs-arrow.bottom-right {
37 | border-color: rgba(000, 0, 0, 0.5) transparent transparent;
38 | bottom: -20px;
39 | left: 20px;
40 | }
41 | .introjs-arrow.left, .introjs-arrow.right {
42 | top: 20px;
43 | }
44 | .introjs-arrow.left-bottom, .introjs-arrow.right-bottom {
45 | bottom: 20px;
46 | }
47 |
48 | .introjs-arrow.left, .introjs-arrow.left-bottom {
49 | left: -20px;
50 | border-color: transparent rgba(000, 0, 0, 0.5) transparent transparent;
51 | }
52 | .introjs-arrow.right, .introjs-arrow.right-bottom {
53 | right: -20px;
54 | border-color: transparent transparent transparent rgba(000, 0, 0, 0.5);
55 | }
--------------------------------------------------------------------------------
/static/normalize.css:
--------------------------------------------------------------------------------
1 | /*! normalize.css v8.0.1 | MIT License | github.com/necolas/normalize.css */
2 |
3 | /* Document
4 | ========================================================================== */
5 |
6 | /**
7 | * 1. Correct the line height in all browsers.
8 | * 2. Prevent adjustments of font size after orientation changes in iOS.
9 | */
10 |
11 | html {
12 | line-height: 1.15; /* 1 */
13 | -webkit-text-size-adjust: 100%; /* 2 */
14 | display: table;
15 | margin: auto;
16 | }
17 |
18 | /* Sections
19 | ========================================================================== */
20 |
21 | /**
22 | * Remove the margin in all browsers.
23 | */
24 |
25 | body {
26 | margin: 0;
27 | }
28 |
29 | /**
30 | * Render the `main` element consistently in IE.
31 | */
32 |
33 | main {
34 | display: block;
35 | }
36 |
37 | /**
38 | * Correct the font size and margin on `h1` elements within `section` and
39 | * `article` contexts in Chrome, Firefox, and Safari.
40 | */
41 |
42 | h1 {
43 | font-size: 2em;
44 | margin: 0.67em 0;
45 | }
46 |
47 | /* Grouping content
48 | ========================================================================== */
49 |
50 | /**
51 | * 1. Add the correct box sizing in Firefox.
52 | * 2. Show the overflow in Edge and IE.
53 | */
54 |
55 | hr {
56 | box-sizing: content-box; /* 1 */
57 | height: 0; /* 1 */
58 | overflow: visible; /* 2 */
59 | }
60 |
61 | /**
62 | * 1. Correct the inheritance and scaling of font size in all browsers.
63 | * 2. Correct the odd `em` font sizing in all browsers.
64 | */
65 |
66 | pre {
67 | font-family: monospace, monospace; /* 1 */
68 | font-size: 1em; /* 2 */
69 | }
70 |
71 | /* Text-level semantics
72 | ========================================================================== */
73 |
74 | /**
75 | * Remove the gray background on active links in IE 10.
76 | */
77 |
78 | a {
79 | background-color: transparent;
80 | }
81 |
82 | /**
83 | * 1. Remove the bottom border in Chrome 57-
84 | * 2. Add the correct text decoration in Chrome, Edge, IE, Opera, and Safari.
85 | */
86 |
87 | abbr[title] {
88 | border-bottom: none; /* 1 */
89 | text-decoration: underline; /* 2 */
90 | text-decoration: underline dotted; /* 2 */
91 | }
92 |
93 | /**
94 | * Add the correct font weight in Chrome, Edge, and Safari.
95 | */
96 |
97 | b,
98 | strong {
99 | font-weight: bolder;
100 | }
101 |
102 | /**
103 | * 1. Correct the inheritance and scaling of font size in all browsers.
104 | * 2. Correct the odd `em` font sizing in all browsers.
105 | */
106 |
107 | code,
108 | kbd,
109 | samp {
110 | font-family: monospace, monospace; /* 1 */
111 | font-size: 1em; /* 2 */
112 | }
113 |
114 | /**
115 | * Add the correct font size in all browsers.
116 | */
117 |
118 | small {
119 | font-size: 80%;
120 | }
121 |
122 | /**
123 | * Prevent `sub` and `sup` elements from affecting the line height in
124 | * all browsers.
125 | */
126 |
127 | sub,
128 | sup {
129 | font-size: 75%;
130 | line-height: 0;
131 | position: relative;
132 | vertical-align: baseline;
133 | }
134 |
135 | sub {
136 | bottom: -0.25em;
137 | }
138 |
139 | sup {
140 | top: -0.5em;
141 | }
142 |
143 | /* Embedded content
144 | ========================================================================== */
145 |
146 | /**
147 | * Remove the border on images inside links in IE 10.
148 | */
149 |
150 | img {
151 | border-style: none;
152 | }
153 |
154 | /* Forms
155 | ========================================================================== */
156 |
157 | /**
158 | * 1. Change the font styles in all browsers.
159 | * 2. Remove the margin in Firefox and Safari.
160 | */
161 |
162 | button,
163 | input,
164 | optgroup,
165 | select,
166 | textarea {
167 | font-family: inherit; /* 1 */
168 | font-size: 100%; /* 1 */
169 | line-height: 1.15; /* 1 */
170 | margin: 0; /* 2 */
171 | }
172 |
173 | /**
174 | * Show the overflow in IE.
175 | * 1. Show the overflow in Edge.
176 | */
177 |
178 | button,
179 | input { /* 1 */
180 | overflow: visible;
181 | }
182 |
183 | /**
184 | * Remove the inheritance of text transform in Edge, Firefox, and IE.
185 | * 1. Remove the inheritance of text transform in Firefox.
186 | */
187 |
188 | button,
189 | select { /* 1 */
190 | text-transform: none;
191 | }
192 |
193 | /**
194 | * Correct the inability to style clickable types in iOS and Safari.
195 | */
196 |
197 | button,
198 | [type="button"],
199 | [type="reset"],
200 | [type="submit"] {
201 | -webkit-appearance: button;
202 | }
203 |
204 | /**
205 | * Remove the inner border and padding in Firefox.
206 | */
207 |
208 | button::-moz-focus-inner,
209 | [type="button"]::-moz-focus-inner,
210 | [type="reset"]::-moz-focus-inner,
211 | [type="submit"]::-moz-focus-inner {
212 | border-style: none;
213 | padding: 0;
214 | }
215 |
216 | /**
217 | * Restore the focus styles unset by the previous rule.
218 | */
219 |
220 | button:-moz-focusring,
221 | [type="button"]:-moz-focusring,
222 | [type="reset"]:-moz-focusring,
223 | [type="submit"]:-moz-focusring {
224 | outline: 1px dotted ButtonText;
225 | }
226 |
227 | /**
228 | * Correct the padding in Firefox.
229 | */
230 |
231 | fieldset {
232 | padding: 0.35em 0.75em 0.625em;
233 | }
234 |
235 | /**
236 | * 1. Correct the text wrapping in Edge and IE.
237 | * 2. Correct the color inheritance from `fieldset` elements in IE.
238 | * 3. Remove the padding so developers are not caught out when they zero out
239 | * `fieldset` elements in all browsers.
240 | */
241 |
242 | legend {
243 | box-sizing: border-box; /* 1 */
244 | color: inherit; /* 2 */
245 | display: table; /* 1 */
246 | max-width: 100%; /* 1 */
247 | padding: 0; /* 3 */
248 | white-space: normal; /* 1 */
249 | }
250 |
251 | /**
252 | * Add the correct vertical alignment in Chrome, Firefox, and Opera.
253 | */
254 |
255 | progress {
256 | vertical-align: baseline;
257 | }
258 |
259 | /**
260 | * Remove the default vertical scrollbar in IE 10+.
261 | */
262 |
263 | textarea {
264 | overflow: auto;
265 | }
266 |
267 | /**
268 | * 1. Add the correct box sizing in IE 10.
269 | * 2. Remove the padding in IE 10.
270 | */
271 |
272 | [type="checkbox"],
273 | [type="radio"] {
274 | box-sizing: border-box; /* 1 */
275 | padding: 0; /* 2 */
276 | }
277 |
278 | /**
279 | * Correct the cursor style of increment and decrement buttons in Chrome.
280 | */
281 |
282 | [type="number"]::-webkit-inner-spin-button,
283 | [type="number"]::-webkit-outer-spin-button {
284 | height: auto;
285 | }
286 |
287 | /**
288 | * 1. Correct the odd appearance in Chrome and Safari.
289 | * 2. Correct the outline style in Safari.
290 | */
291 |
292 | [type="search"] {
293 | -webkit-appearance: textfield; /* 1 */
294 | outline-offset: -2px; /* 2 */
295 | }
296 |
297 | /**
298 | * Remove the inner padding in Chrome and Safari on macOS.
299 | */
300 |
301 | [type="search"]::-webkit-search-decoration {
302 | -webkit-appearance: none;
303 | }
304 |
305 | /**
306 | * 1. Correct the inability to style clickable types in iOS and Safari.
307 | * 2. Change font properties to `inherit` in Safari.
308 | */
309 |
310 | ::-webkit-file-upload-button {
311 | -webkit-appearance: button; /* 1 */
312 | font: inherit; /* 2 */
313 | }
314 |
315 | /* Interactive
316 | ========================================================================== */
317 |
318 | /*
319 | * Add the correct display in Edge, IE 10+, and Firefox.
320 | */
321 |
322 | details {
323 | display: block;
324 | }
325 |
326 | /*
327 | * Add the correct display in all browsers.
328 | */
329 |
330 | summary {
331 | display: list-item;
332 | }
333 |
334 | /* Misc
335 | ========================================================================== */
336 |
337 | /**
338 | * Add the correct display in IE 10+.
339 | */
340 |
341 | template {
342 | display: none;
343 | }
344 |
345 | /**
346 | * Add the correct display in IE 10.
347 | */
348 |
349 | [hidden] {
350 | display: none;
351 | }
--------------------------------------------------------------------------------
/static/style.css:
--------------------------------------------------------------------------------
1 | * {
2 | box-sizing: border-box;
3 | }
4 |
5 |
6 | body {
7 | margin: auto;
8 | background-color: #222;
9 | color: #fff;
10 | font-family: 'Montserrat', sans-serif;
11 | width: min(95vmin, 740px);
12 | display: table;
13 | }
14 |
15 | .container {
16 | padding: 1.5vmin;
17 | background: linear-gradient(to bottom, #333, #444);
18 | border-radius: 1vmin;
19 | box-shadow: 0 0 1vmin rgba(0, 0, 0, 0.2);
20 | }
21 |
22 | .container h1 {
23 | margin-bottom: 0;
24 | margin-top: 0;
25 | }
26 |
27 | .title-button-container {
28 | display: flex;
29 | justify-content: space-between;
30 | align-items: center;
31 | margin-bottom: 1vmin;
32 | }
33 |
34 | .image-grid {
35 | display: grid;
36 | grid-template-columns: 1fr 1fr;
37 | /*grid-template-rows: 1fr 1fr; TODO use if display 4 images */
38 | grid-gap: 1vmin;
39 | }
40 |
41 | .image-container {
42 | display: flex;
43 | padding: 1vmin;
44 | transition: transform 0.5s;
45 | }
46 |
47 | @media (hover: hover) {
48 | .image-container:not(.after-clicked):hover {
49 | transform: scale(1.1);
50 | }
51 | }
52 |
53 | .image-grid img {
54 | max-width: 100%;
55 | max-height: 100%;
56 | box-shadow: 0 4px 8px 0 rgba(0, 0, 0, 0.2), 0 6px 10px 0 rgba(0, 0, 0, 0.19);
57 | }
58 |
59 | input[type="text"] {
60 | width: 100%;
61 | padding: 0.5vmin 0 0 0;
62 | margin-bottom: 1vmin;
63 | box-sizing: border-box;
64 | border: 0.2vmin solid #ccc;
65 | border-radius: 1vmin;
66 | font-size: max(2vmin, 16px);
67 | }
68 |
69 | .button-container {
70 | display: flex;
71 | align-items: center;
72 | justify-content: flex-start;
73 | }
74 |
75 | button:not(.top-button) {
76 | display: inline-block;
77 | height: max(4vmin, 40px);
78 | width: max(4vmin, 40px);
79 | margin-bottom: 0;
80 | box-sizing: border-box;
81 | border: none;
82 | border-radius: 5px;
83 | font-size: 12px;
84 | font-family: 'Montserrat', sans-serif;
85 | font-weight: bold;
86 | background-color: #5d5d5d;
87 | /*text-shadow: 0 0 2px rgba(255, 255, 255, 0.5);*/
88 | color: #fff;
89 | cursor: pointer;
90 | /*margin-right: 20px;*/
91 | box-shadow: 0 0 10px rgba(0, 0, 0, 0.3);
92 | transition: background 0.5s ease;
93 | }
94 |
95 | button:hover:not(.top-button) {
96 | background-color: #6e6e6e;
97 | cursor: pointer;
98 | box-shadow: 0 0 20px rgba(0, 0, 0, 0.5);
99 | }
100 |
101 | .top-buttons {
102 | display: flex;
103 | align-items: center;
104 | justify-content: flex-start;
105 | padding: 2vmin;
106 | }
107 |
108 | .top-button {
109 | background-color: #333;
110 | color: #fff;
111 | border: none;
112 | border-radius: 5px;
113 | font-size: 16px;
114 | padding: 8px 15px;
115 | font-family: 'Montserrat', sans-serif;
116 | font-weight: bold;
117 | box-shadow: 0 0 10px rgba(0, 0, 0, 0.3);
118 | transition: background 0.5s ease;
119 | margin: 0 5px;
120 | }
121 |
122 | .top-button:hover {
123 | background-color: #444;
124 | cursor: pointer;
125 | box-shadow: 0 0 20px rgba(0, 0, 0, 0.5);
126 | }
127 |
128 | .progress-container {
129 | display: flex;
130 | align-items: center;
131 | width: 100%;
132 | min-height: max(2.5vmin, 20px);
133 | overflow: hidden;
134 | border-radius: 5px;
135 | position: relative;
136 | }
137 |
138 | .progress-bar-background {
139 | height: 100%;
140 | overflow: hidden;
141 | border-radius: 5px;
142 | transition: width 0.5s ease;
143 | }
144 |
145 | .progress-bar {
146 | flex: 1; /* add this */
147 | height: 100%;
148 | width: 0;
149 | background: #9fde95;
150 | border-radius: 5px;
151 | transition: width 0.5s ease;
152 | position: absolute;
153 | top: 0;
154 | left: 0;
155 | }
156 |
157 | .progress-bar-text {
158 | display: flex;
159 | font-family: 'Montserrat', sans-serif;
160 | align-items: center;
161 | justify-content: center;
162 | width: 100%;
163 | height: 100%;
164 | font-size: max(2vmin, 12px);
165 | z-index: 2;
166 | overflow: hidden;
167 | }
168 |
169 | .progress-bar-text p {
170 | font-size: max(2vmin, 14px);
171 | margin: 0.25vmin;
172 | }
173 |
174 |
175 | .after_clicked {
176 | /*pointer-events: none; !* Make element unclickable *!*/
177 | }
178 |
179 | .clicked {
180 | }
181 |
182 | .splash-screen {
183 | position: fixed; /* position the splash screen as a fixed element on top of the page */
184 | top: 0;
185 | width: min(95vmin, 740px);
186 | height: 100%;
187 | background-color: rgba(0, 0, 0, 0.9); /* set an almost solid black background */
188 | z-index: 999; /* set the z-index to ensure the splash screen is on top of other elements */
189 | display: flex; /* make the splash screen a flex container */
190 | align-items: center; /* center the splash screen vertically */
191 | justify-content: center;
192 | font-weight: 300;
193 | line-height: 1.5;
194 | letter-spacing: 0.5px;
195 | }
196 |
197 | @media (max-height: 768px) {
198 | .splash-screen {
199 | /* Use a smaller font size and spacing on smaller screens */
200 | font-size: 18px;
201 | line-height: 1.2;
202 | }
203 | }
204 |
205 | .splash-screen-inner {
206 | border-radius: 10px; /* add rounded corners */
207 | box-shadow: 0 0 20px rgba(0, 0, 0, 0.2); /* add a shadow */
208 | padding: 1vmin; /* add some padding */
209 | margin: 2vmin; /* center the splash screen horizontally and vertically */
210 | }
211 |
212 | .splash-screen h1 {
213 | text-align: center;
214 | font-size: 32px;
215 | }
216 |
217 | .splash-screen p {
218 | margin: 4vmin 0;
219 | font-size: 18px;
220 | text-align: justify;
221 | }
222 |
223 | .splash-screen li {
224 | margin: 4vmin 0;
225 | text-align: left;
226 | font-size: min(4vmin, 18px);
227 | }
228 |
229 | .splash-screen-buttons {
230 | display: flex; /* make the buttons a flex container */
231 | margin-top: 20px;
232 | }
233 |
234 | .splash-screen-button {
235 | display: inline-block;
236 | padding: 5px 20px;
237 | border: none;
238 | border-radius: 4px;
239 | font-size: 16px;
240 | background-color: #5d5d5d;
241 | box-shadow: 0 0 10px rgba(0, 0, 0, 0.3);
242 | /*text-shadow: 0 0 2px rgba(255, 255, 255, 0.5);*/
243 | color: #fff;
244 | cursor: pointer;
245 | transition: background 0.5s ease;
246 | flex: 1; /* set the flex value to 1 to evenly distribute the buttons in the container */
247 | text-align: center; /* center the button text */
248 | }
249 |
250 | .splash-screen-button:hover {
251 | background-color: #6e6e6e;
252 | cursor: pointer;
253 | box-shadow: 0 0 20px rgba(0, 0, 0, 0.5);
254 | }
255 |
256 | .a {
257 | display: inline-block;
258 | padding: 12px 20px;
259 | border: none;
260 | border-radius: 4px;
261 | font-size: 16px;
262 | background: linear-gradient(to bottom, #033964, #94a6f1);
263 | /*text-shadow: 0 0 2px rgba(255, 255, 255, 0.5);*/
264 | color: #fff;
265 | cursor: pointer;
266 | transition: background 0.5s ease;
267 | flex: 1; /* set the flex value to 1 to evenly distribute the buttons in the container */
268 | text-align: center; /* center the button text */
269 | }
270 |
271 | button.google-button {
272 | background-image: url('https://www.google.com/images/branding/googlelogo/2x/googlelogo_color_272x92dp.png');
273 | background-size: 50px 50px; /* set the width and height to 50 pixels */
274 | background-repeat: no-repeat;
275 | background-position: center top;
276 | }
277 |
.footer {
  position: fixed; /* pin the footer to the bottom of the viewport */
  bottom: 0; /* align its bottom edge with the viewport bottom */
  width: 100%; /* full-width bar */
  background-color: #333;
  color: #fff;
  padding: 0;
  /* Fixed: `alignment` is not a CSS property; `text-align`/`align-content`
     below provide the intended centering. */
  align-content: center;
  text-align: center;
}
289 |
h1 a {
  color: #c0e0c7;
  transition: color 0.5s;
  cursor: pointer;
  text-decoration: none;
  /* Fixed: `target` is an HTML attribute, not a CSS property — removed.
     Set target="_blank" on the anchor element in the template instead. */
}
297 |
298 | h1 a:hover {
299 | /* Change the font color on hover */
300 | color: #00b8d4;
301 | }
302 |
a {
  color: #ec6565;
  transition: color 0.5s;
  cursor: pointer;
  text-decoration: none;
  /* Fixed: `target` is an HTML attribute, not a CSS property — removed.
     Set target="_blank" on the anchor element in the template instead. */
}
310 |
311 | a:hover {
312 | /* Change the font color on hover */
313 | color: #e71414;
314 | }
315 |
316 | li a {
317 | color: #c0e0c7;
318 | transition: color 0.5s;
319 | cursor: pointer;
320 | text-decoration: none;
321 | }
322 |
323 | li a:hover {
324 | /* Change the font color on hover */
325 | color: #00b8d4;
326 | }
327 |
328 | p a {
329 | color: #c0e0c7;
330 | transition: color 0.5s;
331 | cursor: pointer;
332 | text-decoration: none;
333 | }
334 |
335 | p a:hover {
336 | /* Change the font color on hover */
337 | color: #00b8d4;
338 | }
339 |
340 | footer a {
341 | color: #fff;
342 | transition: color 0.5s;
343 | }
344 |
345 | footer a:hover {
346 | /* Change the font color on hover */
347 | color: #00b8d4;
348 | }
349 |
350 | footer p {
351 | text-align: center;
352 | }
353 |
354 |
--------------------------------------------------------------------------------
/templates/index.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 | Pick a Pic
9 |
10 |
11 |
12 |
13 |
14 |
Pick a Pic
15 |
16 | Pick a Pic is an app for collecting human feedback on AI-generated images for supporting academic research
17 | in AI. Users can generate images from text, and then rank them. Being an academic community effort, all
18 | code , data , and models
20 | resulting from this research project will be completely open-sourced.
21 |
22 |
23 | Next
24 |
25 |
26 |
27 |
Legal Notice
28 |
29 | This demo is intended for research purposes only.
30 | The authors are not responsible for any misuse of the demo.
31 | By using the demo, the users grant consent for their prompts, interactions, and generated images to
32 | be collected and eventually released into the public domain.
33 |
34 | By using the demo, the users agree not to generate content that others may find to be offensive,
35 | upsetting, or inappropriate.
36 |
37 |
38 |
39 | Back
40 | Accept
41 |
42 |
43 |
44 |
Pick a Pic
45 |
To ensure that you are a real human we kindly request users to authenticate via Google or Discord before
46 | generating and participating in the Pick a Pic project.
47 |
48 |
49 | Close
50 | Google Sign in
51 | Discord Sign in
52 |
53 |
54 |
55 |
Pick a Pic
56 |
57 | Mission : We are on a mission to build the largest open-sourced and publicly available
58 | human-feedback dataset for text-to-image generation.
59 |
60 |
61 | Code : All of our code is open-source. This includes the web app repository and the model inference repository.
64 |
65 |
66 | Model : Currently, we use
69 | dreamlike-photoreal-2.0 , and SDXL variants.
70 |
71 |
72 | Data : We will periodically update this dataset
75 | with the new collected data.
76 |
77 |
78 | Reach Out : If you would like to help, suggest, or chat with us, please reach out on discord or by mail .
81 |
82 |
83 | Negative Prompts : Inside square brackets. E.g. "Hello [world]" will result in the prompt:
84 | "Hello" and negative prompt "world".
85 |
86 |
87 | Return to demo
88 |
89 |
90 |
91 |
92 |
Pick a Pic
93 | We thank
StabilityAI and the
Google TRC Program
95 | for their generous support throughout this project.
96 |
97 | Return to demo
98 |
99 |
100 |
101 |
102 |
103 |
149 |
150 |
181 |
182 |
Clear
185 | {# #}
187 | {# #}
188 | {# #}
189 |
190 |
191 |
192 |
193 |
194 |
195 |
196 |
202 |
203 |
204 |
205 |
207 |
208 |
209 |
211 |
212 |
213 |
214 | No image is significantly better than the
216 | other
217 |
218 |
219 |
220 |
221 |
Pick a Pic
222 |
OMG. Given our current resources, there are too many users. Please refresh the page in one minute.
223 |
224 |
225 |
226 |
227 |
228 |
Pick a Pic
229 |
We are currently limiting the number of generations per user to 1000 generated images per week. We might increase it in the future.
230 | For any questions, please contact us at mail .
232 |
233 |
234 |
235 |
236 |
237 |
Pick a Pic
238 |
OMG. Something went wrong. Please refresh the page and try again.
239 |
240 |
241 |
242 |
243 |
244 |
245 |
1039 |
1040 |
1041 |
--------------------------------------------------------------------------------
/utils/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/yuvalkirstain/heroku_app/b4ccc29e450682e8697aa8f5a9a955e915cd467d/utils/__init__.py
--------------------------------------------------------------------------------
/utils/logging_utils.py:
--------------------------------------------------------------------------------
1 | import logging
2 | from pydantic import BaseModel
3 | from logging.config import dictConfig
4 |
5 |
class LogConfig(BaseModel):
    """Logging configuration to be set for the server.

    Serialized via ``.dict()`` and handed to ``logging.config.dictConfig``
    at import time (see the bottom of this module).
    """

    # Name of the application logger exposed as ``logger`` below.
    LOGGER_NAME: str = "mycoolapp"
    # uvicorn's DefaultFormatter provides %(levelprefix)s (colored level tag).
    LOG_FORMAT: str = "%(levelprefix)s | %(asctime)s | %(message)s"
    LOG_LEVEL: str = "DEBUG"

    # Logging config — the dictConfig schema fields.
    # NOTE(review): these attributes are deliberately unannotated; pydantic v1
    # infers field types from the defaults, so they appear in ``.dict()``.
    # Under pydantic v2 unannotated fields raise an error — confirm the
    # pinned pydantic version before upgrading.
    version = 1
    disable_existing_loggers = False
    formatters = {
        "default": {
            "()": "uvicorn.logging.DefaultFormatter",
            "fmt": LOG_FORMAT,
            "datefmt": "%Y-%m-%d %H:%M:%S",
        },
    }
    handlers = {
        "default": {
            "formatter": "default",
            "class": "logging.StreamHandler",
            "stream": "ext://sys.stderr",
        },
    }
    loggers = {
        "mycoolapp": {"handlers": ["default"], "level": LOG_LEVEL},
    }
33 |
34 |
class EndpointFilter(logging.Filter):
    """Suppress access-log records for the ``/get_images_status/`` polling endpoint."""

    def filter(self, record: logging.LogRecord) -> bool:
        # Keep the record only when the noisy polling path is absent from the message.
        return "/get_images_status/" not in record.getMessage()
38 |
39 |
# Configure logging at import time so every importer gets the same setup.
dictConfig(LogConfig().dict())
# Shared application logger; name must match LogConfig.LOGGER_NAME.
logger = logging.getLogger("mycoolapp")
# Attach the filter to uvicorn's access logger to hide status-polling requests.
logging.getLogger("uvicorn.access").addFilter(EndpointFilter())
--------------------------------------------------------------------------------