```sh
gunicorn run:app --workers=9
gunicorn run:app --workers=9 --worker-class=meinheld.gmeinheld.MeinheldWorker
```

MacBook Pro 2015, Python 3.7

| Framework | Server | Req/s | Max latency | +/- Stdev |
|---|---|---|---|---|
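Both commands assume a `run.py` module exposing a WSGI `app` object. The actual application behind the benchmark is not shown here; a minimal Flask sketch would look like this:

```python
# run.py -- minimal placeholder app for the gunicorn commands above (hypothetical).
from flask import Flask

app = Flask(__name__)


@app.route("/")
def index():
    # Return a trivial payload so the benchmark measures server overhead
    # rather than application work.
    return "Hello, world!"


if __name__ == "__main__":
    app.run()
```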
```python
import os
import pickle
import warnings

import numpy as np
import pandas as pd
from sklearn.model_selection import train_test_split
from tensorflow.keras.callbacks import EarlyStopping
from tensorflow.keras.layers import Dense
from tensorflow.keras.layers import Dropout
```
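These imports suggest a small scikit-learn/Keras training script. A minimal sketch of how they might fit together follows; the file path, feature/target names, network shape, and output filenames are placeholders, not taken from the original code:

```python
# Hypothetical continuation of the imports above: train a small dense network
# and persist the model plus the feature columns for later serving.
from tensorflow.keras.models import Sequential

warnings.filterwarnings("ignore")

df = pd.read_csv("data.csv")                      # placeholder path
X = df.drop(columns=["target"]).values            # placeholder target column
y = df["target"].values

X_train, X_test, y_train, y_test = train_test_split(
    X, y, test_size=0.2, random_state=42)

model = Sequential([
    Dense(64, activation="relu", input_shape=(X_train.shape[1],)),
    Dropout(0.2),
    Dense(1, activation="sigmoid"),
])
model.compile(optimizer="adam", loss="binary_crossentropy", metrics=["accuracy"])

early_stop = EarlyStopping(monitor="val_loss", patience=5, restore_best_weights=True)
model.fit(X_train, y_train, validation_split=0.2, epochs=100, callbacks=[early_stop])
model.evaluate(X_test, y_test)

# Persist artifacts so a serving app (e.g. Flask) can load them at request time.
model.save("model.h5")
with open("columns.pkl", "wb") as f:
    pickle.dump(list(df.drop(columns=["target"]).columns), f)
```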
```python
# Useful celery config.
from celery import Celery
from kombu import Queue

app = Celery('tasks',
             broker='redis://localhost:6379',
             backend='redis://localhost:6379')

app.conf.update(
    CELERY_TASK_RESULT_EXPIRES=3600,
    CELERY_QUEUES=(
        Queue('default', routing_key='tasks.#'),
    ),
)
```
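With the app configured, tasks are declared with the `@app.task` decorator and dispatched asynchronously. A minimal sketch, assuming a hypothetical `tasks.add` task (the name and arguments are purely illustrative):

```python
# Hypothetical task registered against the Celery app configured above.
@app.task(name='tasks.add')
def add(x, y):
    return x + y


# Caller side: enqueue the task and fetch the result from the Redis backend.
result = add.delay(2, 3)
print(result.get(timeout=10))  # -> 5
```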
```python
'''
A Python script that starts a Celery worker and automatically restarts it
whenever the code changes. I wrote this because the Celery worker's
"--autoreload" option does not seem to work for many people.
'''
import time
import os

import psutil                                              # pip install psutil
from watchdog.observers import Observer                   # pip install watchdog
from watchdog.events import PatternMatchingEventHandler
```
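The rest of the script is not shown above. A sketch of how the pieces could fit together follows; the worker command, watched path, and restart strategy are assumptions, not the original implementation:

```python
# Continues from the imports above. Watches *.py files and restarts the
# Celery worker whenever one of them changes.
import subprocess

WORKER_CMD = "celery -A tasks worker --loglevel=info"   # hypothetical worker command


def kill_workers():
    # Terminate any running celery worker processes found via psutil.
    for proc in psutil.process_iter(attrs=["pid", "cmdline"]):
        cmdline = " ".join(proc.info["cmdline"] or [])
        if "celery" in cmdline and "worker" in cmdline:
            try:
                proc.kill()
            except psutil.NoSuchProcess:
                pass


def restart_worker():
    kill_workers()
    subprocess.Popen(WORKER_CMD, shell=True)


class ReloadHandler(PatternMatchingEventHandler):
    def __init__(self):
        # Only react to changes in Python source files.
        super().__init__(patterns=["*.py"])

    def on_any_event(self, event):
        restart_worker()


if __name__ == "__main__":
    restart_worker()
    observer = Observer()
    observer.schedule(ReloadHandler(), path=".", recursive=True)
    observer.start()
    try:
        while True:
            time.sleep(1)
    except KeyboardInterrupt:
        observer.stop()
    observer.join()
```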
```sh
#!/bin/sh
# This is free and unencumbered software released into the public domain.
#
# Anyone is free to copy, modify, publish, use, compile, sell, or
# distribute this software, either in source code form or as a compiled
# binary, for any purpose, commercial or non-commercial, and by any
# means.
#
# In jurisdictions that recognize copyright laws, the author or authors
# of this software dedicate any and all copyright interest in the
# software to the public domain.
```
```python
import math
import random
import csv
import cProfile
import hashlib

import numpy as np

memoization = {}


class Clustering:
    def k_means_clustering(self, n, s=1.0):
        """
        This method performs the K-means clustering algorithm on the data for n iterations. This involves updating
        the centroids using the mean-shift heuristic n times and reassigning the patterns to their closest
        centroids. When the algorithm is run more than once, we pick the run which has the better solution
        (according to some measure of cluster quality).
        :param n: the number of iterations to complete
        :param s: the scaling factor to use when updating the centroids
        """
```
```python
class Clustering:
    """
    An instance of the Clustering class is a solution, i.e. a particular partitioning of the (heterogeneous) data
    set into homogeneous subsets. For centroid-based clustering algorithms this involves looking at each pattern and
    assigning it to its nearest centroid. This is done by calculating the distance between each pattern and every
    centroid and selecting the one with the smallest distance. Here we are using fractional distance with the
    default parameters.
    :param d: the dimensionality of the input patterns
    :param k: the pre-specified number of clusters & centroids
    :param z: the patterns in the data set
    """
```
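For reference, a small sketch of the fractional distance mentioned in the docstring; the exponent used here as the "default parameter" (0.5) is an assumption, not taken from the original code:

```python
import numpy as np


def fractional_distance(x, y, f=0.5):
    """Minkowski-style distance with a fractional exponent, which tends to be
    better behaved than Euclidean distance in high-dimensional spaces."""
    return np.sum(np.abs(x - y) ** f) ** (1.0 / f)
```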
```python
class ClusteringQuality:
    """
    Instances of this class implement the two measures of clustering quality discussed in the article, namely the
    Davies-Bouldin index and the silhouette index. It also implements a number of useful helper methods.
    :param solution: the clustering solution of type Clustering
    :param minimum: the minimum distance allowable
    """

    def __init__(self, solution, minimum):
        """
        Initializes the quality measure with a Clustering solution and the minimum allowable distance.
        """
        self.solution = solution
        self.minimum = minimum
```
```python
from cx_Freeze import setup, Executable

include_files = ['app/templates/',
                 'app/static/']

# Note: without 'jinja2.ext' in this list, we won't get the templates working.
include = ['jinja2', 'jinja2.ext']

flaskapp = Executable(script="run.py",
                      base="Win32GUI")
```