commit c9a137a9a9 (parent 368d7bbe90)
@@ -0,0 +1,96 @@
# ---> Python
# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]
*$py.class

# C extensions
*.so

# Distribution / packaging
.Python
env/
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
wheels/
*.egg-info/
.installed.cfg
*.egg

# PyInstaller
# Usually these files are written by a python script from a template
# before PyInstaller builds the exe, so as to inject date/other infos into it.
*.manifest
*.spec

# Installer logs
pip-log.txt
pip-delete-this-directory.txt

# Unit test / coverage reports
htmlcov/
.tox/
.coverage
.coverage.*
.cache
nosetests.xml
coverage.xml
*,cover
.hypothesis/

# Translations
*.mo
*.pot

# Django stuff:
*.log
local_settings.py

# Flask stuff:
instance/
.webassets-cache

# Scrapy stuff:
.scrapy

# Sphinx documentation
docs/_build/

# PyBuilder
target/

# Jupyter Notebook
.ipynb_checkpoints

# pyenv
.python-version

# celery beat schedule file
celerybeat-schedule

# SageMath parsed files
*.sage.py

# dotenv
.env

# virtualenv
.venv
venv/
ENV/

# Spyder project settings
.spyderproject

# Rope project settings
.ropeproject
@@ -0,0 +1,14 @@
FROM python:alpine

WORKDIR /app
COPY . .

RUN pip install --no-cache-dir --requirement requirements.txt

RUN addgroup -g 9999 lilia

EXPOSE 5000

USER nobody:lilia

ENTRYPOINT ["python", "app.py"]
@@ -0,0 +1,111 @@
import logging
import os

import flask
import flask_apscheduler
import flask_restful
import flask_restful.fields
import flask_restful.reqparse
import sqlalchemy
import sqlalchemy.engine

from db import db, Product


app = flask.Flask(__name__)
app.logger.setLevel(logging.INFO)
app.config.update(
    ERROR_404_HELP=False,
    SQLALCHEMY_TRACK_MODIFICATIONS=False,
    SQLALCHEMY_DATABASE_URI=os.getenv('SQLALCHEMY_DATABASE_URI'),
    SCHEDULER_TIMEZONE='UTC',
    # Periodically re-sync products via sync.Sync.sync_products every 300 seconds.
    SCHEDULER_JOBS=[
        dict(id='sync_products',
             func='sync:Sync.sync_products',
             args=(app, db),
             max_instances=1,
             trigger='interval',
             seconds=300)])

# For SQLite, switch every new connection to WAL journaling with relaxed fsync.
if app.config.get('SQLALCHEMY_DATABASE_URI', '').startswith('sqlite://'):
    @sqlalchemy.event.listens_for(sqlalchemy.engine.Engine, 'connect')
    def set_sqlite_pragma(dbapi_connection, connection_record):
        dbapi_connection.execute('PRAGMA journal_mode=WAL')
        dbapi_connection.execute('PRAGMA synchronous=NORMAL')

db.init_app(app)
db.create_all(app=app)

scheduler = flask_apscheduler.APScheduler()
scheduler.init_app(app)

api = flask_restful.Api(app)


product_fields = {
    'id': flask_restful.fields.Integer(),
    'name': flask_restful.fields.String(),
    'product_name': flask_restful.fields.String(),
    'price': flask_restful.fields.String(),
    'time_left': flask_restful.fields.String(),
    'days_left': flask_restful.fields.Integer(),
    'url': flask_restful.fields.String(),
    'image_url': flask_restful.fields.String(),
}


filter_parser = flask_restful.reqparse.RequestParser()
filter_parser.add_argument('filter', type=str)
filter_parser.add_argument('type', type=str)
filter_parser.add_argument('sort_by', type=str)
filter_parser.add_argument('sort_order', type=str)
filter_parser.add_argument('page_number', type=int)
filter_parser.add_argument('page_size', type=int)


class ProductResource(flask_restful.Resource):
    @flask_restful.marshal_with(product_fields)
    def get(self, id):
        q = db.session.query(Product).filter(Product.id == id)
        product = q.first()
        if not product:
            flask_restful.abort(404, message='Product {0} does not exist'.format(id))
        return product, 200


class ProductsResource(flask_restful.Resource):
    @flask_restful.marshal_with(product_fields)
    def get(self):
        # Optional filtering, sorting (including 'random' order) and paging via query string.
        args = filter_parser.parse_args()
        q = db.session.query(Product)
        if args['filter']:
            q = q.filter(Product.name.ilike('%{}%'.format(args['filter'])))
        if args['type']:
            q = q.filter(Product.product_name.ilike('%{}%'.format(args['type'])))
        count = q.count()
        if args['sort_order'] == 'random':
            q = q.order_by(sqlalchemy.func.random())
        elif args['sort_by']:
            col = getattr(Product, args['sort_by'], None)
            if col:
                if args['sort_order']:
                    order_by = getattr(col, args['sort_order'], None)
                    if order_by:
                        q = q.order_by(order_by())
                else:
                    q = q.order_by(col)
        if args['page_size']:
            q = q.limit(args['page_size'])
        if args['page_number'] and args['page_size']:
            q = q.offset(args['page_number'] * args['page_size'])
        products = q.all()
        return products, 200, {'X-Total-Count': count}


api.add_resource(ProductResource, '/products/<int:id>')
api.add_resource(ProductsResource, '/products')


if __name__ == '__main__':
    scheduler.start()
    app.run(host='0.0.0.0', threaded=True, debug=False)
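A minimal sketch of how a client might exercise the /products endpoints above, assuming the app is reachable on localhost:5000 (the port exposed by the Dockerfile) and that the plain requests package is available; the query values are purely illustrative.

    import requests

    # List products, filtered by name, sorted by price ascending, first page of 10.
    resp = requests.get('http://localhost:5000/products',
                        params={'filter': 'hoodie',   # hypothetical search term
                                'sort_by': 'price',
                                'sort_order': 'asc',
                                'page_number': 0,
                                'page_size': 10})
    print(resp.headers.get('X-Total-Count'), resp.json())

    # Fetch a single product by id (404 with a message if it does not exist).
    print(requests.get('http://localhost:5000/products/1').json())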
@@ -0,0 +1,17 @@
import flask_sqlalchemy


db = flask_sqlalchemy.SQLAlchemy(session_options=dict(autoflush=False))


class Product(db.Model):
    __tablename__ = 'products'

    id = db.Column(db.Integer, primary_key=True)
    name = db.Column(db.String)
    product_name = db.Column(db.String)
    price = db.Column(db.String)
    time_left = db.Column(db.String)
    days_left = db.Column(db.Integer)
    url = db.Column(db.String)
    image_url = db.Column(db.String)
@@ -0,0 +1,5 @@
Flask
Flask-APScheduler
Flask-RESTful
Flask-SQLAlchemy
requests-futures
@@ -0,0 +1,47 @@
import datetime
import os

from db import Product
from teespring import Teespring


class Sync(object):
    @staticmethod
    def _get(d, *keys, default=None):
        try:
            result = None
            for key in keys:
                if result:
                    if isinstance(result, list):
                        result = result[key]
                    else:
                        result = result.get(key, default)
                else:
                    result = d.get(key, default)
            return result
        except (KeyError, IndexError):
            return default

    @classmethod
    def sync_products(cls, app, db):
        app.logger.info('Starting synchronization of products')
        with app.app_context():
            teespring = Teespring(os.getenv('TEESPRING_STORE_NAME'))
            for prod in teespring.fetch_products():
                id = cls._get(prod, 'id')
                if not id:
                    continue
                q = db.session.query(Product).filter(Product.id == id)
                product = q.first()
                if not product:
                    product = Product(id=id)
                product.name = cls._get(prod, 'name')
                product.product_name = cls._get(prod, 'product_name')
                product.price = cls._get(prod, 'price')
                product.time_left = cls._get(prod, 'time_left')
                product.days_left = cls._get(prod, 'days_left')
                product.url = cls._get(prod, 'url')
                product.image_url = cls._get(prod, 'image_url')
                db.session.add(product)
            db.session.commit()
        app.logger.info('Synchronization of products completed')
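A small, made-up illustration of how the Sync._get helper above walks nested dicts and lists; the sample payload is invented, and importing sync assumes the project's dependencies (Flask-SQLAlchemy, etc.) are installed.

    from sync import Sync

    prod = {'name': 'Sample Tee', 'images': [{'src': 'https://example.com/tee.png'}]}  # made-up payload
    Sync._get(prod, 'name')                  # -> 'Sample Tee'
    Sync._get(prod, 'images', 0, 'src')      # -> 'https://example.com/tee.png'
    Sync._get(prod, 'missing', default='')   # -> ''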
@@ -0,0 +1,37 @@
import json

from urllib.parse import urlparse, parse_qs

from requests_futures.sessions import FuturesSession


BASE_URL = 'https://teespring.com'


class Teespring(object):
    def __init__(self, store_name):
        self.store_name = store_name

    def fetch_products(self):
        session = FuturesSession()
        def get_products(page):
            url = '{0}/api/stores/{1}/store_products'.format(BASE_URL, self.store_name)
            params = dict(page=page)
            return session.get(url, params=params, headers={'Accept': 'application/json'})
        result = []
        page = 1
        while True:
            request = get_products(page)
            r = request.result()
            if not r.ok:
                return []
            data = r.json()
            for product in data.get('products', []):
                product['url'] = BASE_URL + product['url']
                result.append(product)
            next_url = data.get('next')
            if not next_url:
                break
            q = parse_qs(urlparse(next_url).query)
            # Guard against a 'next' link without a page parameter (avoids an IndexError).
            page = (q.get('page') or [None])[0]
            if page is None:
                break
        return result
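A minimal sketch of calling the Teespring client above directly; 'my-store' is a placeholder (in the app the name comes from TEESPRING_STORE_NAME), and network access plus the requests-futures package are assumed.

    from teespring import Teespring

    client = Teespring('my-store')  # placeholder store name
    for product in client.fetch_products():
        print(product.get('id'), product.get('name'), product.get('url'))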