working dual webserver/daemon

James Turk 2011-07-29 16:53:29 -04:00
parent db718d3190
commit 08aef40662
2 changed files with 33 additions and 58 deletions

View File

@@ -1,9 +1,10 @@
 import multiprocessing
-import signal
 import threading
 import time
 import urllib

+import flask
+
 from oyster.client import Client
 from oyster.web import app
@@ -32,49 +33,6 @@ class UpdateProcess(multiprocessing.Process):
             self.task_q.task_done()


-class Producer(threading.Thread):
-
-    def __init__(self, queue, sleep_length):
-        super(Producer, self).__init__()
-        self.queue = queue
-        self.sleep_length = sleep_length
-        self.client = Client()
-        self._stop = threading.Event()
-
-    def run(self):
-        # go forever
-        while not self.stopped():
-            # get everything overdue and put it into the queue
-            next_set = self.client.get_update_queue()
-            if next_set:
-                for item in next_set:
-                    self.queue.put(item)
-                # do all queued work
-                self.queue.join()
-            else:
-                # allow for a quiet period if queue is exhausted
-                time.sleep(self.sleep_length)
-
-    def stop(self):
-        self._stop.set()
-
-    def stopped(self):
-        return self._stop.is_set()
-
-
-def main_process():
-    num_processes = 4
-
-    work_queue = multiprocessing.JoinableQueue()
-    producer = Producer(work_queue, 60)
-    workers = [UpdateProcess(work_queue) for i in xrange(num_processes)]
-
-    for w in workers:
-        w.start()
-
-    producer.start()
-
-
 def flask_process():
     app.run(debug=True)
@@ -84,20 +42,31 @@ def main():
     debug = True

     work_queue = multiprocessing.JoinableQueue()
-    producer = Producer(work_queue, 60)
     workers = [UpdateProcess(work_queue) for i in xrange(num_processes)]
     server = multiprocessing.Process(target=flask_process)

-    def cleanup(signal, frame):
-        for worker in workers:
-            worker.terminate()
-        producer.stop()
-        server.terminate()
+    # give flask access to our work_queue
+    app.work_queue = work_queue

     for worker in workers:
         worker.start()
-    producer.start()
     server.start()

+    client = Client()
+    while True:
+        # get everything overdue and put it into the queue
+        next_set = client.get_update_queue()
+        if next_set:
+            for item in next_set:
+                work_queue.put(item)
+            # do all queued work
+            work_queue.join()
+        else:
+            # allow for a quiet period if queue is exhausted
+            time.sleep(60)


 if __name__ == '__main__':
     main()
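
For reference, this is roughly how main() reads once the change is applied, stitched together from the new side of the hunks above. It is a sketch, not the full file: num_processes and the UpdateProcess worker class live in parts of the file the diff does not show, so their exact form here is assumed.

def main():
    num_processes = 4   # assumed; defined somewhere above the shown hunk
    debug = True

    work_queue = multiprocessing.JoinableQueue()
    workers = [UpdateProcess(work_queue) for i in xrange(num_processes)]
    server = multiprocessing.Process(target=flask_process)

    # give flask access to our work_queue
    app.work_queue = work_queue

    # start the update workers and the flask webserver as child processes
    for worker in workers:
        worker.start()
    server.start()

    # the main process itself becomes the producer: poll for overdue
    # documents and feed them to the workers via the shared queue
    client = Client()
    while True:
        next_set = client.get_update_queue()
        if next_set:
            for item in next_set:
                work_queue.put(item)
            # block until the workers drain everything that was queued
            work_queue.join()
        else:
            # quiet period if nothing is overdue
            time.sleep(60)

The net effect: the separate Producer thread, the main_process() entry point, and the signal-based cleanup() handler are gone, and the producer loop runs directly in the main process alongside the worker processes and the webserver process.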

View File

@@ -1,19 +1,25 @@
-from flask import Flask, make_response
+import flask

 from oyster.client import Client

-app = Flask('oyster')
+app = flask.Flask('oyster')
+client = Client()


-@app.route('/doc/')
+@app.route('/status/')
 def doc_list():
-    pass
+    status = {
+        'queue_size': app.work_queue.qsize(),
+        'tracking': client.db.tracked.count(),
+        'need_update': client.get_update_queue_size(),
+    }
+    return flask.jsonify(**status)


 @app.route('/doc/<path:url>/<version>')
 def show_doc(url, version):
-    c = Client()
     if version == 'latest':
         version = -1
-    doc = c.get_version(url, version)
-    resp = make_response(doc.read())
+    doc = client.get_version(url, version)
+    resp = flask.make_response(doc.read())
     resp.headers['content-type'] = doc.mimetype
     return resp
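
A quick way to exercise the new /status/ endpoint once the daemon is up. This is a sketch that assumes the flask process is reachable at Flask's default address (127.0.0.1:5000, which is what app.run(debug=True) binds to) and, matching the xrange usage above, uses Python 2 with only the standard library:

import json
import urllib

# fetch the JSON status document exposed by the webserver process
status = json.load(urllib.urlopen('http://127.0.0.1:5000/status/'))
print status['queue_size'], status['tracking'], status['need_update']

Note that queue_size reports the in-memory multiprocessing queue shared with the workers, while need_update comes from client.get_update_queue_size(), so the two counters can legitimately differ.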