Dispatching Jobs
The dispatching and collecting of coffee jobs will be done by the Barista. We’ll implement this as a simple Python script using peewee for the database abstraction.
Model
We model everything similarly to the Servitør.
import os
import peewee
import mysql.connector

# MySQL connection settings, each overridable through the environment.
# The defaults suit a local development setup.
MYSQL_USER = os.environ.get('MYSQL_USER', 'root')
MYSQL_PASS = os.environ.get('MYSQL_PASS', 'root')
MYSQL_HOST = os.environ.get('MYSQL_HOST', 'localhost')
MYSQL_PORT = os.environ.get('MYSQL_PORT', '3306')
MYSQL_DB = os.environ.get('MYSQL_DB', 'cafe')

# Shared peewee database handle used by all models below.
mysqldb = peewee.MySQLDatabase(
    MYSQL_DB,
    user=MYSQL_USER,
    password=MYSQL_PASS,
    host=MYSQL_HOST,
    port=int(MYSQL_PORT),
)
class Order(peewee.Model):
    """A customer order; one row per order in the `orders` table.

    Timestamps are stored as ISO-8601 strings (CharFields) rather than
    native datetime columns — presumably to match the Servitør's schema.
    """

    ID = peewee.CharField(column_name='id')
    OrderReceived = peewee.CharField(column_name='order_received', null=True)
    OrderReady = peewee.CharField(column_name='order_ready', null=True)
    OrderRetrieved = peewee.CharField(column_name='order_retrieved', null=True)
    # Number of coffees in the order vs. how many have been brewed so far;
    # the order is ready once these two are equal.
    OrderSize = peewee.IntegerField(column_name='order_size')
    OrderBrewed = peewee.IntegerField(column_name='order_brewed')

    class Meta:
        table_name = 'orders'
        database = mysqldb
class CoffeeListItem(peewee.Model):
    """A single coffee job belonging to an order (`coffee_list_items` table).

    `Machine` is NULL until the job has been dispatched to a coffee
    machine; the Job* timestamps are ISO-8601 strings filled in as the
    job progresses through its lifecycle.
    """

    # The job id assigned by the coffee machine (column `job_id`).
    ID = peewee.CharField(column_name='job_id')
    Product = peewee.CharField(column_name='product')
    # Back-reference to the owning Order row.
    OrderID = peewee.CharField(column_name='order_id')
    OrderReceived = peewee.CharField(column_name='order_received')
    # Base URL of the machine the job was dispatched to (NULL = not yet dispatched).
    Machine = peewee.CharField(column_name='machine', null=True)
    JobStarted = peewee.CharField(column_name='job_started', null=True)
    JobReady = peewee.CharField(column_name='job_ready', null=True)
    JobRetrieved = peewee.CharField(column_name='job_retrieved', null=True)

    class Meta:
        table_name = 'coffee_list_items'
        database = mysqldb
We’ll also read the list of available coffee machines from the environment, as a JSON array of base URLs (this requires `import json`).
# The available coffee machines, read from the environment as a JSON
# array of base URLs; defaults to a single machine on localhost.
_machines_json = os.environ.get('COFFEE_MACHINES', '["http://localhost:1337"]')
coffee_machines = json.loads(_machines_json)
Dispatching new jobs
This requires the requests and datetime libraries
import requests
from datetime import datetime
Let’s first find a coffee machine ready to take a new job.
# Dispatch pending jobs: find a coffee machine ready to take a new job.
for pot in coffee_machines:
    machineStatus = requests.get(pot + "/status")
    if machineStatus.status_code == 200:
        # Find one job in the database that isn't assigned to a machine yet.
        for job in CoffeeListItem.select().where(CoffeeListItem.Machine.is_null(True)).limit(1):
            # Dispatch the job and store the machine's answer in the job record.
            response = requests.post(pot + "/start-job",
                                     data=json.dumps({"product": job.Product}),
                                     headers={"Content-Type": "application/json"})
            jsonResponse = response.json()
            job.Machine = pot
            # BUG FIX: the model field that holds the machine-assigned job id
            # is `ID` (column `job_id`). The original assigned `job.JobID`,
            # a plain attribute peewee never persists on save(), so the job
            # id was lost and retrieval used a stale id.
            job.ID = jsonResponse["jobId"]
            job.JobReady = jsonResponse["jobReady"]
            print("job " + job.ID + " sent to " + job.Machine + ", ready at " + job.JobReady)
            job.JobStarted = datetime.utcnow().isoformat(timespec='seconds')
            job.save()
Collecting finished jobs
We’ll need to do this before dispatching, of course, so that a machine freed by collecting its finished job can accept a new one in the same run.
We check the status of any job that is dispatched but not retrieved yet.
# Collect finished jobs: check every job that has been dispatched to a
# machine but not retrieved yet.
for job in CoffeeListItem.select().where(CoffeeListItem.Machine.is_null(False) & CoffeeListItem.JobRetrieved.is_null(True)):
    # Is the job expected to be ready by now? Both sides are second-
    # resolution ISO-8601 strings, so lexicographic comparison is safe.
    jobReady = datetime.fromisoformat(job.JobReady).isoformat(timespec='seconds')
    present = datetime.utcnow().isoformat(timespec='seconds')
    if jobReady < present:
        # Retrieve the finished job from its machine.
        # (The original parsed the response body into an unused local via
        # response.json(); that dead parse is removed here.)
        requests.get(job.Machine + "/retrieve-job/" + job.ID)
        job.JobRetrieved = datetime.utcnow().isoformat(timespec='seconds')
        job.save()
        print("job " + job.ID + " retrieved from " + job.Machine + " at " + job.JobRetrieved)
        # Store the updated progress on the parent order.
        order = Order.select().where(Order.ID == job.OrderID).get()
        order.OrderBrewed += 1
        # If this was the last job, mark the whole order as done.
        if order.OrderSize == order.OrderBrewed:
            order.OrderReady = job.JobRetrieved
            print("order " + order.ID + " ready")
        # And that's the order updated in either case.
        order.save()
Further work
Exercise
Implement DNS resolution of each coffee pod to possibly multiple A records.
Exercise
Implement a proper logging mechanism, e.g. the logging library.
Exercise
Implement X-REQUEST-IDs to be sent along with each request as headers and logged.