2 changes: 2 additions & 0 deletions .gitignore
@@ -16,3 +16,5 @@ config.json
venv/

tmp/

*.db
12 changes: 8 additions & 4 deletions docker-compose.yml
@@ -12,12 +12,16 @@ services:
      - ~/.ssh/id_ed25519-tunnel:/app/ssh_key
      - ~/.ssh/known_hosts:/app/known_hosts
      - "/etc/cups/ppd/:/etc/cups/ppd"
      - quasar_data:/tmp/
    tty: true

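# named volume holding /tmp so the sqlite jobs db (written to /tmp/jobs.db by
# default in server.py) persists across container restarts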
volumes:
  quasar_data:

# we attach the print container to an external docker
# network called "poweredge". we do this so a prometheus
# container can pull metrics from the server over HTTP
# and relay the metrics to a Grafana dashboard.
networks:
  default:
    external:
150 changes: 150 additions & 0 deletions printer/gerard.py
@@ -0,0 +1,150 @@
"""
## changes for right now
- sqlite_helpers.py should only contain code that has to do with inserting and querying job ids
- follow the below doc to create an iterator that will return an incrementing number
https://docs.google.com/document/d/1OUAKLbre3m9d6-gywAfYpqgTGErKRRWy_UAHWk1D67A/edit?tab=t.0#heading=h.kce7czbd2le8
- we will use the above to create the numerical suffix for the job id, not sqlite
- abstract the below code to a function called create_print_job. it will return a string print job id, or None if there was some error
```
print_job = subprocess.Popen(
    command,
    shell=True,
    stdout=subprocess.PIPE,
    stderr=subprocess.PIPE,
    text=True,
)
print_job.wait()

if print_job.returncode != 0:
    logging.error(
        f"command returned code {print_job.returncode} stderr: {print_job.stderr.read()} stdout: {print_job.stdout.read()}"
    )
    return None
try:
    print_id = print_job.stdout.read().strip().split(" ")[3]
```
- once we have the above working, add an if statement for if we are in dev mode
- if we are in dev mode, create a fake print job and return it
- make sure your code writes this dynamically generated job id to sqlite

**make a pr with just the above changes, the movement of job id to a function and the creation of the iterator class**
once this is done, we will discuss how to mock lpstat job ids, i.e. how to match the numbers up with what the iterator returns; a rough sketch follows below
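
one possible shape, purely illustrative (the helper name and the line format are assumptions, nothing is decided yet):
```
def fake_lpstat_line(n: int) -> str:
    # roughly mimics one line of `lpstat -o`: "<job-id> <owner> <size> <date>"
    return f"HP_LaserJet_p2015dn_Right-{n} dev 1024 Thu 01 Jan 1970"
```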

"""


import logging
import sqlite3
import time
import subprocess
import sqlite_helpers

LPSTAT_CMD = "lpstat -o HP_LaserJet_p2015dn_Right"
DEBUG_PTH = "./tmp.db"
DEBUG = True
SLEEP_TIME = 1
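# DEBUG short-circuits create_print_job below to fabricate job ids instead of
# shelling out; DEBUG_PTH is the local sqlite file used in that mode (covered
# by the new *.db entry in .gitignore)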

running_jobs = set()
current_jobs = set()
logger = logging.getLogger(__name__)

class IDIterator:
    """Yields an incrementing integer; used as the numeric suffix of dev-mode job ids."""

    def __init__(self):
        self._current = 0

    def __iter__(self):
        return self

    def __next__(self):
        next_id = self._current
        self._current += 1
        return next_id


# module-level instance; named to avoid shadowing the `iter` builtin
id_iter = IDIterator()
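# example behavior: next(id_iter) -> 0, then 1, then 2, so successive dev-mode
# job ids look like "HP_LaserJet_p2015dn_Right-0", "HP_LaserJet_p2015dn_Right-1", ...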


def create_print_job(cmd=""):
if DEBUG:
job_id = f"HP_LaserJet_p2015dn_Right-{next(iter)}"
sqlite_helpers.insert_print_job(DEBUG_PTH, job_id)
return job_id

print_job = subprocess.Popen(
cmd,
shell=True,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
text=True,
)
print_job.wait()

if print_job.returncode != 0:
logging.error(
f"command returned code {print_job.returncode} stderr: {print_job.stderr.read()} stdout: {print_job.stdout.read()}"
)
return None
try:
print_id = print_job.stdout.read().strip().split(" ")[3]
sqlite_helpers.insert_print_job(DEBUG_PTH, print_id)
return print_id
except Exception as e:
logging.error(f"There was an error printing: {e}")
return None
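# usage sketch (illustrative; the caller builds the actual lp command):
#   create_print_job("lp -d HP_LaserJet_p2015dn_Right /tmp/file.pdf")
#   -> "HP_LaserJet_p2015dn_Right-42" on success, None on failure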



def print_db(sqlite_file: str):
    sql_query = "SELECT * FROM logs"
    db = sqlite3.connect(sqlite_file)
    cursor = db.cursor()
    cursor.execute(sql_query)
    print("-------------------------------")
    for x in cursor.fetchall():
        print(x)
    print("-------------------------------")
    db.close()


def update_completed_jobs(sqlite_file):
    global running_jobs, current_jobs
    db = sqlite3.connect(sqlite_file)
    cursor = db.cursor()

    # everything in the previous poll's set that is NOT in the current set
    # has left the queue, i.e. it finished printing
    completed_jobs = running_jobs.difference(current_jobs)
    completed_job_ids = [(job_id,) for job_id in completed_jobs]

    sql_update = "UPDATE logs SET status = 'completed' WHERE job_id = ?"
    cursor.executemany(sql_update, completed_job_ids)
    db.commit()
    db.close()

    running_jobs.clear()
    running_jobs.update(current_jobs)
    current_jobs.clear()
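
# example: if the previous poll saw {"...-1", "...-2"} and the current poll
# saw only {"...-2"}, then job "...-1" is marked completed in sqlite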

def query_lpstat(sqlite_file, cmd):
    global running_jobs, current_jobs
    p = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE, text=True)
    p.wait()

    if p.returncode != 0:
        logger.error(p.stderr.read())
        raise subprocess.CalledProcessError(p.returncode, cmd)

    output = p.stdout.read().strip()
    if len(output) == 0:
        update_completed_jobs(sqlite_file)
        return

    # one pass does two things: record every job id seen in this poll and add
    # any new ones to the running set
    jobs = output.split("\n")
    for job in jobs:
        job_id = job.strip().split(" ")[0]
        current_jobs.add(job_id)
        running_jobs.add(job_id)

    update_completed_jobs(sqlite_file)
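
# each `lpstat -o` line looks roughly like
#   "HP_LaserJet_p2015dn_Right-37 alice 1024 Mon 01 Jan 2024 ..."
# and the leading token is the job id we track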

def poll_lpstat(sqlite_file):
    while True:
        try:
            query_lpstat(sqlite_file, LPSTAT_CMD)
        except Exception as e:
            logger.error(f"Error occurred: {e}")
        time.sleep(SLEEP_TIME)
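
# runs forever; meant to be started on a daemon thread, e.g. from server.py:
#   threading.Thread(target=gerard.poll_lpstat, args=(db_path,), daemon=True).start()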

51 changes: 24 additions & 27 deletions printer/server.py
@@ -7,6 +7,7 @@
import time
import uuid
import collector
import gerard

from fastapi import FastAPI, File, Form, HTTPException, UploadFile
from fastapi.middleware.cors import CORSMiddleware
@@ -15,6 +16,7 @@
import uvicorn

from metrics import MetricsHandler
import sqlite_helpers


metrics_handler = MetricsHandler.instance()
@@ -72,6 +74,11 @@ def get_args() -> argparse.Namespace:
help="update sleepy time, default is 2mins",
default=2,
)
parser.add_argument(
"--database-file-path",
help="path to sqlite database file",
default="/tmp/jobs.db"
)
return parser.parse_args()


@@ -118,34 +125,13 @@ def send_file_to_printer(
f"server is in development mode, command would've been `{command}`"
)
return None

job_id = gerard.create_print_job(command)
if not job_id:
return ""
return job_id

    print_job = subprocess.Popen(
        command,
        shell=True,
        stdout=subprocess.PIPE,
        stderr=subprocess.PIPE,
        text=True,
    )
    print_job.wait()

    if print_job.returncode != 0:
        logging.error(
            f"command returned code {print_job.returncode} stderr: {print_job.stderr.read()} stdout: {print_job.stdout.read()}"
        )
        return None
    try:
        print_id = print_job.stdout.read().strip().split(" ")[3]
        logging.info(f"extracted print id is {print_id}")
        return print_id
    except Exception:
        logging.exception(
            f"failed to extract print id from stdout: {print_job.stdout.read()}"
        )
        # need to find a better value to return when the command exited
        # with code 0 but the output could not be parsed for a job id.
        return ''



def maybe_delete_pdf(file_path):
    if args.dont_delete_pdfs:
        logging.info(
Expand Down Expand Up @@ -221,6 +207,17 @@ async def read_item(
    daemon=True,
)
t.start()

sqlite_helpers.maybe_create_table(args.database_file_path)

# poll lpstat on a background thread so completed jobs get marked in sqlite;
# poll_lpstat lives in gerard (sqlite_helpers only handles inserts/queries),
# and args must be a one-element tuple, hence the trailing comma
t2 = threading.Thread(
    target=gerard.poll_lpstat,
    args=(args.database_file_path,),
    daemon=True,
)
t2.start()

if not args.development and os.path.exists(args.config_json_path):
    thread = threading.Thread(
55 changes: 55 additions & 0 deletions printer/sqlite_helpers.py
@@ -0,0 +1,55 @@
import datetime
import logging
import sqlite3

logger = logging.getLogger(__name__)

def maybe_create_table(sqlite_file: str) -> bool:
    db = sqlite3.connect(sqlite_file)
    cursor = db.cursor()

    try:
        create_table_query = """
            CREATE TABLE IF NOT EXISTS logs (
                date DATETIME DEFAULT CURRENT_TIMESTAMP,
                job_id TEXT NOT NULL,
                status TEXT CHECK (status IN ('pending', 'completed')) NOT NULL DEFAULT 'pending',
                PRIMARY KEY (date, job_id)
            )
        """

        cursor.execute(create_table_query)
        db.commit()
        return True
    except Exception:
        logger.exception("Unable to create printer table")
        return False
    finally:
        db.close()
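
# note: the (date, job_id) composite primary key only rejects exact duplicate
# pairs, so a job_id can recur under a different timestamp; a true duplicate
# surfaces as the sqlite3.IntegrityError caught in insert_print_job below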

def insert_print_job(sqlite_file: str, job_id: str):
    try:
        # the connection context manager commits on success and rolls back on
        # error, so no explicit commit is needed
        with sqlite3.connect(sqlite_file, timeout=10.0) as db:
            cursor = db.cursor()
            timestamp = datetime.datetime.now()
            sql = "INSERT INTO logs (job_id) VALUES (?)"
            cursor.execute(sql, (job_id,))
        return timestamp
    except sqlite3.IntegrityError:
        return None
    except Exception:
        logger.exception("Inserting print job had an error")
        return None

def get_logs(sqlite_file):
    db = sqlite3.connect(sqlite_file)
    cursor = db.cursor()

    sql = """
        SELECT * FROM logs
        ORDER BY date
    """
    cursor.execute(sql)
    result = cursor.fetchall()
    db.close()
    return result
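
# rows come back in schema order, e.g.
#   ('2024-01-01 12:00:00', 'HP_LaserJet_p2015dn_Right-0', 'pending')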