# load environment variables from .env if it exists
# (expected to include OPENAI_API_KEY for LLM inference)
ifneq (,$(wildcard ./.env))
    include .env
    export
endif
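
# a minimal .env sketch (values below are placeholders):
#   OPENAI_API_KEY=<your OpenAI API key>
#   HUGGINGFACE_USERNAME=<your Hugging Face username>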

.PHONY: help
.DEFAULT_GOAL := help

help: ## get a list of all the targets, and their short descriptions
	@# source for the incantation: https://marmelab.com/blog/2016/02/29/auto-documented-makefile.html
	@grep -E '^[a-zA-Z_-]+:.*?## .*$$' Makefile | awk 'BEGIN {FS = ":.*?##"}; {printf "\033[36m%-12s\033[0m %s\n", $$1, $$2}'
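# running `make help` prints each documented target with its description, e.g.:
#   help          get a list of all the targets, and their short descriptions
#   serve         serve this locally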

API_URL=https://$(HUGGINGFACE_USERNAME)-ask-fsdl.hf.space/run
PROMPT=What is PyTorch?
remote_request: ## send a request to the instance on Hugging Face
	@if [ -z "$(HUGGINGFACE_USERNAME)" ]; then\
		echo "###";\
		echo "# 🥞: Add a HUGGINGFACE_USERNAME to the .env file to run remotely";\
		echo "###";\
	else\
		echo "$(PROMPT)";\
		echo "###";\
		echo "# 🥞: Running inference at $(API_URL)";\
		echo "###";\
		(echo '{ "data": ["'"$(PROMPT)"'"]}') \
			| curl -s -X POST $(API_URL)/predict -H 'Content-Type: application/json' -d @-;\
	fi
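
# usage sketch: PROMPT can be overridden on the command line (the prompt below is just an example):
#   make remote_request PROMPT="What is a Makefile?"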

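# note: the gradio CLI (rather than `python app.py`) typically hot-reloads on edits to app.py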
serve: ## serve this locally
	gradio app.py

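# note: assumes an active Python environment (e.g. a virtualenv or conda env) with pip available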
environment:  ## installs requirements
	pip install -q -r requirements.txt