12 files changed: +154 −178 lines changed

@@ -13,7 +13,7 @@ Ensure your `settings.json` has:
 1. Run `poetry install`
 2. Run `poetry shell`
 3. Open up vscode command palette (command + shift + p, and select the .venv folder that was created in this directory as the interpreter)
-4. Run `uvicorn fastapi_starter.app:app --reload`
+4. Run `uvicorn fast_api_starter.app:app --reload`
 5. Curl the streaming endpoint:
    ```
    curl -X GET -H "Content-Type: application/json" http://localhost:8000/extract_resume
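For reference, a minimal Python sketch of step 5 that consumes the streaming endpoint without curl. It assumes the `httpx` package is installed and that the server from step 4 is running on the default `http://localhost:8000`; it is not part of this change.

```python
# Minimal sketch: stream the /extract_resume endpoint from Python.
# Assumes `pip install httpx` and the server from step 4 running locally.
import httpx


def stream_extract_resume() -> None:
    with httpx.stream(
        "GET",
        "http://localhost:8000/extract_resume",
        headers={"Content-Type": "application/json"},
    ) as response:
        # Print each chunk of the streamed response as it arrives.
        for chunk in response.iter_text():
            print(chunk, end="", flush=True)


if __name__ == "__main__":
    stream_extract_resume()
```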
This file was deleted.
@@ -10,23 +10,18 @@ class Education {
   year int
 }
 
-function ExtractResume {
-  input string
-  output Resume
-}
-
-impl<llm, ExtractResume> version1 {
+function ExtractResume(raw_text: string) -> Resume {
   client GPT4
   prompt #"
     Parse the following resume and return a structured representation of the data in the schema below.
 
     Resume:
     ---
-    {#input}
+    {{ raw_text }}
     ---
 
     Output JSON format (only include these fields, and no others):
-    {#print_type(output)}
+    {{ ctx.output_format(prefix=null) }}
 
     Output JSON:
   "#
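The new prompt syntax is Jinja-based. As a rough illustration only (not BAML's actual renderer), here is how the `{{ raw_text }}` substitution behaves in plain Jinja; `ctx.output_format(...)` is a BAML-provided helper with no direct Jinja equivalent, so it is left out of this sketch.

```python
# Rough illustration of the Jinja substitution used by the new prompt syntax.
# This is NOT BAML's renderer; it only shows how {{ raw_text }} gets filled in.
from jinja2 import Template

prompt_template = Template(
    "Parse the following resume and return a structured representation "
    "of the data in the schema below.\n\n"
    "Resume:\n---\n{{ raw_text }}\n---\n\nOutput JSON:"
)

print(prompt_template.render(raw_text="John Doe\nSoftware Engineer"))
```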
@@ -6,23 +6,42 @@ enum Category {
   Question
 }
 
-function ClassifyMessage {
-  input (message: string, message_date: string)
-  output Category[]
+class Message {
+  role Role
+  content string
 }
 
-impl<llm, ClassifyMessage> level1 {
+enum Role {
+  Customer
+  Assistant
+}
+
+template_string PrintMessage(msg: Message, prefix: string?) #"
+  {{ _.chat('user' if msg.role == "Customer" else 'assistant') }}
+  {% if prefix %}
+  {{ prefix }}
+  {% endif %}
+  {{ msg.content }}
+"#
+
+function ClassifyMessage(convo: Message[]) -> Category[] {
   client GPT4
   prompt #"
-    Classify the following INPUT into following:
-    {#print_enum(Category)}
+    {#
+      Prompts are auto-dedented and trimmed.
+      We use JINJA for our prompt syntax
+      (but we added some static analysis to make sure it's valid!)
+    #}
+
+    {{ ctx.output_format(prefix="Classify with the following json:") }}
 
-    INPUT
-    ---
-    date: {#input.message_date}
-    message: {#input.message}
-    ---
+    {% for c in convo %}
+    {{ PrintMessage(c,
+      'This is the message to classify:' if loop.last and convo|length > 1 else null
+    ) }}
+    {% endfor %}
 
+    {{ _.chat('assistant') }}
     JSON array of categories that match:
   "#
 }
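A hedged Python counterpart to the Next.js route further down: build a small conversation and pass it to the new `ClassifyMessage` signature. The `convo` argument and the `Message`/`Role` shapes come from the BAML definitions above; the import paths, the generated type constructors, and the non-streaming call shape are assumptions for illustration.

```python
# Sketch only: calling the new ClassifyMessage with a list of messages.
# Import paths and type constructors for the generated client are assumptions
# and may differ depending on how baml_client was generated.
import asyncio

from baml_client import baml as b                    # assumed import path
from baml_client.baml_types import Message, Role     # assumed types module


async def main() -> None:
    convo = [
        Message(role=Role.Customer, content="I want to cancel my subscription"),
        Message(role=Role.Assistant, content="I can help with that. Can you confirm your email?"),
        Message(role=Role.Customer, content="Why is this so complicated?!"),
    ]
    # Returns Category[] — a list of matching enum values.
    categories = await b.ClassifyMessage(convo=convo)
    print(categories)


asyncio.run(main())
```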
@@ -43,7 +43,7 @@ async def extract_resume():
     - Wrote code in Python and Java
     """
     async def stream_resume(resume):
-        async with b.ExtractResume.stream(resume) as stream:
+        async with b.ExtractResume.stream(raw_text=resume) as stream:
             async for chunk in stream.parsed_stream:
                 print(chunk.delta)
                 if chunk.is_parseable:
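A hedged sketch of how the `stream_resume` helper above could be wired into a FastAPI streaming response. Only `b.ExtractResume.stream(raw_text=...)`, `parsed_stream`, `chunk.delta`, and `chunk.is_parseable` appear in this diff; the route path, the import path, and yielding deltas to the client are assumed illustration.

```python
# Sketch only: exposing the streaming BAML call through FastAPI.
# The BAML calls mirror the diff above; the route wiring is an assumption.
from fastapi import FastAPI
from fastapi.responses import StreamingResponse

from baml_client import baml as b  # assumed import path for the generated client

app = FastAPI()


@app.get("/extract_resume")
async def extract_resume():
    resume = "John Doe\nSoftware Engineer\n- Wrote code in Python and Java"

    async def stream_resume(resume: str):
        async with b.ExtractResume.stream(raw_text=resume) as stream:
            async for chunk in stream.parsed_stream:
                if chunk.is_parseable:
                    # Yield each incremental delta to the HTTP response.
                    yield chunk.delta

    return StreamingResponse(stream_resume(resume), media_type="text/plain")
```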
@@ -13,7 +13,7 @@ packages = [
 python = "^3.11"
 fastapi = "^0.110.0"
 uvicorn = {extras = ["standard"], version = "^0.29.0"}
-baml = "0.17.1"
+baml = "^0.19.0"
 
 
 [build-system]
 export const dynamic = 'force-dynamic'
 
 import b from '../../../baml_client'
+import { Role } from '../../../baml_client/types';
 
 export async function POST(request: Request) {
   const result = await b.ClassifyMessage({
-    message: "I would like to cancel my order!",
-    message_date: "2021-01-01T00:00:00Z",
+    convo: [
+      {
+        role: Role.Customer,
+        content: "I want to cancel my subscription"
+      }
+    ]
   });
 
   return Response.json(result);
@@ -10,23 +10,18 @@ class Education {
   year int
 }
 
-function ExtractResume {
-  input string
-  output Resume
-}
-
-impl<llm, ExtractResume> version1 {
+function ExtractResume(raw_text: string) -> Resume {
   client GPT4
   prompt #"
     Parse the following resume and return a structured representation of the data in the schema below.
 
     Resume:
     ---
-    {#input}
+    {{ raw_text }}
     ---
 
     Output JSON format (only include these fields, and no others):
-    {#print_type(output)}
+    {{ ctx.output_format(prefix=null) }}
 
     Output JSON:
   "#
@@ -6,23 +6,42 @@ enum Category {
   Question
 }
 
-function ClassifyMessage {
-  input (message: string, message_date: string)
-  output Category[]
+class Message {
+  role Role
+  content string
 }
 
-impl<llm, ClassifyMessage> level1 {
+enum Role {
+  Customer
+  Assistant
+}
+
+template_string PrintMessage(msg: Message, prefix: string?) #"
+  {{ _.chat('user' if msg.role == "Customer" else 'assistant') }}
+  {% if prefix %}
+  {{ prefix }}
+  {% endif %}
+  {{ msg.content }}
+"#
+
+function ClassifyMessage(convo: Message[]) -> Category[] {
   client GPT4
   prompt #"
-    Classify the following INPUT into following:
-    {#print_enum(Category)}
+    {#
+      Prompts are auto-dedented and trimmed.
+      We use JINJA for our prompt syntax
+      (but we added some static analysis to make sure it's valid!)
+    #}
+
+    {{ ctx.output_format(prefix="Classify with the following json:") }}
 
-    INPUT
-    ---
-    date: {#input.message_date}
-    message: {#input.message}
-    ---
+    {% for c in convo %}
+    {{ PrintMessage(c,
+      'This is the message to classify:' if loop.last and convo|length > 1 else null
+    ) }}
+    {% endfor %}
 
+    {{ _.chat('assistant') }}
     JSON array of categories that match:
   "#
 }