
Commit 25eba0f

Fix bitrot (#326)
Co-authored-by: Daniel Holanda <[email protected]>
Signed-off-by: Jeremy Fowers <[email protected]>

Parent: 690854a

File tree: 3 files changed (+9, -8 lines)


setup.py

5 additions, 7 deletions

@@ -56,7 +56,8 @@
         "zstandard",
         "matplotlib",
         "tabulate",
-        # huggingface-hub==0.31.0 introduces a new transfer protocol that was causing us issues
+        "onnxconverter-common",
+        # huggingface-hub==0.31.0 introduces a new transfer protocol that was causing us issues
         "huggingface-hub==0.30.2",
         # Conditional dependencies for ONNXRuntime backends
         "onnxruntime >=1.10.1,<1.22.0;platform_system=='Linux' and extra != 'llm-oga-cuda'",
@@ -65,8 +66,8 @@
     ],
     extras_require={
         "llm": [
-            "torch>=2.0.0",
-            "transformers",
+            "torch>=2.6.0",
+            "transformers<=4.51.3",
             "accelerate",
             "py-cpuinfo",
             "sentencepiece",
@@ -76,23 +77,20 @@
             "human-eval-windows==1.0.4",
             "fastapi",
             "uvicorn[standard]",
-            "openai>=1.66.0",
+            "openai>=1.81.0",
             "lm-eval[api]",
         ],
         "llm-oga-cpu": [
             "onnxruntime-genai==0.6.0",
-            "torch>=2.0.0,<2.4",
             "turnkeyml[llm]",
         ],
         "llm-oga-igpu": [
             "onnxruntime-genai-directml==0.6.0",
-            "torch>=2.0.0,<2.4",
             "transformers<4.45.0",
             "turnkeyml[llm]",
         ],
         "llm-oga-cuda": [
             "onnxruntime-genai-cuda==0.6.0",
-            "torch>=2.0.0,<2.4",
             "transformers<4.45.0",
             "turnkeyml[llm]",
         ],
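
The net effect of the setup.py edits: onnxconverter-common becomes a base dependency, torch moves to >=2.6.0 (and the <2.4 caps disappear from the OGA extras), transformers is capped at 4.51.3, huggingface-hub stays pinned to 0.30.2, and openai moves to >=1.81.0. As a quick way to see whether an existing environment already satisfies the new pins, one could run a small check like the sketch below. This helper is illustrative only and not part of the commit; it assumes the packaging library is installed.

# Hypothetical helper (not from the commit): compare installed versions against the new pins.
from importlib.metadata import PackageNotFoundError, version
from packaging.specifiers import SpecifierSet

PINS = {
    "torch": ">=2.6.0",
    "transformers": "<=4.51.3",
    "huggingface-hub": "==0.30.2",
    "openai": ">=1.81.0",
}

for name, spec in PINS.items():
    try:
        installed = version(name)
    except PackageNotFoundError:
        print(f"{name}: not installed")
        continue
    status = "ok" if installed in SpecifierSet(spec) else "violates"
    print(f"{name} {installed}: {status} ({spec})")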

src/lemonade/tools/server/serve.py

3 additions, 0 deletions

@@ -762,6 +762,7 @@ async def generate():
        created_event = ResponseCreatedEvent(
            response=response,
            type="response.created",
+            sequence_number=0,
        )
        yield f"data: {created_event.model_dump_json()}\n\n".encode("utf-8")

@@ -776,6 +777,7 @@ async def generate():
            item_id="0 ",
            output_index=0,
            type="response.output_text.delta",
+            sequence_number=0,
        )
        full_response += token

@@ -810,6 +812,7 @@ async def generate():
        completed_event = ResponseCompletedEvent(
            response=response,
            type="response.completed",
+            sequence_number=0,
        )
        yield f"data: {completed_event.model_dump_json()}\n\n".encode("utf-8")

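All three streamed events gain a sequence_number field, presumably because newer openai SDK releases carry a sequence_number on Responses API stream events (the same commit raises the minimum to openai>=1.81.0); here the commit simply sets it to 0 on each event. The sketch below is illustrative only and is not how serve.py is written: it shows how a strictly increasing sequence number could be stamped onto server-sent events, using plain dicts in place of the SDK event classes.

# Illustrative sketch, not the serve.py implementation: stamp an incrementing
# sequence_number onto each streamed SSE payload. Events are plain dicts here.
import itertools
import json

def stamp_sequence_numbers(events):
    """Yield SSE-formatted bytes with an incrementing sequence_number per event."""
    counter = itertools.count()
    for event in events:
        event["sequence_number"] = next(counter)
        yield f"data: {json.dumps(event)}\n\n".encode("utf-8")

# Stand-in events mirroring the event types touched by this commit:
events = [
    {"type": "response.created"},
    {"type": "response.output_text.delta", "output_index": 0, "delta": "Hello"},
    {"type": "response.completed"},
]
for chunk in stamp_sequence_numbers(events):
    print(chunk)
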
src/turnkeyml/version.py

1 addition, 1 deletion

@@ -1 +1 @@
-__version__ = "6.2.4"
+__version__ = "6.2.5"
