from zai import ZhipuAiClient

def main():
    client = ZhipuAiClient()
    # create chat completion with tool calls and streaming
    response = client.chat.completions.create(
        model="glm-4.6",
        messages=[
            {"role": "user", "content": "How is the weather in Beijing and Shanghai? Please provide the answer in Celsius."},
        ],
        tools=[
            {
                "type": "function",
                "function": {
                    "name": "get_weather",
                    "description": "Get the weather information for a specific location",
                    "parameters": {
                        "type": "object",
                        "properties": {
                            "location": {"type": "string", "description": "City, e.g. Beijing, Shanghai"},
                            "unit": {"type": "string", "enum": ["celsius", "fahrenheit"]}
                        },
                        "required": ["location"]
                    }
                }
            }
        ],
        stream=True,      # enable streaming
        tool_stream=True  # enable tool call streaming
    )

    # init variables to collect streaming data
    reasoning_content = ""      # accumulated reasoning content
    content = ""                # accumulated answer content
    final_tool_calls = {}       # collected tool calls, keyed by index
    reasoning_started = False   # whether reasoning output has started
    content_started = False     # whether answer output has started

    # process streaming response
    for chunk in response:
        if not chunk.choices:
            continue

        delta = chunk.choices[0].delta

        # process streaming reasoning output
        if hasattr(delta, 'reasoning_content') and delta.reasoning_content:
            if not reasoning_started and delta.reasoning_content.strip():
                print("\n🧠 Thinking:")
                reasoning_started = True
            reasoning_content += delta.reasoning_content
            print(delta.reasoning_content, end="", flush=True)

        # process streaming answer content output
        if hasattr(delta, 'content') and delta.content:
            if not content_started and delta.content.strip():
                print("\n\n💬 Answer:")
                content_started = True
            content += delta.content
            print(delta.content, end="", flush=True)

        # process streaming tool call info
        if delta.tool_calls:
            for tool_call in delta.tool_calls:
                index = tool_call.index
                if index not in final_tool_calls:
                    # add new tool call
                    final_tool_calls[index] = tool_call
                    final_tool_calls[index].function.arguments = tool_call.function.arguments
                else:
                    # append tool call params by streaming index
                    final_tool_calls[index].function.arguments += tool_call.function.arguments

    # output the final constructed tool call info
    if final_tool_calls:
        print("\n📋 Function Calls Triggered:")
        for index, tool_call in final_tool_calls.items():
            print(f"  {index}: {tool_call.function.name} {tool_call.function.arguments}")

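# --- Optional follow-up (illustrative sketch, not part of the original example) ---
# The streaming loop in main() only collects the tool call arguments. A typical next
# step is to run each requested function locally and send the results back in a
# second request so the model can produce a final answer. Everything below is a
# hypothetical sketch: get_weather() is a stand-in implementation, and the
# assistant/"tool" message format assumes the OpenAI-compatible convention; check
# the zai SDK docs before relying on it.
def answer_with_tool_results(client, messages, final_tool_calls):
    import json

    def get_weather(location, unit="celsius"):
        # placeholder result; a real implementation would query a weather service
        return {"location": location, "temperature": 25, "unit": unit}

    # echo the assistant turn that requested the tools, rebuilt as plain dicts
    assistant_tool_calls = [
        {
            "id": tc.id,
            "type": "function",
            "function": {"name": tc.function.name, "arguments": tc.function.arguments},
        }
        for tc in final_tool_calls.values()
    ]
    follow_up_messages = list(messages) + [{"role": "assistant", "tool_calls": assistant_tool_calls}]

    # one "tool" message per call, carrying the JSON-encoded function result
    for tc in final_tool_calls.values():
        args = json.loads(tc.function.arguments or "{}")
        result = get_weather(**args)
        follow_up_messages.append({
            "role": "tool",
            "tool_call_id": tc.id,
            "content": json.dumps(result),
        })

    # non-streaming follow-up request; the model summarizes the tool results
    follow_up = client.chat.completions.create(model="glm-4.6", messages=follow_up_messages)
    return follow_up.choices[0].message.content

# possible usage from main(), after the streaming loop (reusing the messages list
# passed to the first request):
#   print(answer_with_tool_results(client, messages, final_tool_calls))
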
if __name__ == "__main__":
    main()