import os

from cerebras.cloud.sdk import Cerebras

# Reads the API key from the environment; this is also the SDK's default
# behavior, so the argument could be omitted entirely.
cerebras_client = Cerebras(api_key=os.environ.get("CEREBRAS_API_KEY"))

# Request a text completion with the top-5 token log-probabilities included.
response = cerebras_client.completions.create(
    model="llama3.1-8b",
    prompt="It was a dark and stormy night",
    max_tokens=100,
    logprobs=5,
)
print(response)
{
"id": "chatcmpl-b1743f2d-8c20-4ad5-a77c-426521ae1b1c",
"choices": [
{
"index": 0,
"finish_reason": "length",
"logprobs": {
"text_offset": [30, 34, 40, 44, 47, 52],
"token_logprobs": [
-3.3912997245788574,
-4.322617053985596,
-0.3532562553882599,
-1.7001153230667114,
-2.187103271484375,
-2.4667720794677734
],
"tokens": [" and", " there", " was", " no", " moon", ","],
"top_logprobs": [
{
".": -1.1334871053695679,
",": -1.3366121053695679,
" when": -2.3366122245788574,
" in": -2.9381747245788574,
"...": -3.3287997245788574,
" and": -3.3912997245788574
},
{
" I": -0.9554294943809509,
" the": -1.7679295539855957,
" a": -2.2366795539855957,
" all": -3.1898045539855957,
" we": -3.4241795539855957,
" there": -4.322617053985596
},
{
" was": -0.3532562553882599,
" were": -1.6813812255859375,
" I": -4.0251312255859375,
" wasn": -4.1345062255859375,
" had": -4.1970062255859375
},
{
" a": -0.3719903528690338,
" no": -1.7001153230667114,
" an": -3.614177942276001,
" this": -4.129802703857422,
" nothing": -4.184490203857422
},
{
" electricity": -1.3433531522750854,
" one": -1.6949156522750854,
" moon": -2.187103271484375,
" way": -3.038665771484375,
" sign": -3.054290771484375
},
{
".": -0.8495846390724182,
" to": -2.1386470794677734,
" in": -2.2870845794677734,
",": -2.4667720794677734,
".\n": -3.2714595794677734
}
]
},
"text": " and there was no moon,",
"tokens": null
}
],
"created": 1769297019,
"model": "llama3.1-8b",
"object": "text_completion",
"system_fingerprint": "fp_feb5e1faa8274e54bef0",
"time_info": {
"completion_time": 0.002412253,
"prompt_time": 0.000347391,
"queue_time": 0.000245702,
"total_time": 0.004331350326538086,
"created": 1769297019.2632425
},
"usage": {
"completion_tokens": 6,
"prompt_tokens": 8,
"prompt_tokens_details": {
"cached_tokens": 0
},
"total_tokens": 14
}
}
import os

from cerebras.cloud.sdk import Cerebras

# Reads the API key from the environment; this is also the SDK's default
# behavior, so the argument could be omitted entirely.
cerebras_client = Cerebras(api_key=os.environ.get("CEREBRAS_API_KEY"))

# Request a text completion with the top-5 token log-probabilities included.
response = cerebras_client.completions.create(
    model="llama3.1-8b",
    prompt="It was a dark and stormy night",
    max_tokens=100,
    logprobs=5,
)
print(response)
{
"id": "chatcmpl-b1743f2d-8c20-4ad5-a77c-426521ae1b1c",
"choices": [
{
"index": 0,
"finish_reason": "length",
"logprobs": {
"text_offset": [30, 34, 40, 44, 47, 52],
"token_logprobs": [
-3.3912997245788574,
-4.322617053985596,
-0.3532562553882599,
-1.7001153230667114,
-2.187103271484375,
-2.4667720794677734
],
"tokens": [" and", " there", " was", " no", " moon", ","],
"top_logprobs": [
{
".": -1.1334871053695679,
",": -1.3366121053695679,
" when": -2.3366122245788574,
" in": -2.9381747245788574,
"...": -3.3287997245788574,
" and": -3.3912997245788574
},
{
" I": -0.9554294943809509,
" the": -1.7679295539855957,
" a": -2.2366795539855957,
" all": -3.1898045539855957,
" we": -3.4241795539855957,
" there": -4.322617053985596
},
{
" was": -0.3532562553882599,
" were": -1.6813812255859375,
" I": -4.0251312255859375,
" wasn": -4.1345062255859375,
" had": -4.1970062255859375
},
{
" a": -0.3719903528690338,
" no": -1.7001153230667114,
" an": -3.614177942276001,
" this": -4.129802703857422,
" nothing": -4.184490203857422
},
{
" electricity": -1.3433531522750854,
" one": -1.6949156522750854,
" moon": -2.187103271484375,
" way": -3.038665771484375,
" sign": -3.054290771484375
},
{
".": -0.8495846390724182,
" to": -2.1386470794677734,
" in": -2.2870845794677734,
",": -2.4667720794677734,
".\n": -3.2714595794677734
}
]
},
"text": " and there was no moon,",
"tokens": null
}
],
"created": 1769297019,
"model": "llama3.1-8b",
"object": "text_completion",
"system_fingerprint": "fp_feb5e1faa8274e54bef0",
"time_info": {
"completion_time": 0.002412253,
"prompt_time": 0.000347391,
"queue_time": 0.000245702,
"total_time": 0.004331350326538086,
"created": 1769297019.2632425
},
"usage": {
"completion_tokens": 6,
"prompt_tokens": 8,
"prompt_tokens_details": {
"cached_tokens": 0
},
"total_tokens": 14
}
}
""data: [DONE] message.Default: falsefalsenullnullroot, fcall, nofcall, insidevalue, value, object, array, string, number, funcarray, func, ws.Default: nullseed and parameters should return the same result. Determinism is not guaranteed.Default: nullnulltop_p but not both.Default: 1.0temperature but not both.Default: 1.0return_raw_tokens=True.Default: falsenulllogprobs is 5, the API will return a list of the 5 most likely tokens. The API will always return the logprob of the sampled token, so there may be up to logprobs+1 elements in the response.The max value is 20.Default: nulllogprobs to 0 is different than null. When set to null, log probabilities are disabled entirely. When set to 0, log probabilities are enabled but it does not return top_logprobs.Show properties
stop if the model hit a natural stop point or a provided stop sequence, length if the maximum number of tokens specified in the request was reached, or content_filter if content was omitted due to a flag from our content filters.Show properties
top_logprobs returned.return_raw_tokens is set to true.seed request parameter to understand when backend changes have been made that might impact determinism.Show properties
Show properties
Show properties
import os

from cerebras.cloud.sdk import Cerebras

# Reads the API key from the environment; this is also the SDK's default
# behavior, so the argument could be omitted entirely.
cerebras_client = Cerebras(api_key=os.environ.get("CEREBRAS_API_KEY"))

# Request a text completion with the top-5 token log-probabilities included.
response = cerebras_client.completions.create(
    model="llama3.1-8b",
    prompt="It was a dark and stormy night",
    max_tokens=100,
    logprobs=5,
)
print(response)
{
"id": "chatcmpl-b1743f2d-8c20-4ad5-a77c-426521ae1b1c",
"choices": [
{
"index": 0,
"finish_reason": "length",
"logprobs": {
"text_offset": [30, 34, 40, 44, 47, 52],
"token_logprobs": [
-3.3912997245788574,
-4.322617053985596,
-0.3532562553882599,
-1.7001153230667114,
-2.187103271484375,
-2.4667720794677734
],
"tokens": [" and", " there", " was", " no", " moon", ","],
"top_logprobs": [
{
".": -1.1334871053695679,
",": -1.3366121053695679,
" when": -2.3366122245788574,
" in": -2.9381747245788574,
"...": -3.3287997245788574,
" and": -3.3912997245788574
},
{
" I": -0.9554294943809509,
" the": -1.7679295539855957,
" a": -2.2366795539855957,
" all": -3.1898045539855957,
" we": -3.4241795539855957,
" there": -4.322617053985596
},
{
" was": -0.3532562553882599,
" were": -1.6813812255859375,
" I": -4.0251312255859375,
" wasn": -4.1345062255859375,
" had": -4.1970062255859375
},
{
" a": -0.3719903528690338,
" no": -1.7001153230667114,
" an": -3.614177942276001,
" this": -4.129802703857422,
" nothing": -4.184490203857422
},
{
" electricity": -1.3433531522750854,
" one": -1.6949156522750854,
" moon": -2.187103271484375,
" way": -3.038665771484375,
" sign": -3.054290771484375
},
{
".": -0.8495846390724182,
" to": -2.1386470794677734,
" in": -2.2870845794677734,
",": -2.4667720794677734,
".\n": -3.2714595794677734
}
]
},
"text": " and there was no moon,",
"tokens": null
}
],
"created": 1769297019,
"model": "llama3.1-8b",
"object": "text_completion",
"system_fingerprint": "fp_feb5e1faa8274e54bef0",
"time_info": {
"completion_time": 0.002412253,
"prompt_time": 0.000347391,
"queue_time": 0.000245702,
"total_time": 0.004331350326538086,
"created": 1769297019.2632425
},
"usage": {
"completion_tokens": 6,
"prompt_tokens": 8,
"prompt_tokens_details": {
"cached_tokens": 0
},
"total_tokens": 14
}
}
Was this page helpful?