tests/testthat/openai/api.openai.com/v1/chat/completions-2f9822-POST.R

structure(list(method = "POST", url = "https://api.openai.com/v1/chat/completions", 
    status_code = 200L, headers = structure(list(date = "Thu, 21 Aug 2025 12:41:43 GMT", 
        `content-type` = "application/json", `access-control-expose-headers` = "X-Request-ID", 
        `openai-organization` = "user-j2dlcmauhfkzofrqjehceoc7", 
        `openai-processing-ms` = "1448", `openai-project` = "proj_wN2FU9W9VUckqc7nz44xte9H", 
        `openai-version` = "2020-10-01", `x-envoy-upstream-service-time` = "1692", 
        `x-ratelimit-limit-requests` = "5000", `x-ratelimit-limit-tokens` = "800000", 
        `x-ratelimit-remaining-requests` = "4999", `x-ratelimit-remaining-tokens` = "799986", 
        `x-ratelimit-reset-requests` = "12ms", `x-ratelimit-reset-tokens` = "1ms", 
        `x-request-id` = "req_27e498e7619b4c06be448aa777a8db69", 
        `cf-cache-status` = "DYNAMIC", `set-cookie` = "REDACTED", 
        `strict-transport-security` = "max-age=31536000; includeSubDomains; preload", 
        `x-content-type-options` = "nosniff", `set-cookie` = "REDACTED", 
        server = "cloudflare", `cf-ray` = "972a34c3ccb0bbc2-FRA", 
        `content-encoding` = "gzip", `alt-svc` = "h3=\":443\"; ma=86400"), redact = character(0), class = "httr2_headers"), 
    body = charToRaw("{\n  \"id\": \"chatcmpl-C6ytqoS5qECdjV3EhnrEANIEcyvgY\",\n  \"object\": \"chat.completion\",\n  \"created\": 1755780102,\n  \"model\": \"gpt-4.1-2025-04-14\",\n  \"choices\": [\n    {\n      \"index\": 0,\n      \"message\": {\n        \"role\": \"assistant\",\n        \"content\": \"Hello, world! 👋 How can I help you today?\",\n        \"refusal\": null,\n        \"annotations\": []\n      },\n      \"logprobs\": null,\n      \"finish_reason\": \"stop\"\n    }\n  ],\n  \"usage\": {\n    \"prompt_tokens\": 21,\n    \"completion_tokens\": 13,\n    \"total_tokens\": 34,\n    \"prompt_tokens_details\": {\n      \"cached_tokens\": 0,\n      \"audio_tokens\": 0\n    },\n    \"completion_tokens_details\": {\n      \"reasoning_tokens\": 0,\n      \"audio_tokens\": 0,\n      \"accepted_prediction_tokens\": 0,\n      \"rejected_prediction_tokens\": 0\n    }\n  },\n  \"service_tier\": \"default\",\n  \"system_fingerprint\": \"fp_daf5fcc80a\"\n}\n"), 
    cache = new.env(parent = emptyenv())), class = "httr2_response")
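The object above is a recorded httr2 response: a deparsed httr2_response whose body holds the raw JSON returned by OpenAI's chat completions endpoint, stored so tests can replay the exchange without calling the live API. The file name (URL path plus a request hash and the HTTP method) appears to follow the naming convention used for httptest2-style mock directories. Below is a minimal sketch, not part of the fixture or of tidyllm's test suite, showing how the recorded object could be loaded and inspected with httr2's response accessors; the source() call, the path, and the variable names are illustrative assumptions.

library(httr2)

# Illustrative only: source() the fixture file and take the value of its
# last expression, which is the httr2_response object shown above.
resp <- source(
  "tests/testthat/openai/api.openai.com/v1/chat/completions-2f9822-POST.R"
)$value

resp_status(resp)                   # 200
resp_header(resp, "content-type")   # "application/json"

# Parse the raw JSON body that was captured in the fixture.
body <- resp_body_json(resp)
body$choices[[1]]$message$content   # "Hello, world! 👋 How can I help you today?"
body$usage$total_tokens             # 34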
