Orq MCP is live: Use natural language to interrogate traces, spot regressions, and experiment your way to optimal AI configurations. Available in Claude Desktop, Claude Code, Cursor, and more. Start now →
curl --request GET \
--url https://api.orq.ai/v2/annotation-queues/{annotation_queue_id}/items/{item_id} \
--header 'Authorization: Bearer <token>'

{
"trace_id": "<string>",
"type": "span.generic",
"input": {},
"output": {},
"_id": "01KM333F8KCHWAFR6227N01Y7N",
"name": "<string>",
"duration": 123,
"session_id": "<string>",
"context": {
"trace_id": "<string>",
"span_id": "<string>"
},
"parent_id": "<string>",
"start_time": "<string>",
"end_time": "<string>",
"events": [
{}
],
"attributes": {
"type": "generic",
"gen_ai": {
"operation": {
"name": "<string>"
},
"request": {
"model": "<string>",
"frequency_penalty": 123,
"max_tokens": 123,
"presence_penalty": 123,
"seed": 123,
"temperature": 123,
"top_p": 123,
"top_k": 123,
"stream": false,
"choice": {
"count": 123
},
"stop_sequences": [
"<string>"
],
"encoding_formats": [
"<string>"
],
"response_format": "<unknown>"
},
"system": "<string>",
"system_instructions": "<string>",
"response": {
"finish_reasons": [
"<string>"
],
"id": "<string>",
"model": "<string>"
},
"usage": {
"input_tokens": 1,
"output_tokens": 1,
"prompt_tokens": 1,
"completion_tokens": 1,
"total_tokens": 1,
"prompt_tokens_details": {
"cached_tokens": 1,
"audio_tokens": 1
},
"completion_tokens_details": {
"reasoning_tokens": 1,
"audio_tokens": 1,
"accepted_prediction_tokens": 1,
"rejected_prediction_tokens": 1
}
},
"agent": {
"id": "<string>",
"name": "<string>",
"description": "<string>",
"source": "internal"
},
"conversation": {
"id": "<string>"
},
"tool": {
"name": "<string>",
"call": {
"id": "<string>",
"arguments": "<unknown>",
"result": "<unknown>"
},
"description": "<string>",
"type": "function",
"definitions": "<unknown>"
},
"data_source": {
"id": "<string>"
},
"input": {
"messages": [
"<unknown>"
],
"prompt": "<string>"
},
"output": {
"messages": [
"<unknown>"
],
"type": "text",
"completion": "<string>"
},
"provider": {
"name": "openai"
},
"token": {
"type": "input"
},
"embeddings": {
"dimension": {
"count": 123
}
},
"evaluation": {
"name": "<string>",
"score": {
"value": 123,
"label": "<string>"
},
"explanation": "<string>"
}
},
"orq": {
"related_entities": {},
"workspace_id": "<string>",
"project_id": "<string>",
"contact_id": "<string>",
"api_key_id": "<string>",
"thread_id": "<string>",
"product": "<string>",
"billing": {
"total_cost": 1,
"input_cost": 1,
"output_cost": 1,
"billable": true,
"integration_id": "orq"
},
"latency": 123,
"costs": 123,
"object_name": "<string>",
"variables": {},
"context": {},
"evaluations": [
{
"id": "<string>",
"evaluation_type": "human_review",
"human_review_id": "<string>",
"reviewed_by_id": "<string>",
"type": "string",
"value": "<string>",
"source": "orq",
"reviewed_at": "2026-03-19T13:03:36.213Z"
}
],
"duration": 123,
"trace": {
"framework": {
"name": "<string>",
"version": "<string>"
}
},
"internal": true,
"settings": {
"engine": "text"
}
},
"otel": {
"status_code": "OK",
"status_description": "<string>"
},
"http": {
"response": {
"status_code": 123
}
},
"openresponses": {
"input": [
{
"id": "<string>",
"type": "item_reference"
}
],
"output": [
{
"id": "<string>",
"status": "in_progress",
"role": "user",
"content": [
{
"text": "<string>",
"type": "input_text"
}
],
"type": "message"
}
],
"object": "<string>",
"created_at": 123,
"status": "<string>",
"completed_at": "<string>",
"incomplete_details": "<string>",
"error": "<string>",
"previous_response_id": "<string>",
"instructions": "<string>",
"truncation": "auto",
"service_tier": "auto",
"safety_identifier": "<string>",
"prompt_cache_key": "<string>",
"user": "<string>",
"temperature": 123,
"top_p": 123,
"presence_penalty": 123,
"frequency_penalty": 123,
"parallel_tool_calls": true,
"stream": true,
"background": true,
"store": true,
"top_logprobs": 123,
"max_output_tokens": 123,
"max_tool_calls": 123,
"tools": [
{
"name": "<string>",
"description": "<string>",
"parameters": {},
"strict": true,
"type": "function"
}
],
"tools_count": 123,
"tool_choice": "<string>",
"metadata": {},
"text": "<string>",
"stream_options": "<string>",
"reasoning": "<string>",
"include": "<string>",
"include_count": 123
},
"metadata": {}
}
}

Retrieves an item from the specified annotation queue.
curl --request GET \
--url https://api.orq.ai/v2/annotation-queues/{annotation_queue_id}/items/{item_id} \
--header 'Authorization: Bearer <token>'

{
"trace_id": "<string>",
"type": "span.generic",
"input": {},
"output": {},
"_id": "01KM333F8KCHWAFR6227N01Y7N",
"name": "<string>",
"duration": 123,
"session_id": "<string>",
"context": {
"trace_id": "<string>",
"span_id": "<string>"
},
"parent_id": "<string>",
"start_time": "<string>",
"end_time": "<string>",
"events": [
{}
],
"attributes": {
"type": "generic",
"gen_ai": {
"operation": {
"name": "<string>"
},
"request": {
"model": "<string>",
"frequency_penalty": 123,
"max_tokens": 123,
"presence_penalty": 123,
"seed": 123,
"temperature": 123,
"top_p": 123,
"top_k": 123,
"stream": false,
"choice": {
"count": 123
},
"stop_sequences": [
"<string>"
],
"encoding_formats": [
"<string>"
],
"response_format": "<unknown>"
},
"system": "<string>",
"system_instructions": "<string>",
"response": {
"finish_reasons": [
"<string>"
],
"id": "<string>",
"model": "<string>"
},
"usage": {
"input_tokens": 1,
"output_tokens": 1,
"prompt_tokens": 1,
"completion_tokens": 1,
"total_tokens": 1,
"prompt_tokens_details": {
"cached_tokens": 1,
"audio_tokens": 1
},
"completion_tokens_details": {
"reasoning_tokens": 1,
"audio_tokens": 1,
"accepted_prediction_tokens": 1,
"rejected_prediction_tokens": 1
}
},
"agent": {
"id": "<string>",
"name": "<string>",
"description": "<string>",
"source": "internal"
},
"conversation": {
"id": "<string>"
},
"tool": {
"name": "<string>",
"call": {
"id": "<string>",
"arguments": "<unknown>",
"result": "<unknown>"
},
"description": "<string>",
"type": "function",
"definitions": "<unknown>"
},
"data_source": {
"id": "<string>"
},
"input": {
"messages": [
"<unknown>"
],
"prompt": "<string>"
},
"output": {
"messages": [
"<unknown>"
],
"type": "text",
"completion": "<string>"
},
"provider": {
"name": "openai"
},
"token": {
"type": "input"
},
"embeddings": {
"dimension": {
"count": 123
}
},
"evaluation": {
"name": "<string>",
"score": {
"value": 123,
"label": "<string>"
},
"explanation": "<string>"
}
},
"orq": {
"related_entities": {},
"workspace_id": "<string>",
"project_id": "<string>",
"contact_id": "<string>",
"api_key_id": "<string>",
"thread_id": "<string>",
"product": "<string>",
"billing": {
"total_cost": 1,
"input_cost": 1,
"output_cost": 1,
"billable": true,
"integration_id": "orq"
},
"latency": 123,
"costs": 123,
"object_name": "<string>",
"variables": {},
"context": {},
"evaluations": [
{
"id": "<string>",
"evaluation_type": "human_review",
"human_review_id": "<string>",
"reviewed_by_id": "<string>",
"type": "string",
"value": "<string>",
"source": "orq",
"reviewed_at": "2026-03-19T13:03:36.213Z"
}
],
"duration": 123,
"trace": {
"framework": {
"name": "<string>",
"version": "<string>"
}
},
"internal": true,
"settings": {
"engine": "text"
}
},
"otel": {
"status_code": "OK",
"status_description": "<string>"
},
"http": {
"response": {
"status_code": 123
}
},
"openresponses": {
"input": [
{
"id": "<string>",
"type": "item_reference"
}
],
"output": [
{
"id": "<string>",
"status": "in_progress",
"role": "user",
"content": [
{
"text": "<string>",
"type": "input_text"
}
],
"type": "message"
}
],
"object": "<string>",
"created_at": 123,
"status": "<string>",
"completed_at": "<string>",
"incomplete_details": "<string>",
"error": "<string>",
"previous_response_id": "<string>",
"instructions": "<string>",
"truncation": "auto",
"service_tier": "auto",
"safety_identifier": "<string>",
"prompt_cache_key": "<string>",
"user": "<string>",
"temperature": 123,
"top_p": 123,
"presence_penalty": 123,
"frequency_penalty": 123,
"parallel_tool_calls": true,
"stream": true,
"background": true,
"store": true,
"top_logprobs": 123,
"max_output_tokens": 123,
"max_tool_calls": 123,
"tools": [
{
"name": "<string>",
"description": "<string>",
"parameters": {},
"strict": true,
"type": "function"
}
],
"tools_count": 123,
"tool_choice": "<string>",
"metadata": {},
"text": "<string>",
"stream_options": "<string>",
"reasoning": "<string>",
"include": "<string>",
"include_count": 123
},
"metadata": {}
}
}

Bearer authentication header of the form `Bearer <token>`, where `<token>` is your auth token.
Annotation queue item retrieved.
Unique trace ID
span.generic Show child attributes
Show child attributes
Name of the trace or block
Total time elapsed between the trace start and end timestamp
Unique ID to relate multiple traces together
Context information for the trace
Show child attributes
Parent trace ID, if applicable
ISO timestamp indicating when the trace started
ISO timestamp indicating when the trace ended
List of events associated with this trace
Show child attributes
Show child attributes
Was this page helpful?