Run Job On File
client.extraction.jobs.file(params: JobFileParams { extraction_agent_id, file, from_ui, and 2 more }, options?: RequestOptions): ExtractJob { id, extraction_agent, status, and 3 more }
POST /api/v1/extraction/jobs/file
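If you are calling the endpoint without the SDK, the request is a file upload sent as multipart form data. The sketch below is an assumption-laden illustration only: the base URL, Bearer authentication, and the form field names (extraction_agent_id, file) are inferred from the signature above rather than specified on this page.

// Hypothetical direct call to POST /api/v1/extraction/jobs/file.
// Assumptions: base URL, Bearer auth, and that both parameters travel as form fields.
import { readFile } from 'node:fs/promises';

const form = new FormData();
form.append('extraction_agent_id', '182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e');
form.append('file', new Blob([await readFile('path/to/file')]), 'document.pdf');

const res = await fetch('https://api.cloud.llamaindex.ai/api/v1/extraction/jobs/file', {
  method: 'POST',
  headers: { Authorization: `Bearer ${process.env['LLAMA_CLOUD_API_KEY']}` },
  body: form,
});
const job = await res.json(); // ExtractJob; see the Returns Examples section below
console.log(job.id, job.status);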
Parameters
JobFileParams: extraction_agent_id, file, from_ui, and 2 more.

Returns
ExtractJob: id, extraction_agent, status, and 3 more.

Example
import fs from 'fs';
import LlamaCloud from '@llamaindex/llama-cloud';

const client = new LlamaCloud({
  apiKey: process.env['LLAMA_CLOUD_API_KEY'], // This is the default and can be omitted
});

const extractJob = await client.extraction.jobs.file({
  extraction_agent_id: '182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e',
  file: fs.createReadStream('path/to/file'),
});
console.log(extractJob.id);
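Extraction runs asynchronously: the call above returns as soon as the job is created, typically with status PENDING (see the Returns Examples section below). A minimal polling sketch follows; the GET job-status endpoint, base URL, Bearer authentication, and the terminal status values are assumptions inferred from the POST path documented here, not something this page specifies.

// Poll until the job leaves PENDING. Assumption: GET /api/v1/extraction/jobs/{id}
// returns the same ExtractJob shape shown below; exact path and statuses may differ.
async function waitForJob(jobId: string, baseUrl = 'https://api.cloud.llamaindex.ai') {
  for (;;) {
    const res = await fetch(`${baseUrl}/api/v1/extraction/jobs/${jobId}`, {
      headers: { Authorization: `Bearer ${process.env['LLAMA_CLOUD_API_KEY']}` },
    });
    if (!res.ok) throw new Error(`Job lookup failed: ${res.status}`);
    const job = await res.json();
    if (job.status !== 'PENDING') return job; // e.g. SUCCESS or ERROR
    await new Promise((resolve) => setTimeout(resolve, 2000)); // back off before retrying
  }
}

const finished = await waitForJob(extractJob.id); // extractJob from the example above
console.log(finished.status, finished.error ?? '');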
"id": "182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e",
"extraction_agent": {
"id": "182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e",
"config": {
"chunk_mode": "PAGE",
"citation_bbox": true,
"cite_sources": true,
"confidence_scores": true,
"extract_model": "openai-gpt-4-1",
"extraction_mode": "FAST",
"extraction_target": "PER_DOC",
"high_resolution_mode": true,
"invalidate_cache": true,
"multimodal_fast_mode": true,
"num_pages_context": 1,
"page_range": "page_range",
"parse_model": "openai-gpt-4o",
"priority": "low",
"system_prompt": "system_prompt",
"use_reasoning": true
},
"data_schema": {
"foo": {
"foo": "bar"
}
},
"name": "name",
"project_id": "182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e",
"created_at": "2019-12-27T18:11:19.117Z",
"custom_configuration": "default",
"updated_at": "2019-12-27T18:11:19.117Z"
},
"status": "PENDING",
"error": "error",
"file": {
"id": "182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e",
"name": "x",
"project_id": "182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e",
"created_at": "2019-12-27T18:11:19.117Z",
"data_source_id": "182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e",
"expires_at": "2019-12-27T18:11:19.117Z",
"external_file_id": "external_file_id",
"file_size": 0,
"file_type": "x",
"last_modified_at": "2019-12-27T18:11:19.117Z",
"permission_info": {
"foo": {
"foo": "bar"
}
},
"purpose": "purpose",
"resource_info": {
"foo": {
"foo": "bar"
}
},
"updated_at": "2019-12-27T18:11:19.117Z"
},
"file_id": "182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e"
Returns Examples
{
"id": "182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e",
"extraction_agent": {
"id": "182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e",
"config": {
"chunk_mode": "PAGE",
"citation_bbox": true,
"cite_sources": true,
"confidence_scores": true,
"extract_model": "openai-gpt-4-1",
"extraction_mode": "FAST",
"extraction_target": "PER_DOC",
"high_resolution_mode": true,
"invalidate_cache": true,
"multimodal_fast_mode": true,
"num_pages_context": 1,
"page_range": "page_range",
"parse_model": "openai-gpt-4o",
"priority": "low",
"system_prompt": "system_prompt",
"use_reasoning": true
},
"data_schema": {
"foo": {
"foo": "bar"
}
},
"name": "name",
"project_id": "182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e",
"created_at": "2019-12-27T18:11:19.117Z",
"custom_configuration": "default",
"updated_at": "2019-12-27T18:11:19.117Z"
},
"status": "PENDING",
"error": "error",
"file": {
"id": "182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e",
"name": "x",
"project_id": "182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e",
"created_at": "2019-12-27T18:11:19.117Z",
"data_source_id": "182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e",
"expires_at": "2019-12-27T18:11:19.117Z",
"external_file_id": "external_file_id",
"file_size": 0,
"file_type": "x",
"last_modified_at": "2019-12-27T18:11:19.117Z",
"permission_info": {
"foo": {
"foo": "bar"
}
},
"purpose": "purpose",
"resource_info": {
"foo": {
"foo": "bar"
}
},
"updated_at": "2019-12-27T18:11:19.117Z"
},
"file_id": "182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e"
}
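For readers who prefer types to sample payloads, here is a rough TypeScript sketch of the shape above, transcribed field by field from the example. The interface names other than ExtractJob are placeholders, and nullability and enum members are guesses; prefer the types generated by @llamaindex/llama-cloud.

// Shape transcribed from the Returns example above. Nullability and exact
// enums are assumptions; the SDK ships its own generated types.
interface ExtractJob {
  id: string;
  extraction_agent: ExtractionAgent;
  status: string;                   // "PENDING" in the example
  error: string | null;             // a string in the example; likely null on success (assumption)
  file: ExtractFile;
  file_id: string;
}

interface ExtractionAgent {
  id: string;
  name: string;
  project_id: string;
  config: {
    chunk_mode: string;             // "PAGE"
    extraction_mode: string;        // "FAST"
    extraction_target: string;      // "PER_DOC"
    extract_model: string;
    parse_model: string;
    priority: string;
    system_prompt: string;
    page_range: string;
    num_pages_context: number;
    citation_bbox: boolean;
    cite_sources: boolean;
    confidence_scores: boolean;
    high_resolution_mode: boolean;
    invalidate_cache: boolean;
    multimodal_fast_mode: boolean;
    use_reasoning: boolean;
  };
  data_schema: Record<string, unknown>;
  custom_configuration: string;
  created_at: string;               // ISO 8601 timestamps
  updated_at: string;
}

interface ExtractFile {
  id: string;
  name: string;
  project_id: string;
  data_source_id: string;
  external_file_id: string;
  file_size: number;
  file_type: string;
  purpose: string;
  permission_info: Record<string, unknown>;
  resource_info: Record<string, unknown>;
  expires_at: string;
  last_modified_at: string;
  created_at: string;
  updated_at: string;
}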