Package Exports
- @filefeed/sdk
Readme
@filefeed/sdk
Official TypeScript SDK for the FileFeed API. Provides a type-safe, intuitive interface for interacting with FileFeed's file processing and pipeline management platform.
Installation
npm install @filefeed/sdk
# or
yarn add @filefeed/sdk
# or
pnpm add @filefeed/sdk
Quick Start
import FileFeed from '@filefeed/sdk';
const filefeed = new FileFeed({
apiKey: 'your-api-key',
});
// List completed pipeline runs
const runs = await filefeed.pipelineRuns.list({
status: 'completed',
limit: 50,
});
// Get data from a pipeline run
const data = await filefeed.pipelineRuns.getData({
pipelineRunId: 'run_id',
limit: 1000,
});
Configuration
FileFeedConfig
interface FileFeedConfig {
apiKey: string; // Required: Your FileFeed API key
}
Resources
Pipeline Runs
Manage and retrieve data from pipeline runs.
// List pipeline runs with filtering
const runs = await filefeed.pipelineRuns.list({
model: 'employee',
clientName: 'Acme Corp',
status: 'completed',
limit: 50,
});
// Get a specific pipeline run
const run = await filefeed.pipelineRuns.retrieve('run_id');
// Get data from a pipeline run with pagination
const page = await filefeed.pipelineRuns.getData({
pipelineRunId: 'run_id',
limit: 1000,
offset: 0,
});
// Acknowledge a pipeline run (mark as processed)
await filefeed.pipelineRuns.ack({
pipelineRunId: 'run_id',
});
// Reprocess a failed pipeline run
await filefeed.pipelineRuns.reprocess({
pipelineRunId: 'run_id',
});
// Get presigned URLs for files
const originalUrl = await filefeed.pipelineRuns.getOriginalFileUrl({
pipelineRunId: 'run_id',
expiresIn: 3600,
});
const processedUrl = await filefeed.pipelineRuns.getProcessedFileUrl({
pipelineRunId: 'run_id',
expiresIn: 3600,
});
Clients
Manage SFTP client connections.
// List all clients
const clients = await filefeed.clients.list();
// Get a specific client
const client = await filefeed.clients.retrieve('client_id');
// Create a new client
const newClient = await filefeed.clients.create({
name: 'My SFTP Client',
sftpUsername: 'user',
sftpPassword: 'password',
useHostedSFTP: false,
});
// Update a client
const updatedClient = await filefeed.clients.update('client_id', {
name: 'Updated Name',
});
// Delete a client
await filefeed.clients.remove('client_id');
// Test SFTP connection
const result = await filefeed.clients.testConnection('client_id');
Schemas
Manage data schemas for validation and transformation.
// List all schemas
const schemas = await filefeed.schemas.list();
// Get a specific schema
const schema = await filefeed.schemas.retrieve('schema_id');
// Create a new schema
const newSchema = await filefeed.schemas.create({
name: 'Employee Schema',
description: 'Schema for employee data',
fields: [
{ name: 'id', type: 'string', required: true },
{ name: 'name', type: 'string', required: true },
{ name: 'email', type: 'string', required: true },
{ name: 'department', type: 'string', required: false },
],
});
// Update a schema
const updatedSchema = await filefeed.schemas.update('schema_id', {
description: 'Updated description',
});
// Delete a schema
await filefeed.schemas.remove('schema_id');
// Validate data against a schema
const validation = await filefeed.schemas.validate({
schemaId: 'schema_id',
data: { id: '123', name: 'John Doe', email: 'john@example.com' },
});
Pipelines
Manage data transformation pipelines.
// List all pipelines
const pipelines = await filefeed.pipelines.list();
// Get a specific pipeline
const pipeline = await filefeed.pipelines.retrieve('pipeline_id');
// Create a new pipeline
const newPipeline = await filefeed.pipelines.create({
name: 'Employee Data Pipeline',
clientId: 'client_id',
schemaId: 'schema_id',
mappings: [
{ sourceField: 'emp_id', targetField: 'id' },
{ sourceField: 'full_name', targetField: 'name' },
],
});
// Update a pipeline
const updatedPipeline = await filefeed.pipelines.update('pipeline_id', {
name: 'Updated Pipeline Name',
});
// Delete a pipeline
await filefeed.pipelines.remove('pipeline_id');
// Toggle pipeline active status
const toggledPipeline = await filefeed.pipelines.toggleActive('pipeline_id');
Webhooks
Manage webhook notifications for pipeline events.
// List all webhooks
const webhooks = await filefeed.webhooks.list();
// Get a specific webhook
const webhook = await filefeed.webhooks.retrieve('webhook_id');
// Create a new webhook
const newWebhook = await filefeed.webhooks.create({
name: 'Success Notification',
url: 'https://example.com/webhook',
eventType: 'success',
headers: {
'Authorization': 'Bearer token',
},
isActive: true,
});
// Update a webhook
const updatedWebhook = await filefeed.webhooks.update('webhook_id', {
url: 'https://example.com/new-webhook',
});
// Delete a webhook
await filefeed.webhooks.remove('webhook_id');
// List webhook deliveries
const deliveries = await filefeed.webhooks.listDeliveries({
webhookId: 'webhook_id',
page: 1,
limit: 50,
success: true,
});
Complete Example: Processing Pipeline Runs
Here's a complete example that demonstrates a typical data-sync workflow — fetching completed runs, paginating through their data, and acknowledging them:
import FileFeed from '@filefeed/sdk';
const filefeed = new FileFeed({
apiKey: process.env.FILEFEED_API_KEY!,
});
async function syncPipelineData() {
// Get all completed, unacknowledged pipeline runs
const pipelineRuns = await filefeed.pipelineRuns.list({
model: 'employee',
clientName: 'client-id',
status: 'completed',
acked: false,
});
for (const run of pipelineRuns.data) {
console.log(`Processing pipeline run: ${run.id}`);
let offset: number | null = 0;
let allRecords: any[] = [];
// Paginate through all data
do {
const page = await filefeed.pipelineRuns.getData({
pipelineRunId: run.id,
limit: 1000,
offset,
});
allRecords = allRecords.concat(page.data);
// Update offset for next page
offset = page.data.length === 1000
? (offset ?? 0) + page.data.length
: null;
} while (offset !== null);
console.log(`Retrieved ${allRecords.length} records`);
// Store and sync logic here
await storeRecordsInDatabase(allRecords);
// Acknowledge the pipeline run
await filefeed.pipelineRuns.ack({
pipelineRunId: run.id,
});
console.log(`Pipeline run ${run.id} acknowledged`);
}
}
async function storeRecordsInDatabase(records: any[]) {
// Your database storage logic
console.log(`Storing ${records.length} records...`);
}
// Run the sync
syncPipelineData().catch(console.error);
Error Handling
The SDK throws FileFeedError for API errors:
import { FileFeedError } from '@filefeed/sdk';
try {
const runs = await filefeed.pipelineRuns.list();
} catch (error) {
if (error instanceof FileFeedError) {
console.error(`API Error: ${error.message}`);
console.error(`Status Code: ${error.statusCode}`);
console.error(`Details:`, error.data);
} else {
console.error('Unexpected error:', error);
}
}
TypeScript Support
The SDK is written in TypeScript and provides full type definitions:
import type {
PipelineRun,
Client,
Schema,
Pipeline,
Webhook,
PaginatedResponse,
} from '@filefeed/sdk';
// All types are exported and available for use
const handleRun = (run: PipelineRun) => {
console.log(run.status); // TypeScript knows all properties
};
Advanced Usage
Development Environment
For development/testing, you can set the base URL via environment variable:
# .env
FILEFEED_BASE_URL=http://localhost:3001
FILEFEED_API_KEY=your-api-key
const filefeed = new FileFeed({
apiKey: process.env.FILEFEED_API_KEY!,
tenant: 'demo1',
});
// SDK will use FILEFEED_BASE_URL if set, otherwise defaults to production
Requirements
- Node.js >= 18.0.0
- TypeScript >= 5.0 (for TypeScript projects)
License
MIT
Support
For issues, questions, or contributions, please visit:
- GitHub: https://github.com/filefeed/filefeed-sdk
- Documentation: https://docs.filefeed.com
- Email: support@filefeed.com