cloudku-uploader
Blazing-fast, zero-dependency uploader for CloudKu. Supports auto-conversion, chunked uploads, and TypeScript. Easily upload images, videos, audio, and documents via Node.js.
Revolutionary File Upload Solution - Zero Dependencies, Maximum Performance
🚀 Built for Modern JavaScript Environments | 🌐 Global CDN | ⚡ Lightning Fast
📦 Quick Install • 🚀 Get Started • 📖 API Docs • 💡 Examples • 🌐 Support
- Smart Upload Functions - Predefined time-based uploads
- Batch Processing - Upload multiple files simultaneously
- Enhanced Error Handling - Better failure recovery
- Performance Boost - 40% faster than v2.5
- Mobile Optimized - Perfect responsive design
// Quick time-based uploads
import { upload30s, upload7d, upload1y } from 'cloudku-uploader'
// Batch uploads
import { uploadBatch } from 'cloudku-uploader'
// Smart parsing
import { parseExpireTime } from 'cloudku-uploader'
|
|
|
# Using npm
npm install cloudku-uploader
# Using yarn
yarn add cloudku-uploader
# Using pnpm
pnpm add cloudku-uploader
# Using bun
bun add cloudku-uploader
import { uploadFile } from 'cloudku-uploader'
// Simple permanent upload
const result = await uploadFile(fileBuffer, 'image.jpg')
console.log('✅ Upload URL:', result.result.url)
// Temporary upload with expiry
const tempResult = await uploadFile(fileBuffer, 'temp.pdf', '7d')
console.log('⏰ Expires in 7 days:', tempResult.result.url)
import { uploadSmart } from 'cloudku-uploader'
// Auto-detects expiry format
const result = await uploadSmart(fileBuffer, 'document.pdf', '30d')
console.log('🎯 Smart upload:', result)
import {
upload30s, upload15m, upload6h,
upload7d, upload3M, upload1y
} from 'cloudku-uploader'
// Ultra-fast temporary uploads
const quick = await upload30s(buffer, 'temp.jpg') // 30 seconds
const short = await upload15m(buffer, 'preview.png') // 15 minutes
const daily = await upload6h(buffer, 'report.pdf') // 6 hours
const weekly = await upload7d(buffer, 'backup.zip') // 7 days
const quarterly = await upload3M(buffer, 'archive.tar') // 3 months
const longterm = await upload1y(buffer, 'storage.mp4') // 1 year
import React, { useState } from 'react'
import { uploadSmart } from 'cloudku-uploader'
function FileUploader() {
const [uploading, setUploading] = useState(false)
const [result, setResult] = useState(null)
const handleUpload = async (file, expiry = null) => {
setUploading(true)
try {
const buffer = await file.arrayBuffer()
const response = await uploadSmart(
new Uint8Array(buffer),
file.name,
expiry
)
setResult(response)
} catch (error) {
console.error('Upload failed:', error)
} finally {
setUploading(false)
}
}
return (
<div className="upload-container">
<input
type="file"
onChange={(e) => handleUpload(e.target.files[0], '7d')}
disabled={uploading}
/>
{uploading && <p>⏳ Uploading...</p>}
{result && (
<div>
<p>✅ Success!</p>
<a href={result.result.url} target="_blank">
View File: {result.result.filename}
</a>
</div>
)}
</div>
)
}
import express from 'express'
import multer from 'multer'
import { uploadSmart, uploadBatch } from 'cloudku-uploader'
const app = express()
const upload = multer({
limits: { fileSize: 100 * 1024 * 1024 }, // 100MB
storage: multer.memoryStorage()
})
// Single file upload
app.post('/upload', upload.single('file'), async (req, res) => {
try {
const { buffer, originalname } = req.file
const expiry = req.body.expiry || null
const result = await uploadSmart(buffer, originalname, expiry)
if (result.status === 'success') {
res.json({
success: true,
data: {
url: result.result.url,
filename: result.result.filename,
size: result.result.size,
expires: expiry ? `in ${expiry}` : 'never'
}
})
} else {
res.status(400).json({ error: result.message })
}
} catch (error) {
res.status(500).json({ error: error.message })
}
})
// Batch upload
app.post('/upload/batch', upload.array('files'), async (req, res) => {
try {
const files = req.files.map(file => ({
buffer: file.buffer,
name: file.originalname,
expire: req.body.expiry || null
}))
const results = await uploadBatch(files)
res.json({
success: true,
total: files.length,
results: results.map(r => ({
status: r.status,
data: r.data?.result || null,
error: r.error?.message || null
}))
})
} catch (error) {
res.status(500).json({ error: error.message })
}
})
app.listen(3000, () => {
console.log('🚀 Server running on port 3000')
})
// app/api/upload/route.js
import { uploadSmart } from 'cloudku-uploader'
export async function POST(request) {
try {
const formData = await request.formData()
const file = formData.get('file')
const expiry = formData.get('expiry') || null
if (!file) {
return Response.json(
{ error: 'No file provided' },
{ status: 400 }
)
}
const buffer = new Uint8Array(await file.arrayBuffer())
const result = await uploadSmart(buffer, file.name, expiry)
if (result.status === 'success') {
return Response.json({
success: true,
url: result.result.url,
filename: result.result.filename,
size: result.result.size
})
} else {
return Response.json(
{ error: result.message },
{ status: 400 }
)
}
} catch (error) {
return Response.json(
{ error: error.message },
{ status: 500 }
)
}
}
import { uploadBatch, upload7d } from 'cloudku-uploader'
import fs from 'fs'
import path from 'path'
class BatchUploader {
async uploadDirectory(dirPath, options = {}) {
const { concurrency = 3, expiry = null } = options
const files = fs.readdirSync(dirPath)
.map(filename => ({
buffer: fs.readFileSync(path.join(dirPath, filename)),
name: filename,
expire: expiry
}))
console.log(`📦 Processing ${files.length} files...`)
// Process in batches for better performance
const results = []
for (let i = 0; i < files.length; i += concurrency) {
const batch = files.slice(i, i + concurrency)
const batchResults = await uploadBatch(batch)
results.push(...batchResults)
console.log(`✅ Processed batch ${Math.ceil((i + 1) / concurrency)}`)
}
return results
}
}
// Usage
const uploader = new BatchUploader()
const results = await uploader.uploadDirectory('./uploads', {
concurrency: 5,
expiry: '30d'
})
console.table(results.map(r => ({
status: r.status,
filename: r.data?.result?.filename || 'failed',
url: r.data?.result?.url || 'N/A'
})))
Unit | Description | Example | Use Case |
---|---|---|---|
s |
Seconds | 30s |
Real-time processing |
m |
Minutes | 15m |
Quick previews |
h |
Hours | 6h |
Daily tasks |
d |
Days | 7d |
Weekly backups |
M |
Months | 3M |
Quarterly archives |
y |
Years | 1y |
Long-term storage |
import { parseExpireTime } from 'cloudku-uploader'
// Auto-converts to ISO date
console.log(parseExpireTime('7d')) // 2025-06-30
console.log(parseExpireTime('3M')) // 2025-09-23
console.log(parseExpireTime('1y')) // 2026-06-23
console.log(parseExpireTime(null)) // null (permanent)
Primary upload function with manual expiry control.
uploadFile(
buffer: Buffer | Uint8Array,
fileName?: string,
expireDate?: string | null
): Promise<UploadResponse>
Intelligent upload with automatic time parsing.
uploadSmart(
buffer: Buffer | Uint8Array,
fileName?: string,
expireTime?: string | null
): Promise<UploadResponse>
Upload multiple files simultaneously.
uploadBatch(
files: Array<{
buffer: Buffer | Uint8Array,
name: string,
expire?: string | null
}>
): Promise<BatchResult[]>
// Time-based upload shortcuts
upload30s(buffer: Buffer | Uint8Array, name: string): Promise<UploadResponse>
upload15m(buffer: Buffer | Uint8Array, name: string): Promise<UploadResponse>
upload6h(buffer: Buffer | Uint8Array, name: string): Promise<UploadResponse>
upload7d(buffer: Buffer | Uint8Array, name: string): Promise<UploadResponse>
upload3M(buffer: Buffer | Uint8Array, name: string): Promise<UploadResponse>
upload1y(buffer: Buffer | Uint8Array, name: string): Promise<UploadResponse>
interface UploadResponse {
status: 'success' | 'error'
creator?: 'AlfiDev'
information: string
result?: {
filename: string
type: string
size: string
url: string
}
message?: string
}
interface BatchResult {
index: number
status: 'fulfilled' | 'rejected'
data: UploadResponse | null
error: Error | null
}
Category | Extensions | Max Size | Features |
---|---|---|---|
🖼️ Images | JPG, PNG, GIF, WebP, SVG, AVIF, HEIC | 100 MB | Auto-optimization |
📄 Documents | PDF, DOC, DOCX, TXT, MD, RTF | 50 MB | Text extraction |
🗜️ Archives | ZIP, RAR, 7Z, TAR, GZ | 500 MB | Compression analysis |
🎵 Audio | MP3, WAV, FLAC, AAC, OGG | 200 MB | Metadata preservation |
🎬 Video | MP4, AVI, MOV, MKV, WebM | 1 GB | Thumbnail generation |
💻 Code | JS, TS, PY, GO, RS, C, CPP | 10 MB | Syntax highlighting |
Primary: https://cloudkuimages.guru
Fallback: https://cloudkuimages-guru.us.itpanel.app
Region | Locations | Avg Latency |
---|---|---|
🌍 Europe | London, Frankfurt, Paris, Amsterdam | < 25ms |
🌎 Americas | New York, Toronto, São Paulo, LA | < 30ms |
🌏 Asia-Pacific | Tokyo, Singapore, Sydney, Mumbai | < 35ms |
- Uptime: 99.99% SLA
- Global CDN: 200+ PoPs
- Cache Hit Rate: > 95%
- DDoS Protection: Enterprise-grade
- Auto-scaling: Dynamic resource allocation
// Security headers automatically applied
const securityHeaders = {
'x-content-type-options': 'nosniff',
'x-frame-options': 'DENY',
'x-xss-protection': '0',
'referrer-policy': 'strict-origin-when-cross-origin',
'x-provided-by': 'StackCDN'
}
- ✅ MIME Type Verification - Server-side validation
- ✅ File Size Limits - Configurable per category
- ✅ Extension Whitelist - Secure filtering
- ✅ Content Scanning - Malware detection
- ✅ Rate Limiting - Abuse prevention
- ✅ Input Sanitization - XSS protection
Original Bundle: 2.4KB
Minified: 1.8KB
Gzipped: 0.7KB
Brotli: 0.5KB
Cold Start: < 20ms
First Upload: < 80ms
Subsequent: < 40ms
Throughput: > 35MB/s
Baseline: < 1MB
Per Upload: < 100KB
Peak Usage: < 5MB
Cleanup: Automatic
- All existing
uploadFile()
calls - Response format unchanged
- Error handling consistent
// Old way (still works)
import UploadFile from 'cloudku-uploader'
const result = await new UploadFile().upload(buffer, 'file.jpg', '7d')
// New way (recommended)
import { uploadSmart } from 'cloudku-uploader'
const result = await uploadSmart(buffer, 'file.jpg', '7d')
// Even better - use shortcuts
import { upload7d } from 'cloudku-uploader'
const result = await upload7d(buffer, 'file.jpg')
// v2.5
import UploadFile from 'cloudku-uploader'
// v2.7 - Multiple import options
import {
uploadFile, // Core function
uploadSmart, // Smart parsing
uploadBatch, // Batch processing
upload30s, // Quick shortcuts
upload7d,
upload1y,
parseExpireTime // Utility function
} from 'cloudku-uploader'
// Or import everything
import * as CloudKu from 'cloudku-uploader'
import { uploadSmart, parseExpireTime } from 'cloudku-uploader'
import { describe, it, expect } from 'vitest'
describe('CloudKu Uploader', () => {
it('should parse expiry times correctly', () => {
expect(parseExpireTime('7d')).toMatch(/^\d{4}-\d{2}-\d{2}$/)
expect(parseExpireTime('1y')).toMatch(/^\d{4}-\d{2}-\d{2}$/)
expect(parseExpireTime(null)).toBe(null)
})
it('should upload file successfully', async () => {
const buffer = new Uint8Array([0xFF, 0xD8, 0xFF]) // JPEG header
const result = await uploadSmart(buffer, 'test.jpg', '1d')
expect(result.status).toBe('success')
expect(result.result.url).toContain('cloudkuimages')
})
})
import { uploadBatch } from 'cloudku-uploader'
import { performance } from 'perf_hooks'
async function benchmarkUpload() {
const files = Array.from({ length: 10 }, (_, i) => ({
buffer: new Uint8Array(1024 * 100), // 100KB each
name: `test-${i}.bin`,
expire: '1d'
}))
const start = performance.now()
const results = await uploadBatch(files)
const end = performance.now()
console.log(`⚡ Uploaded ${files.length} files in ${end - start}ms`)
console.log(`📊 Success rate: ${results.filter(r => r.status === 'fulfilled').length}/${files.length}`)
}
import { uploadSmart } from 'cloudku-uploader'
import sharp from 'sharp'
class ImageProcessor {
async processAndUpload(imageBuffer, options = {}) {
const {
width = 1920,
quality = 85,
format = 'jpeg',
expiry = '30d'
} = options
// Process image
const processed = await sharp(imageBuffer)
.resize(width, null, { withoutEnlargement: true })
.jpeg({ quality })
.toBuffer()
// Upload processed image
const result = await uploadSmart(
processed,
`processed-${Date.now()}.${format}`,
expiry
)
return {
...result,
originalSize: imageBuffer.length,
processedSize: processed.length,
compression: `${((1 - processed.length / imageBuffer.length) * 100).toFixed(1)}%`
}
}
}
import { uploadSmart } from 'cloudku-uploader'
class UploadAnalytics {
constructor() {
this.metrics = {
uploads: 0,
successes: 0,
failures: 0,
totalSize: 0,
avgResponseTime: 0
}
}
async trackUpload(buffer, filename, expiry) {
const start = Date.now()
this.metrics.uploads++
this.metrics.totalSize += buffer.length
try {
const result = await uploadSmart(buffer, filename, expiry)
if (result.status === 'success') {
this.metrics.successes++
} else {
this.metrics.failures++
}
const responseTime = Date.now() - start
this.metrics.avgResponseTime =
(this.metrics.avgResponseTime + responseTime) / 2
return result
} catch (error) {
this.metrics.failures++
throw error
}
}
getStats() {
return {
...this.metrics,
successRate: `${(this.metrics.successes / this.metrics.uploads * 100).toFixed(2)}%`,
totalSizeMB: `${(this.metrics.totalSize / 1024 / 1024).toFixed(2)} MB`,
avgResponseTimeMs: `${this.metrics.avgResponseTime.toFixed(0)} ms`
}
}
}
Complete documentation and examples |
Package info and version history |
Instant technical assistance |
Custom solutions and SLA |
- 📖 Documentation Hub - Comprehensive guides and tutorials
- 💡 Stack Overflow - Tagged questions:
cloudku-uploader
- 🐛 GitHub Issues - Bug reports and feature requests
- 💬 Discord Community - Real-time developer chat
- 📺 YouTube Channel - Video tutorials and updates
- 🐦 Twitter Updates - Follow @CloudKuImages
This project is licensed under the MIT License - see the LICENSE file for details.
- 🚀 Built with modern JavaScript standards (ES2022+)
- 🧪 Tested across Node.js, Bun, Deno, and browsers
- 🌐 Compliant with GDPR and international privacy laws
- 📦 Following semantic versioning (SemVer)
- 🔒 Security reviewed and OWASP compliant
# Get started in seconds
npm install cloudku-uploader
Made with ❤️ by AlfiDev | Powered by CloudKu Infrastructure
Empowering developers worldwide with reliable, lightning-fast file uploads
⭐ Star us on GitHub • 🐦 Follow on Twitter • 📧 Subscribe to Updates • 💬 Join Discord