]*>/g, '');
html = html.replace(/<\/en-note>/g, '
');
// Handle media
html = html.replace(/]*)>/g, (match, attrs) => {
const hash = this.extractHash(attrs);
return `
`;
});
// Handle to-dos
html = html.replace(/]*)\/>/g, (match, attrs) => {
const checked = attrs.includes('checked="true"');
return ``;
});
return html;
}
}
```
#### OPML Import
Import hierarchical OPML outlines:
```javascript
class OpmlImporter {
  /**
   * Import an OPML file as a tree of notes under the given parent note.
   *
   * @param {*} opmlFile - OPML source accepted by OpmlParser.parse.
   * @param {string} parentNoteId - ID of the note the outline is attached to.
   * @returns {Promise<*>} the root note created from the outline body.
   */
  async importOpml(opmlFile, parentNoteId) {
    const parser = new OpmlParser();
    const parsed = await parser.parse(opmlFile);
    return this.importOutline(parsed.body, parentNoteId);
  }

  /**
   * Recursively create one note per outline element.
   *
   * @param {*} outline - Parsed outline node (text, notes, attributes, children).
   * @param {string} parentNoteId - Parent note for the created note.
   * @returns {Promise<*>} the note created for this outline element.
   */
  async importOutline(outline, parentNoteId) {
    const note = await api.createNote({
      parentNoteId,
      title: outline.text || 'Untitled',
      content: outline.notes || '',
      type: 'text'
    });

    // Copy any OPML attributes onto the note as labels.
    const attrs = outline.attributes;
    if (attrs) {
      Object.entries(attrs).forEach(([key, value]) => note.setLabel(key, value));
    }

    // Children are imported one at a time so the outline order is preserved.
    const children = outline.children;
    if (children) {
      for (const child of children) {
        await this.importOutline(child, note.noteId);
      }
    }

    return note;
  }
}
```
## Performance Optimization
### Batch Processing
Optimize large import/export operations:
```javascript
class BatchProcessor {
  /**
   * Run `processor` over `items` in fixed-size batches, reporting progress
   * after each batch.
   *
   * @param {Array<*>} items - Work items to process.
   * @param {(item: *) => Promise<*>} processor - Async handler for one item.
   * @param {Object} [options]
   * @param {number} [options.batchSize=50] - Number of items per batch.
   * @param {boolean} [options.parallel=false] - Process each batch concurrently.
   * @param {(p: {processed: number, total: number, percentage: number}) => void}
   *   [options.onProgress] - Called once per completed batch.
   * @returns {Promise<Array<*>>} results in the same order as `items`.
   */
  async processBatch(items, processor, options = {}) {
    const {
      batchSize = 50,
      parallel = false,
      onProgress = () => {}
    } = options;

    const total = items.length;
    const results = [];
    let cursor = 0;

    while (cursor < total) {
      const batch = items.slice(cursor, cursor + batchSize);

      // Sequential by default; Promise.all fans the batch out when requested.
      let batchResults;
      if (parallel) {
        batchResults = await Promise.all(batch.map((item) => processor(item)));
      } else {
        batchResults = [];
        for (const item of batch) {
          batchResults.push(await processor(item));
        }
      }

      results.push(...batchResults);
      cursor += batch.length;

      onProgress({
        processed: cursor,
        total,
        percentage: (cursor / total) * 100
      });
    }

    return results;
  }
}
```
### Memory Management
Handle large files without memory issues:
```javascript
class MemoryEfficientProcessor {
  /**
   * Stream a file line by line, handing fixed-size groups of lines to
   * `processor` so the whole file is never held in memory at once.
   *
   * @param {string} filePath - Path of the file to read.
   * @param {(lines: string[]) => Promise<void>} processor - Receives up to
   *   100 lines per call; the final call may carry fewer.
   * @returns {Promise<void>}
   */
  async processLargeFile(filePath, processor) {
    // Small read chunks keep peak memory low even for very large files.
    const input = fs.createReadStream(filePath, {
      highWaterMark: 16 * 1024 // 16KB chunks
    });
    const lines = readline.createInterface({
      input,
      crlfDelay: Infinity
    });

    const maxPending = 100;
    let pending = [];

    for await (const line of lines) {
      pending.push(line);
      if (pending.length >= maxPending) {
        await processor(pending);
        pending = [];
      }
    }

    // Flush whatever is left once the stream ends.
    if (pending.length > 0) {
      await processor(pending);
    }
  }
}
```
## Troubleshooting
### Import Failures
**Symptom:** Import process fails or hangs.
**Solutions:**
- Check file format compatibility
- Verify file isn't corrupted
- Reduce batch size for large imports
- Check available disk space
- Review error logs for specific issues
### Data Loss During Export
**Symptom:** Some data missing in exported files.
**Solutions:**
- Verify export options include all data types
- Check for unsupported content types
- Ensure proper permissions for all notes
- Review export logs for skipped items
### Format Conversion Issues
**Symptom:** Content appears broken after import.
**Solutions:**
- Verify source format detection
- Check character encoding
- Review transformation rules
- Test with smaller sample first
## Best Practices
1. **Always Create Backups**
- Backup before large imports
- Test imports on copy first
- Keep original files
2. **Validate Data Integrity**
- Verify import completeness
- Check content preservation
- Validate relationships
3. **Optimize for Performance**
- Use appropriate batch sizes
- Enable compression for exports
- Stream large files
4. **Document Migrations**
- Keep import/export logs
- Document mapping rules
- Track transformation decisions
5. **Test Thoroughly**
- Test with sample data first
- Verify all content types
- Check edge cases
## Related Topics
- [Basic Import/Export](../Import-Export.md)
- [Database Backup](../Maintenance/Backup.md)
- [Data Migration](../Migration.md)
- [File Attachments](../Attachments.md)