correctly handle NFD content in media DB from older Anki versions
parent dfa10f5a1c
commit 617b18ff49
@@ -20,6 +20,7 @@ use std::collections::HashMap;
 use std::io::{Read, Write};
 use std::path::Path;
 use std::{io, time};
+use unicode_normalization::is_nfc;
 
 static SYNC_MAX_FILES: usize = 25;
 static SYNC_MAX_BYTES: usize = (2.5 * 1024.0 * 1024.0) as usize;
@@ -717,6 +718,16 @@ fn zip_files<'a>(
             break;
         }
 
+        #[cfg(target_vendor = "apple")]
+        {
+            if !is_nfc(&file.fname) {
+                // older Anki versions stored non-normalized filenames in the DB; clean them up
+                debug!(log, "clean up non-nfc entry"; "fname"=>&file.fname);
+                invalid_entries.push(&file.fname);
+                continue;
+            }
+        }
+
         let file_data = if file.sha1.is_some() {
             match data_for_file(media_folder, &file.fname) {
                 Ok(data) => data,
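Note (illustrative, not part of the commit): the cleanup above relies on unicode_normalization::is_nfc() distinguishing precomposed (NFC) filenames from the decomposed (NFD) form that older Anki versions could end up storing in the media DB on Apple platforms. A minimal sketch of that check, assuming only the unicode-normalization crate; the filenames are made up for illustration:

```rust
use unicode_normalization::{is_nfc, UnicodeNormalization};

fn main() {
    // "ä" as a single precomposed code point (NFC)
    let nfc_name = "\u{00e4}.jpg";
    // "ä" as "a" plus a combining diaeresis (NFD), as older Anki
    // versions could store it on Apple platforms
    let nfd_name = "a\u{0308}.jpg";

    assert!(is_nfc(nfc_name));
    assert!(!is_nfc(nfd_name)); // this is the case the diff flags for cleanup

    // re-normalizing the NFD form recovers the NFC spelling
    let renormalized: String = nfd_name.nfc().collect();
    assert_eq!(renormalized, nfc_name);
}
```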