Mirror of https://github.com/mozilla/gecko-dev.git
servo: Merge #12406 - Refactor FileAPI implementation (from izgzhen:refactor-file); r=Manishearth
Most of this is simple refactoring, cleanup, and improvement, but it involves two slightly notable changes:

+ In `filemanager`, we now read the file content for the requested `RelativePos` using `seek` and `read_exact` (rather than `read_to_end` followed by slicing). This strategy may be adjusted again in future performance tuning, but it is certainly better than nothing.
+ I also cached more file meta-info on both sides and left a block comment on `filemanager`'s file reading describing the snapshot-state problem (not solved yet).

r? @Manishearth

---

- [x] `./mach build -d` does not report any errors
- [x] `./mach test-tidy` does not report any errors

Source-Repo: https://github.com/servo/servo
Source-Revision: 665559556f5aeac5820e17684b14311aa3767c0c
Parent: 2ae55f3f56
Commit: f66935aba8
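
Below is a minimal sketch of the ranged-read strategy described in the commit message above: seek to the start of the requested range, then read exactly that many bytes, instead of reading the whole file into memory and slicing afterwards. This is illustrative only; the helper name `read_range` and the plain `io::Result`/`?` style are not Servo's actual code, which goes through `RelativePos`, `try!`, and `BlobURLStoreError`.

    use std::fs::File;
    use std::io::{self, Read, Seek, SeekFrom};
    use std::ops::Range;

    // Read only `range` (in bytes) from the file at `path`.
    fn read_range(path: &str, range: Range<u64>) -> io::Result<Vec<u8>> {
        let mut file = File::open(path)?;
        // Position the cursor at the start of the requested slice.
        let seeked = file.seek(SeekFrom::Start(range.start))?;
        if seeked != range.start {
            return Err(io::Error::new(io::ErrorKind::UnexpectedEof,
                                      "seek fell short of the range start"));
        }
        // Read exactly (end - start) bytes; this errors out if the file is shorter.
        let mut buf = vec![0u8; (range.end - range.start) as usize];
        file.read_exact(&mut buf)?;
        Ok(buf)
    }
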
@@ -8,26 +8,23 @@ use hyper::http::RawStatus;
 use mime::{Mime, Attr};
 use mime_classifier::MimeClassifier;
 use net_traits::ProgressMsg::Done;
-use net_traits::blob_url_store::BlobURLStoreEntry;
-use net_traits::filemanager_thread::RelativePos;
+use net_traits::blob_url_store::BlobBuf;
 use net_traits::response::HttpsState;
 use net_traits::{LoadConsumer, LoadData, Metadata};
 use resource_thread::start_sending_sniffed_opt;
-use std::ops::Index;
 use std::sync::Arc;
 
 // TODO: Check on GET
 // https://w3c.github.io/FileAPI/#requestResponseModel
 
 pub fn load_blob(load_data: LoadData, start_chan: LoadConsumer,
-                 classifier: Arc<MimeClassifier>, opt_filename: Option<String>,
-                 rel_pos: RelativePos, entry: BlobURLStoreEntry) {
-    let content_type: Mime = entry.type_string.parse().unwrap_or(mime!(Text / Plain));
+                 classifier: Arc<MimeClassifier>, blob_buf: BlobBuf) {
+    let content_type: Mime = blob_buf.type_string.parse().unwrap_or(mime!(Text / Plain));
     let charset = content_type.get_param(Attr::Charset);
 
     let mut headers = Headers::new();
 
-    if let Some(name) = opt_filename {
+    if let Some(name) = blob_buf.filename {
         let charset = charset.and_then(|c| c.as_str().parse().ok());
         headers.set(ContentDisposition {
             disposition: DispositionType::Inline,
@@ -38,10 +35,8 @@ pub fn load_blob(load_data: LoadData, start_chan: LoadConsumer,
         });
     }
 
-    let range = rel_pos.to_abs_range(entry.size as usize);
-
     headers.set(ContentType(content_type.clone()));
-    headers.set(ContentLength(range.len() as u64));
+    headers.set(ContentLength(blob_buf.size as u64));
 
     let metadata = Metadata {
         final_url: load_data.url.clone(),
@@ -55,7 +50,7 @@ pub fn load_blob(load_data: LoadData, start_chan: LoadConsumer,
 
     if let Ok(chan) =
         start_sending_sniffed_opt(start_chan, metadata, classifier,
-                                  &entry.bytes.index(range), load_data.context.clone()) {
+                                  &blob_buf.bytes, load_data.context.clone()) {
         let _ = chan.send(Done(Ok(())));
     }
 }

@@ -6,14 +6,14 @@ use blob_loader::load_blob;
 use ipc_channel::ipc::{self, IpcReceiver, IpcSender};
 use mime_classifier::MimeClassifier;
 use mime_guess::guess_mime_type_opt;
-use net_traits::blob_url_store::{BlobURLStoreEntry, BlobURLStoreError, parse_blob_url};
+use net_traits::blob_url_store::{BlobBuf, BlobURLStoreError, parse_blob_url};
 use net_traits::filemanager_thread::{FileManagerThreadMsg, FileManagerResult, FilterPattern, FileOrigin};
 use net_traits::filemanager_thread::{SelectedFile, RelativePos, FileManagerThreadError, SelectedFileId};
 use net_traits::{LoadConsumer, LoadData, NetworkError};
 use resource_thread::send_error;
 use std::collections::HashMap;
 use std::fs::File;
-use std::io::Read;
+use std::io::{Read, Seek, SeekFrom};
 use std::ops::Index;
 use std::path::{Path, PathBuf};
 use std::sync::atomic::{self, AtomicUsize, AtomicBool, Ordering};
@@ -29,6 +29,9 @@ pub trait FileManagerThreadFactory<UI: 'static + UIProvider> {
     fn new(&'static UI) -> Self;
 }
 
+/// Trait that provider of file-dialog UI should implement.
+/// It will be used to initialize a generic FileManager.
+/// For example, we can choose a dummy UI for testing purpose.
 pub trait UIProvider where Self: Sync {
     fn open_file_dialog(&self, path: &str, patterns: Vec<FilterPattern>) -> Option<String>;
 
@@ -92,22 +95,38 @@ impl<UI: 'static + UIProvider> FileManagerThreadFactory<UI> for IpcSender<FileMa
     }
 }
 
+/// FileManagerStore's entry
 struct FileStoreEntry {
     /// Origin of the entry's "creator"
     origin: FileOrigin,
     /// Backend implementation
     file_impl: FileImpl,
-    /// Reference counting
+    /// Number of `FileImpl::Sliced` entries in `FileManagerStore`
+    /// that has a reference (FileID) to this entry
    refs: AtomicUsize,
-    /// UUID key's validity as Blob URL
+    /// UUIDs only become valid blob URIs when explicitly requested
+    /// by the user with createObjectURL. Validity can be revoked as well.
+    /// (The UUID is the one that maps to this entry in `FileManagerStore`)
    is_valid_url: AtomicBool
 }
 
+#[derive(Clone)]
+struct FileMetaData {
+    path: PathBuf,
+    /// Modified time in UNIX Epoch format
+    modified: u64,
+    size: u64,
+}
+
 /// File backend implementation
 #[derive(Clone)]
 enum FileImpl {
-    PathOnly(PathBuf),
-    Memory(BlobURLStoreEntry),
+    /// Metadata of on-disk file
+    MetaDataOnly(FileMetaData),
+    /// In-memory Blob buffer object
+    Memory(BlobBuf),
+    /// A reference to parent entry in `FileManagerStore`,
+    /// representing a sliced version of the parent entry data
    Sliced(Uuid, RelativePos),
 }
 
@@ -145,13 +164,15 @@ impl<UI: 'static + UIProvider> FileManager<UI> {
                 spawn_named("read file".to_owned(), move || {
                     match store.try_read_file(id, origin) {
                         Ok(buffer) => { let _ = sender.send(Ok(buffer)); }
-                        Err(_) => { let _ = sender.send(Err(FileManagerThreadError::ReadFileError)); }
+                        Err(e) => {
+                            let _ = sender.send(Err(FileManagerThreadError::BlobURLStoreError(e)));
+                        }
                     }
                 })
             }
-            FileManagerThreadMsg::PromoteMemory(entry, sender, origin) => {
+            FileManagerThreadMsg::PromoteMemory(blob_buf, sender, origin) => {
                 spawn_named("transfer memory".to_owned(), move || {
-                    store.promote_memory(entry, sender, origin);
+                    store.promote_memory(blob_buf, sender, origin);
                 })
             }
             FileManagerThreadMsg::AddSlicedURLEntry(id, rel_pos, sender, origin) =>{
@@ -167,8 +188,7 @@ impl<UI: 'static + UIProvider> FileManager<UI> {
                     send_error(load_data.url.clone(), format_err, consumer);
                 }
                 Some((id, _fragment)) => {
-                    // check_url_validity is true since content is requested by this URL
-                    self.process_request(load_data, consumer, RelativePos::full_range(), id, true);
+                    self.process_request(load_data, consumer, id);
                 }
             }
         },
@@ -214,55 +234,22 @@ impl<UI: 'static + UIProvider> FileManager<UI> {
         }
     }
 
-    fn process_request(&self, load_data: LoadData, consumer: LoadConsumer,
-                       rel_pos: RelativePos, id: Uuid, check_url_validity: bool) {
+    fn process_request(&self, load_data: LoadData, consumer: LoadConsumer, id: Uuid) {
         let origin_in = load_data.url.origin().unicode_serialization();
-        match self.store.get_impl(&id, &origin_in, check_url_validity) {
-            Ok(file_impl) => {
-                match file_impl {
-                    FileImpl::Memory(buffered) => {
-                        let classifier = self.classifier.clone();
-                        spawn_named("load blob".to_owned(), move ||
-                            load_blob(load_data, consumer, classifier,
-                                      None, rel_pos, buffered));
-                    }
-                    FileImpl::PathOnly(filepath) => {
-                        let opt_filename = filepath.file_name()
-                                                   .and_then(|osstr| osstr.to_str())
-                                                   .map(|s| s.to_string());
-
-                        let mut bytes = vec![];
-                        let mut handler = File::open(&filepath).unwrap();
-                        let mime = guess_mime_type_opt(filepath);
-                        let size = handler.read_to_end(&mut bytes).unwrap();
-
-                        let entry = BlobURLStoreEntry {
-                            type_string: match mime {
-                                Some(x) => format!("{}", x),
-                                None => "".to_string(),
-                            },
-                            size: size as u64,
-                            bytes: bytes,
-                        };
-                        let classifier = self.classifier.clone();
-                        spawn_named("load blob".to_owned(), move ||
-                            load_blob(load_data, consumer, classifier,
-                                      opt_filename, rel_pos, entry));
-                    },
-                    FileImpl::Sliced(id, rel_pos) => {
-                        // Next time we don't need to check validity since
-                        // we have already done that for requesting URL
-                        self.process_request(load_data, consumer, rel_pos, id, false);
-                    }
-                }
-            }
-            Err(e) => {
-                send_error(load_data.url.clone(), NetworkError::Internal(format!("{:?}", e)), consumer);
+        // check_url_validity is true since content is requested by this URL
+        match self.store.get_blob_buf(&id, &origin_in, RelativePos::full_range(), true) {
+            Ok(blob_buf) => {
+                let classifier = self.classifier.clone();
+                spawn_named("load blob".to_owned(), move || load_blob(load_data, consumer, classifier, blob_buf));
             }
+            Err(e) => send_error(load_data.url.clone(), NetworkError::Internal(format!("{:?}", e)), consumer),
         }
     }
 }
 
+/// File manager's data store. It maintains a thread-safe mapping
+/// from FileID to FileStoreEntry which might have different backend implementation.
+/// Access to the content is encapsulated as methods of this struct.
 struct FileManagerStore<UI: 'static + UIProvider> {
     entries: RwLock<HashMap<Uuid, FileStoreEntry>>,
     ui: &'static UI,
@@ -358,11 +345,8 @@ impl <UI: 'static + UIProvider> FileManagerStore<UI> {
         match opt_s {
             Some(s) => {
                 let selected_path = Path::new(&s);
-
-                match self.create_entry(selected_path, &origin) {
-                    Some(triple) => { let _ = sender.send(Ok(triple)); }
-                    None => { let _ = sender.send(Err(FileManagerThreadError::InvalidSelection)); }
-                };
+                let result = self.create_entry(selected_path, &origin);
+                let _ = sender.send(result);
             }
             None => {
                 let _ = sender.send(Err(FileManagerThreadError::UserCancelled));
@@ -395,8 +379,11 @@ impl <UI: 'static + UIProvider> FileManagerStore<UI> {
 
         for path in selected_paths {
             match self.create_entry(path, &origin) {
-                Some(triple) => replies.push(triple),
-                None => { let _ = sender.send(Err(FileManagerThreadError::InvalidSelection)); }
+                Ok(triple) => replies.push(triple),
+                Err(e) => {
+                    let _ = sender.send(Err(e));
+                    return;
+                }
             };
         }
 
@@ -409,78 +396,114 @@ impl <UI: 'static + UIProvider> FileManagerStore<UI> {
         }
     }
 
-    fn create_entry(&self, file_path: &Path, origin: &str) -> Option<SelectedFile> {
-        match File::open(file_path) {
-            Ok(handler) => {
-                let id = Uuid::new_v4();
-                let file_impl = FileImpl::PathOnly(file_path.to_path_buf());
+    fn create_entry(&self, file_path: &Path, origin: &str) -> Result<SelectedFile, FileManagerThreadError> {
+        use net_traits::filemanager_thread::FileManagerThreadError::FileSystemError;
 
-                self.insert(id, FileStoreEntry {
-                    origin: origin.to_string(),
-                    file_impl: file_impl,
-                    refs: AtomicUsize::new(1),
-                    // Invalid here since create_entry is called by file selection
-                    is_valid_url: AtomicBool::new(false),
-                });
+        let handler = try!(File::open(file_path).map_err(|e| FileSystemError(e.to_string())));
+        let metadata = try!(handler.metadata().map_err(|e| FileSystemError(e.to_string())));
+        let modified = try!(metadata.modified().map_err(|e| FileSystemError(e.to_string())));
+        let elapsed = try!(modified.elapsed().map_err(|e| FileSystemError(e.to_string())));
+        // Unix Epoch: https://doc.servo.org/std/time/constant.UNIX_EPOCH.html
+        let modified_epoch = elapsed.as_secs() * 1000 + elapsed.subsec_nanos() as u64 / 1000000;
+        let file_size = metadata.len();
+        let file_name = try!(file_path.file_name().ok_or(FileSystemError("Invalid filepath".to_string())));
 
-                // Unix Epoch: https://doc.servo.org/std/time/constant.UNIX_EPOCH.html
-                let epoch = handler.metadata().and_then(|metadata| metadata.modified()).map_err(|_| ())
-                                  .and_then(|systime| systime.elapsed().map_err(|_| ()))
-                                  .and_then(|elapsed| {
-                                      let secs = elapsed.as_secs();
-                                      let nsecs = elapsed.subsec_nanos();
-                                      let msecs = secs * 1000 + nsecs as u64 / 1000000;
-                                      Ok(msecs)
-                                  });
+        let file_impl = FileImpl::MetaDataOnly(FileMetaData {
+            path: file_path.to_path_buf(),
+            modified: modified_epoch,
+            size: file_size,
+        });
 
-                let filename = file_path.file_name();
+        let id = Uuid::new_v4();
 
-                match (epoch, filename) {
-                    (Ok(epoch), Some(filename)) => {
-                        let filename_path = Path::new(filename);
-                        let mime = guess_mime_type_opt(filename_path);
-                        Some(SelectedFile {
-                            id: SelectedFileId(id.simple().to_string()),
-                            filename: filename_path.to_path_buf(),
-                            modified: epoch,
-                            type_string: match mime {
-                                Some(x) => format!("{}", x),
-                                None => "".to_string(),
-                            },
-                        })
-                    }
-                    _ => None
+        self.insert(id, FileStoreEntry {
+            origin: origin.to_string(),
+            file_impl: file_impl,
+            refs: AtomicUsize::new(1),
+            // Invalid here since create_entry is called by file selection
+            is_valid_url: AtomicBool::new(false),
+        });
+
+        let filename_path = Path::new(file_name);
+        let type_string = match guess_mime_type_opt(filename_path) {
+            Some(x) => format!("{}", x),
+            None => "".to_string(),
+        };
+
+        Ok(SelectedFile {
+            id: SelectedFileId(id.simple().to_string()),
+            filename: filename_path.to_path_buf(),
+            modified: modified_epoch,
+            size: file_size,
+            type_string: type_string,
+        })
+    }
+
+    fn get_blob_buf(&self, id: &Uuid, origin_in: &FileOrigin, rel_pos: RelativePos,
+                    check_url_validity: bool) -> Result<BlobBuf, BlobURLStoreError> {
+        let file_impl = try!(self.get_impl(id, origin_in, check_url_validity));
+        match file_impl {
+            FileImpl::Memory(buf) => {
+                let range = rel_pos.to_abs_range(buf.size as usize);
+                Ok(BlobBuf {
+                    filename: None,
+                    type_string: buf.type_string,
+                    size: range.len() as u64,
+                    bytes: buf.bytes.index(range).to_vec(),
+                })
+            }
+            FileImpl::MetaDataOnly(metadata) => {
+                /* XXX: Snapshot state check (optional) https://w3c.github.io/FileAPI/#snapshot-state.
+                   Concretely, here we create another handler, and this handler might not
+                   has the same underlying file state (meta-info plus content) as the time
+                   create_entry is called.
+                */
+
+                let opt_filename = metadata.path.file_name()
+                                           .and_then(|osstr| osstr.to_str())
+                                           .map(|s| s.to_string());
+
+                let mime = guess_mime_type_opt(metadata.path.clone());
+                let range = rel_pos.to_abs_range(metadata.size as usize);
+
+                let mut handler = try!(File::open(&metadata.path)
+                                       .map_err(|e| BlobURLStoreError::External(e.to_string())));
+                let seeked_start = try!(handler.seek(SeekFrom::Start(range.start as u64))
+                                        .map_err(|e| BlobURLStoreError::External(e.to_string())));
+
+                if seeked_start == (range.start as u64) {
+                    let mut bytes = vec![0; range.len()];
+                    try!(handler.read_exact(&mut bytes)
+                         .map_err(|e| BlobURLStoreError::External(e.to_string())));
+
+                    Ok(BlobBuf {
+                        filename: opt_filename,
+                        type_string: match mime {
+                            Some(x) => format!("{}", x),
+                            None => "".to_string(),
+                        },
+                        size: range.len() as u64,
+                        bytes: bytes,
+                    })
+                } else {
+                    Err(BlobURLStoreError::InvalidEntry)
                 }
-            },
-            Err(_) => None
+            }
+            FileImpl::Sliced(parent_id, inner_rel_pos) => {
+                // Next time we don't need to check validity since
+                // we have already done that for requesting URL if necessary
+                self.get_blob_buf(&parent_id, origin_in, rel_pos.slice_inner(&inner_rel_pos), false)
+            }
         }
     }
 
     fn try_read_file(&self, id: SelectedFileId, origin_in: FileOrigin) -> Result<Vec<u8>, BlobURLStoreError> {
         let id = try!(Uuid::parse_str(&id.0).map_err(|_| BlobURLStoreError::InvalidFileID));
 
-        match self.get_impl(&id, &origin_in, false) {
-            Ok(file_impl) => {
-                match file_impl {
-                    FileImpl::PathOnly(filepath) => {
-                        let mut buffer = vec![];
-                        let mut handler = try!(File::open(filepath)
-                                               .map_err(|_| BlobURLStoreError::InvalidEntry));
-                        try!(handler.read_to_end(&mut buffer)
-                             .map_err(|_| BlobURLStoreError::External));
-                        Ok(buffer)
-                    },
-                    FileImpl::Memory(buffered) => {
-                        Ok(buffered.bytes)
-                    },
-                    FileImpl::Sliced(id, rel_pos) => {
-                        self.try_read_file(SelectedFileId(id.simple().to_string()), origin_in)
-                            .map(|bytes| bytes.index(rel_pos.to_abs_range(bytes.len())).to_vec())
-                    }
-                }
-            },
-            Err(e) => Err(e),
-        }
+        // No need to check URL validity in reading a file by FileReader
+        let blob_buf = try!(self.get_blob_buf(&id, &origin_in, RelativePos::full_range(), false));
+
+        Ok(blob_buf.bytes)
     }
 
     fn dec_ref(&self, id: &Uuid, origin_in: &FileOrigin,
@@ -525,14 +548,14 @@ impl <UI: 'static + UIProvider> FileManagerStore<UI> {
         Ok(())
     }
 
-    fn promote_memory(&self, entry: BlobURLStoreEntry,
+    fn promote_memory(&self, blob_buf: BlobBuf,
                       sender: IpcSender<Result<SelectedFileId, BlobURLStoreError>>, origin: FileOrigin) {
         match Url::parse(&origin) { // parse to check sanity
             Ok(_) => {
                 let id = Uuid::new_v4();
                 self.insert(id, FileStoreEntry {
                     origin: origin.clone(),
-                    file_impl: FileImpl::Memory(entry),
+                    file_impl: FileImpl::Memory(blob_buf),
                     refs: AtomicUsize::new(1),
                     // Valid here since PromoteMemory implies URL creation
                     is_valid_url: AtomicBool::new(true),

@@ -6,7 +6,7 @@ use std::str::FromStr;
 use url::Url;
 use uuid::Uuid;
 
-/// Errors returns to BlobURLStoreMsg::Request
+/// Errors returned to Blob URL Store request
 #[derive(Clone, Debug, Serialize, Deserialize)]
 pub enum BlobURLStoreError {
     /// Invalid File UUID
@@ -16,12 +16,13 @@ pub enum BlobURLStoreError {
     /// Invalid entry content
     InvalidEntry,
     /// External error, from like file system, I/O etc.
-    External,
+    External(String),
 }
 
-/// Blob URL store entry, a packaged form of Blob DOM object
+/// Standalone blob buffer object
 #[derive(Clone, Serialize, Deserialize)]
-pub struct BlobURLStoreEntry {
+pub struct BlobBuf {
+    pub filename: Option<String>,
     /// MIME type string
     pub type_string: String,
     /// Size of content in bytes

@@ -2,7 +2,7 @@
  * License, v. 2.0. If a copy of the MPL was not distributed with this
  * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
 
-use blob_url_store::{BlobURLStoreEntry, BlobURLStoreError};
+use blob_url_store::{BlobBuf, BlobURLStoreError};
 use ipc_channel::ipc::IpcSender;
 use num_traits::ToPrimitive;
 use std::cmp::{max, min};
@@ -10,7 +10,8 @@ use std::ops::Range;
 use std::path::PathBuf;
 use super::{LoadConsumer, LoadData};
 
-// HACK: We should send Origin directly instead of this in future, blocked on #11722
+// HACK: Not really process-safe now, we should send Origin
+// directly instead of this in future, blocked on #11722
 /// File manager store entry's origin
 pub type FileOrigin = String;
 
@@ -33,7 +34,7 @@ impl RelativePos {
     pub fn full_range() -> RelativePos {
         RelativePos {
             start: 0,
-            end: Some(0),
+            end: None,
         }
     }
 
@@ -98,20 +99,24 @@ impl RelativePos {
     }
 }
 
+// XXX: We should opt to Uuid once it implements `Deserialize` and `Serialize`
+/// FileID used in inter-process message
 #[derive(Clone, Debug, Deserialize, Serialize)]
 pub struct SelectedFileId(pub String);
 
+/// Response to file selection request
 #[derive(Debug, Deserialize, Serialize)]
 pub struct SelectedFile {
     pub id: SelectedFileId,
     pub filename: PathBuf,
     pub modified: u64,
+    pub size: u64,
     // https://w3c.github.io/FileAPI/#dfn-type
     pub type_string: String,
 }
 
-/// Filter for file selection
-/// the content is expected to be extension (e.g, "doc", without the prefixing ".")
+/// Filter for file selection;
+/// the `String` content is expected to be extension (e.g, "doc", without the prefixing ".")
 #[derive(Clone, Debug, Deserialize, Serialize)]
 pub struct FilterPattern(pub String);
 
@@ -131,7 +136,7 @@ pub enum FileManagerThreadMsg {
 
     /// Add an entry as promoted memory-based blob and send back the associated FileID
     /// as part of a valid Blob URL
-    PromoteMemory(BlobURLStoreEntry, IpcSender<Result<SelectedFileId, BlobURLStoreError>>, FileOrigin),
+    PromoteMemory(BlobBuf, IpcSender<Result<SelectedFileId, BlobURLStoreError>>, FileOrigin),
 
     /// Add a sliced entry pointing to the parent FileID, and send back the associated FileID
     /// as part of a valid Blob URL
@@ -161,8 +166,8 @@ pub enum FileManagerThreadError {
     InvalidSelection,
     /// The selection action is cancelled by user
     UserCancelled,
-    /// Failure to process file information such as file name, modified time etc.
-    FileInfoProcessingError,
-    /// Failure to read the file content
-    ReadFileError,
+    /// Errors returned from file system request
+    FileSystemError(String),
+    /// Blob URL Store error
+    BlobURLStoreError(BlobURLStoreError),
 }

@@ -79,6 +79,7 @@ use std::collections::{BTreeMap, HashMap, HashSet};
 use std::hash::{BuildHasher, Hash};
 use std::mem;
 use std::ops::{Deref, DerefMut};
+use std::path::PathBuf;
 use std::rc::Rc;
 use std::sync::Arc;
 use std::sync::atomic::{AtomicBool, AtomicUsize};
@@ -331,6 +332,7 @@ no_jsmanaged_fields!(SystemTime);
 no_jsmanaged_fields!(SelectedFileId);
 no_jsmanaged_fields!(RelativePos);
 no_jsmanaged_fields!(OpaqueStyleAndLayoutData);
+no_jsmanaged_fields!(PathBuf);
 no_jsmanaged_fields!(CSSErrorReporter);
 no_jsmanaged_fields!(WebGLBufferId);
 no_jsmanaged_fields!(WebGLFramebufferId);

@@ -15,17 +15,29 @@ use encoding::all::UTF_8;
 use encoding::types::{EncoderTrap, Encoding};
 use ipc_channel::ipc;
 use net_traits::IpcSend;
-use net_traits::blob_url_store::BlobURLStoreEntry;
+use net_traits::blob_url_store::BlobBuf;
 use net_traits::filemanager_thread::{FileManagerThreadMsg, SelectedFileId, RelativePos};
 use std::ascii::AsciiExt;
 use std::cell::Cell;
 use std::ops::Index;
+use std::path::PathBuf;
 
+/// File-based blob
+#[derive(JSTraceable)]
+pub struct FileBlob {
+    id: SelectedFileId,
+    name: PathBuf,
+    cache: DOMRefCell<Option<Vec<u8>>>,
+    size: u64,
+}
+
+
+/// Blob backend implementation
 #[must_root]
 #[derive(JSTraceable)]
 pub enum BlobImpl {
-    /// File-based blob, including id and possibly cached content
-    File(SelectedFileId, DOMRefCell<Option<Vec<u8>>>),
+    /// File-based blob
+    File(FileBlob),
     /// Memory-based blob
     Memory(Vec<u8>),
     /// Sliced blob, including parent blob and
@@ -42,8 +54,13 @@ impl BlobImpl {
     }
 
     /// Construct file-backed BlobImpl from File ID
-    pub fn new_from_file(file_id: SelectedFileId) -> BlobImpl {
-        BlobImpl::File(file_id, DOMRefCell::new(None))
+    pub fn new_from_file(file_id: SelectedFileId, name: PathBuf, size: u64) -> BlobImpl {
+        BlobImpl::File(FileBlob {
+            id: file_id,
+            name: name,
+            cache: DOMRefCell::new(None),
+            size: size,
+        })
     }
 }
@@ -79,8 +96,8 @@ impl Blob {
                        relativeContentType: DOMString) -> Root<Blob> {
         let global = parent.global();
         let blob_impl = match *parent.blob_impl.borrow() {
-            BlobImpl::File(ref id, _) => {
-                inc_ref_id(global.r(), id.clone());
+            BlobImpl::File(ref f) => {
+                inc_ref_id(global.r(), f.id.clone());
 
                 // Create new parent node
                 BlobImpl::Sliced(JS::from_ref(parent), rel_pos)
@@ -93,8 +110,8 @@ impl Blob {
                 // Adjust the slicing position, using same parent
                 let new_rel_pos = old_rel_pos.slice_inner(&rel_pos);
 
-                if let BlobImpl::File(ref id, _) = *grandparent.blob_impl.borrow() {
-                    inc_ref_id(global.r(), id.clone());
+                if let BlobImpl::File(ref f) = *grandparent.blob_impl.borrow() {
+                    inc_ref_id(global.r(), f.id.clone());
                 }
 
                 BlobImpl::Sliced(grandparent.clone(), new_rel_pos)
@@ -124,22 +141,22 @@ impl Blob {
     /// Get a slice to inner data, this might incur synchronous read and caching
     pub fn get_bytes(&self) -> Result<Vec<u8>, ()> {
         match *self.blob_impl.borrow() {
-            BlobImpl::File(ref id, ref cached) => {
-                let buffer = match *cached.borrow() {
-                    Some(ref s) => Ok(s.clone()),
+            BlobImpl::File(ref f) => {
+                let (buffer, is_new_buffer) = match *f.cache.borrow() {
+                    Some(ref bytes) => (bytes.clone(), false),
                     None => {
                         let global = self.global();
-                        let s = read_file(global.r(), id.clone())?;
-                        Ok(s)
+                        let bytes = read_file(global.r(), f.id.clone())?;
+                        (bytes, true)
                     }
                 };
 
                 // Cache
-                if let Ok(buf) = buffer.clone() {
-                    *cached.borrow_mut() = Some(buf);
+                if is_new_buffer {
+                    *f.cache.borrow_mut() = Some(buffer.clone());
                 }
 
-                buffer
+                Ok(buffer)
             }
             BlobImpl::Memory(ref s) => Ok(s.clone()),
             BlobImpl::Sliced(ref parent, ref rel_pos) => {
@@ -155,16 +172,16 @@ impl Blob {
     /// used by URL.createObjectURL
     pub fn get_blob_url_id(&self) -> SelectedFileId {
         match *self.blob_impl.borrow() {
-            BlobImpl::File(ref id, _) => {
+            BlobImpl::File(ref f) => {
                 let global = self.global();
                 let origin = global.r().get_url().origin().unicode_serialization();
                 let filemanager = global.r().resource_threads().sender();
                 let (tx, rx) = ipc::channel().unwrap();
 
-                let _ = filemanager.send(FileManagerThreadMsg::ActivateBlobURL(id.clone(), tx, origin.clone()));
+                let _ = filemanager.send(FileManagerThreadMsg::ActivateBlobURL(f.id.clone(), tx, origin.clone()));
 
                 match rx.recv().unwrap() {
-                    Ok(_) => id.clone(),
+                    Ok(_) => f.id.clone(),
                     Err(_) => SelectedFileId("".to_string()) // Return a dummy id on error
                 }
             }
@@ -176,8 +193,8 @@ impl Blob {
                 // Return dummy id
                 SelectedFileId("".to_string())
             }
-            BlobImpl::File(ref parent_id, _) =>
-                self.create_sliced_url_id(parent_id, rel_pos),
+            BlobImpl::File(ref f) =>
+                self.create_sliced_url_id(&f.id, rel_pos),
             BlobImpl::Memory(ref bytes) => {
                 let parent_id = parent.promote_to_file(bytes);
                 *self.blob_impl.borrow_mut() = BlobImpl::Sliced(parent.clone(), rel_pos.clone());
@@ -195,14 +212,15 @@ impl Blob {
         let origin = global.r().get_url().origin().unicode_serialization();
         let filemanager = global.r().resource_threads().sender();
 
-        let entry = BlobURLStoreEntry {
+        let blob_buf = BlobBuf {
+            filename: None,
             type_string: self.typeString.clone(),
             size: self.Size(),
             bytes: bytes.to_vec(),
         };
 
         let (tx, rx) = ipc::channel().unwrap();
-        let _ = filemanager.send(FileManagerThreadMsg::PromoteMemory(entry, tx, origin.clone()));
+        let _ = filemanager.send(FileManagerThreadMsg::PromoteMemory(blob_buf, tx, origin.clone()));
 
         match rx.recv().unwrap() {
             Ok(new_id) => SelectedFileId(new_id.0),
@@ -232,14 +250,14 @@ impl Blob {
 
     /// Cleanups at the time of destruction/closing
     fn clean_up_file_resource(&self) {
-        if let BlobImpl::File(ref id, _) = *self.blob_impl.borrow() {
+        if let BlobImpl::File(ref f) = *self.blob_impl.borrow() {
             let global = self.global();
             let origin = global.r().get_url().origin().unicode_serialization();
 
             let filemanager = global.r().resource_threads().sender();
             let (tx, rx) = ipc::channel().unwrap();
 
-            let msg = FileManagerThreadMsg::DecRef(id.clone(), origin, tx);
+            let msg = FileManagerThreadMsg::DecRef(f.id.clone(), origin, tx);
             let _ = filemanager.send(msg);
             let _ = rx.recv().unwrap();
         }

@@ -54,7 +54,8 @@ impl File {
 
         let global = GlobalRef::Window(window);
 
-        File::new(global, BlobImpl::new_from_file(selected.id), name, Some(selected.modified as i64), "")
+        File::new(global, BlobImpl::new_from_file(selected.id, selected.filename, selected.size),
+                  name, Some(selected.modified as i64), "")
     }
 
     // https://w3c.github.io/FileAPI/#file-constructor

@@ -4,6 +4,7 @@
 
 use ipc_channel::ipc::{self, IpcSender};
 use net::filemanager_thread::{FileManagerThreadFactory, UIProvider};
+use net_traits::blob_url_store::BlobURLStoreError;
 use net_traits::filemanager_thread::{FilterPattern, FileManagerThreadMsg, FileManagerThreadError};
 use std::fs::File;
 use std::io::Read;
@@ -56,7 +57,7 @@ fn test_filemanager() {
         let msg = rx2.recv().expect("Broken channel");
 
         let vec = msg.expect("File manager reading failure is unexpected");
-        assert!(test_file_content == vec, "Read content differs");
+        assert_eq!(test_file_content, vec, "Read content differs");
     }
 
     // Delete the id
@@ -76,7 +77,7 @@ fn test_filemanager() {
         let msg = rx2.recv().expect("Broken channel");
 
         match msg {
-            Err(FileManagerThreadError::ReadFileError) => {},
+            Err(FileManagerThreadError::BlobURLStoreError(BlobURLStoreError::InvalidFileID)) => {},
             other => {
                 assert!(false, "Get unexpected response after deleting the id: {:?}", other);
             }