servo: Merge #11875 - Integration and improvements of File API backends (from izgzhen:file-manager-backend); r=Manishearth

Basically three major changes:

1. More complete origin check in `FileManagerThreadMsg`
2. Add reference-counting logic to the file manager store and the script API
3. Integrate support for slicing (see the sketch after this list)
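
The slicing support resolves a pair of possibly-negative offsets against a blob's total size, following the File API `slice()` algorithm. Below is a minimal standalone sketch of that clamping logic, mirroring the `RelativePos::to_abs_range` helper added in `net_traits::filemanager_thread` (the `RelPos` name and the `main` driver are illustrative only, and plain `as` casts stand in for the `num_traits` conversions used in the real code):

```rust
use std::cmp::{max, min};
use std::ops::Range;

/// Slice positions relative to a sequence, as in
/// https://w3c.github.io/FileAPI/#dfn-slice: negative values
/// count back from one past the last byte.
struct RelPos {
    start: i64,
    end: Option<i64>, // None means "one past the last byte"
}

impl RelPos {
    /// Resolve to an absolute, clamped byte range for a blob of `size` bytes.
    fn to_abs_range(&self, size: usize) -> Range<usize> {
        let size = size as i64;
        let start = if self.start < 0 {
            max(size + self.start, 0)
        } else {
            min(self.start, size)
        };
        let end = match self.end {
            Some(e) if e < 0 => max(size + e, 0),
            Some(e) => min(e, size),
            None => size,
        };
        let span = max(end - start, 0);
        (start as usize)..((start + span) as usize)
    }
}

fn main() {
    // blob.slice(2, -1) on a 10-byte blob keeps bytes 2..9
    assert_eq!(RelPos { start: 2, end: Some(-1) }.to_abs_range(10), 2..9);
    // blob.slice(-4) keeps the last 4 bytes
    assert_eq!(RelPos { start: -4, end: None }.to_abs_range(10), 6..10);
}
```

A slice of a slice does not copy bytes: as the `BlobImpl::Sliced` and `slice_inner` changes in the diff show, it composes the two relative positions against the same parent, and the absolute range is only computed when the backing bytes are actually read.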

r? @Manishearth

---
<!-- Thank you for contributing to Servo! Please replace each `[ ]` by `[X]` when the step is complete, and replace `__` with appropriate data: -->
- [x] `./mach build -d` does not report any errors
- [x] `./mach test-tidy` does not report any errors
- [ ] These changes fix #__ (github issue number if applicable).

<!-- Either: -->
- [ ] There are tests for these changes OR
- [ ] These changes do not require tests because _____

<!-- Pull requests that do not address these steps are welcome, but they will require additional verification as part of the review process. -->

Source-Repo: https://github.com/servo/servo
Source-Revision: 36974f0746261b971c93ed7dfb9bd726675ccf69
This commit is contained in:
Zhen Zhang 2016-07-04 09:15:23 -07:00
Parent 0a39e8e7f7
Commit 182611cd28
14 changed files with 543 additions and 244 deletions

View file

@ -2,60 +2,32 @@
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use filemanager_thread::BlobURLStore;
use hyper::header::{DispositionType, ContentDisposition, DispositionParam};
use hyper::header::{Headers, ContentType, ContentLength, Charset};
use hyper::http::RawStatus;
use mime::{Mime, Attr};
use mime_classifier::MimeClassifier;
use net_traits::ProgressMsg::Done;
use net_traits::blob_url_store::{parse_blob_url, BlobURLStoreEntry, BlobURLStoreError};
use net_traits::blob_url_store::BlobURLStoreEntry;
use net_traits::filemanager_thread::RelativePos;
use net_traits::response::HttpsState;
use net_traits::{LoadConsumer, LoadData, Metadata, NetworkError};
use resource_thread::{send_error, start_sending_sniffed_opt};
use std::str;
use std::sync::{Arc, RwLock};
use net_traits::{LoadConsumer, LoadData, Metadata};
use resource_thread::start_sending_sniffed_opt;
use std::ops::Index;
use std::sync::Arc;
// TODO: Check on GET
// https://w3c.github.io/FileAPI/#requestResponseModel
pub fn load(load_data: LoadData, consumer: LoadConsumer,
blob_url_store: Arc<RwLock<BlobURLStore>>,
classifier: Arc<MimeClassifier>) { // XXX: Move it into net process later
match parse_blob_url(&load_data.url) {
None => {
let format_err = NetworkError::Internal(format!("Invalid blob URL format {:?}", load_data.url));
send_error(load_data.url.clone(), format_err, consumer);
}
Some((uuid, _fragment)) => {
match blob_url_store.read().unwrap().request(uuid, &load_data.url.origin()) {
Ok(entry) => load_blob(&load_data, consumer, classifier, entry),
Err(e) => {
let err = match e {
BlobURLStoreError::InvalidKey =>
format!("Invalid blob URL key {:?}", uuid.simple().to_string()),
BlobURLStoreError::InvalidOrigin =>
format!("Invalid blob URL origin {:?}", load_data.url.origin()),
};
send_error(load_data.url.clone(), NetworkError::Internal(err), consumer);
}
}
}
}
}
fn load_blob(load_data: &LoadData,
start_chan: LoadConsumer,
classifier: Arc<MimeClassifier>,
entry: &BlobURLStoreEntry) {
pub fn load_blob(load_data: &LoadData, start_chan: LoadConsumer,
classifier: Arc<MimeClassifier>, opt_filename: Option<String>,
rel_pos: &RelativePos, entry: &BlobURLStoreEntry) {
let content_type: Mime = entry.type_string.parse().unwrap_or(mime!(Text / Plain));
let charset = content_type.get_param(Attr::Charset);
let mut headers = Headers::new();
if let Some(ref name) = entry.filename {
if let Some(name) = opt_filename {
let charset = charset.and_then(|c| c.as_str().parse().ok());
headers.set(ContentDisposition {
disposition: DispositionType::Inline,
@ -66,8 +38,10 @@ fn load_blob(load_data: &LoadData,
});
}
let range = rel_pos.to_abs_range(entry.size as usize);
headers.set(ContentType(content_type.clone()));
headers.set(ContentLength(entry.size));
headers.set(ContentLength(range.len() as u64));
let metadata = Metadata {
final_url: load_data.url.clone(),
@ -81,7 +55,7 @@ fn load_blob(load_data: &LoadData,
if let Ok(chan) =
start_sending_sniffed_opt(start_chan, metadata, classifier,
&entry.bytes, load_data.context.clone()) {
&entry.bytes.index(range), load_data.context.clone()) {
let _ = chan.send(Done(Ok(())));
}
}

View file

@ -2,21 +2,24 @@
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use blob_loader;
use blob_loader::load_blob;
use ipc_channel::ipc::{self, IpcReceiver, IpcSender};
use mime_classifier::MimeClassifier;
use mime_guess::guess_mime_type_opt;
use net_traits::blob_url_store::{BlobURLStoreEntry, BlobURLStoreError, BlobURLStoreMsg};
use net_traits::filemanager_thread::{FileManagerThreadMsg, FileManagerResult, FilterPattern};
use net_traits::filemanager_thread::{SelectedFile, FileManagerThreadError, SelectedFileId};
use net_traits::blob_url_store::{BlobURLStoreEntry, BlobURLStoreError, parse_blob_url};
use net_traits::filemanager_thread::{FileManagerThreadMsg, FileManagerResult, FilterPattern, FileOrigin};
use net_traits::filemanager_thread::{SelectedFile, RelativePos, FileManagerThreadError, SelectedFileId};
use net_traits::{LoadConsumer, LoadData, NetworkError};
use resource_thread::send_error;
use std::cell::Cell;
use std::collections::HashMap;
use std::fs::File;
use std::io::Read;
use std::path::{Path, PathBuf};
use std::sync::{Arc, RwLock};
use std::sync::Arc;
#[cfg(any(target_os = "macos", target_os = "linux"))]
use tinyfiledialogs;
use url::{Url, Origin};
use url::Url;
use util::thread::spawn_named;
use uuid::Uuid;
@ -87,11 +90,26 @@ impl<UI: 'static + UIProvider> FileManagerThreadFactory<UI> for IpcSender<FileMa
}
}
struct FileStoreEntry {
/// Origin of the entry's "creator"
origin: FileOrigin,
/// Backend implementation
file_impl: FileImpl,
/// Reference counting
refs: Cell<usize>,
}
/// File backend implementation
enum FileImpl {
PathOnly(PathBuf),
Memory(BlobURLStoreEntry),
Sliced(Uuid, RelativePos),
}
struct FileManager<UI: 'static + UIProvider> {
receiver: IpcReceiver<FileManagerThreadMsg>,
idmap: HashMap<Uuid, PathBuf>,
store: HashMap<Uuid, FileStoreEntry>,
classifier: Arc<MimeClassifier>,
blob_url_store: Arc<RwLock<BlobURLStore>>,
ui: &'static UI,
}
@ -99,10 +117,9 @@ impl<UI: 'static + UIProvider> FileManager<UI> {
fn new(recv: IpcReceiver<FileManagerThreadMsg>, ui: &'static UI) -> FileManager<UI> {
FileManager {
receiver: recv,
idmap: HashMap::new(),
store: HashMap::new(),
classifier: Arc::new(MimeClassifier::new()),
blob_url_store: Arc::new(RwLock::new(BlobURLStore::new())),
ui: ui
ui: ui,
}
}
@ -110,33 +127,97 @@ impl<UI: 'static + UIProvider> FileManager<UI> {
fn start(&mut self) {
loop {
match self.receiver.recv().unwrap() {
FileManagerThreadMsg::SelectFile(filter, sender) => self.select_file(filter, sender),
FileManagerThreadMsg::SelectFiles(filter, sender) => self.select_files(filter, sender),
FileManagerThreadMsg::ReadFile(sender, id) => {
match self.try_read_file(id) {
FileManagerThreadMsg::SelectFile(filter, sender, origin) => self.select_file(filter, sender, origin),
FileManagerThreadMsg::SelectFiles(filter, sender, origin) => self.select_files(filter, sender, origin),
FileManagerThreadMsg::ReadFile(sender, id, origin) => {
match self.try_read_file(id, origin) {
Ok(buffer) => { let _ = sender.send(Ok(buffer)); }
Err(_) => { let _ = sender.send(Err(FileManagerThreadError::ReadFileError)); }
}
}
FileManagerThreadMsg::DeleteFileID(id) => self.delete_fileid(id),
FileManagerThreadMsg::BlobURLStoreMsg(msg) => self.blob_url_store.write().unwrap().process(msg),
FileManagerThreadMsg::TransferMemory(entry, rel_pos, sender, origin) =>
self.transfer_memory(entry, rel_pos, sender, origin),
FileManagerThreadMsg::AddSlicedEntry(id, rel_pos, sender, origin) =>
self.add_sliced_entry(id, rel_pos, sender, origin),
FileManagerThreadMsg::LoadBlob(load_data, consumer) => {
blob_loader::load(load_data, consumer,
self.blob_url_store.clone(),
self.classifier.clone());
match parse_blob_url(&load_data.url) {
None => {
let e = format!("Invalid blob URL format {:?}", load_data.url);
let format_err = NetworkError::Internal(e);
send_error(load_data.url.clone(), format_err, consumer);
}
Some((id, _fragment)) => {
self.process_request(&load_data, consumer, &RelativePos::full_range(), &id);
}
}
},
FileManagerThreadMsg::DecRef(id, origin) => {
if let Ok(id) = Uuid::parse_str(&id.0) {
self.dec_ref(id, origin);
}
}
FileManagerThreadMsg::IncRef(id, origin) => {
if let Ok(id) = Uuid::parse_str(&id.0) {
self.inc_ref(id, origin);
}
}
FileManagerThreadMsg::Exit => break,
};
}
}
fn inc_ref(&mut self, id: Uuid, origin_in: FileOrigin) {
match self.store.get(&id) {
Some(entry) => {
if entry.origin == origin_in {
entry.refs.set(entry.refs.get() + 1);
}
}
None => return, // Invalid UUID
}
}
fn add_sliced_entry(&mut self, id: SelectedFileId, rel_pos: RelativePos,
sender: IpcSender<Result<SelectedFileId, BlobURLStoreError>>,
origin_in: FileOrigin) {
if let Ok(id) = Uuid::parse_str(&id.0) {
match self.store.get(&id) {
Some(entry) => {
if entry.origin == origin_in {
// inc_ref on parent entry
entry.refs.set(entry.refs.get() + 1);
} else {
let _ = sender.send(Err(BlobURLStoreError::InvalidOrigin));
return;
}
},
None => {
let _ = sender.send(Err(BlobURLStoreError::InvalidFileID));
return;
}
};
let new_id = Uuid::new_v4();
self.store.insert(new_id, FileStoreEntry {
origin: origin_in.clone(),
file_impl: FileImpl::Sliced(id, rel_pos),
refs: Cell::new(1),
});
let _ = sender.send(Ok(SelectedFileId(new_id.simple().to_string())));
} else {
let _ = sender.send(Err(BlobURLStoreError::InvalidFileID));
}
}
fn select_file(&mut self, patterns: Vec<FilterPattern>,
sender: IpcSender<FileManagerResult<SelectedFile>>) {
sender: IpcSender<FileManagerResult<SelectedFile>>,
origin: FileOrigin) {
match self.ui.open_file_dialog("", patterns) {
Some(s) => {
let selected_path = Path::new(&s);
match self.create_entry(selected_path) {
match self.create_entry(selected_path, &origin) {
Some(triple) => { let _ = sender.send(Ok(triple)); }
None => { let _ = sender.send(Err(FileManagerThreadError::InvalidSelection)); }
};
@ -149,7 +230,8 @@ impl<UI: 'static + UIProvider> FileManager<UI> {
}
fn select_files(&mut self, patterns: Vec<FilterPattern>,
sender: IpcSender<FileManagerResult<Vec<SelectedFile>>>) {
sender: IpcSender<FileManagerResult<Vec<SelectedFile>>>,
origin: FileOrigin) {
match self.ui.open_file_dialog_multi("", patterns) {
Some(v) => {
let mut selected_paths = vec![];
@ -161,7 +243,7 @@ impl<UI: 'static + UIProvider> FileManager<UI> {
let mut replies = vec![];
for path in selected_paths {
match self.create_entry(path) {
match self.create_entry(path, &origin) {
Some(triple) => replies.push(triple),
None => { let _ = sender.send(Err(FileManagerThreadError::InvalidSelection)); }
};
@ -176,11 +258,17 @@ impl<UI: 'static + UIProvider> FileManager<UI> {
}
}
fn create_entry(&mut self, file_path: &Path) -> Option<SelectedFile> {
fn create_entry(&mut self, file_path: &Path, origin: &str) -> Option<SelectedFile> {
match File::open(file_path) {
Ok(handler) => {
let id = Uuid::new_v4();
self.idmap.insert(id, file_path.to_path_buf());
let file_impl = FileImpl::PathOnly(file_path.to_path_buf());
self.store.insert(id, FileStoreEntry {
origin: origin.to_string(),
file_impl: file_impl,
refs: Cell::new(1),
});
// Unix Epoch: https://doc.servo.org/std/time/constant.UNIX_EPOCH.html
let epoch = handler.metadata().and_then(|metadata| metadata.modified()).map_err(|_| ())
@ -215,79 +303,138 @@ impl<UI: 'static + UIProvider> FileManager<UI> {
}
}
fn try_read_file(&mut self, id: SelectedFileId) -> Result<Vec<u8>, ()> {
fn try_read_file(&self, id: SelectedFileId, origin_in: String) -> Result<Vec<u8>, ()> {
let id = try!(Uuid::parse_str(&id.0).map_err(|_| ()));
match self.idmap.get(&id) {
Some(filepath) => {
let mut buffer = vec![];
let mut handler = try!(File::open(&filepath).map_err(|_| ()));
try!(handler.read_to_end(&mut buffer).map_err(|_| ()));
Ok(buffer)
},
None => Err(())
}
}
fn delete_fileid(&mut self, id: SelectedFileId) {
if let Ok(id) = Uuid::parse_str(&id.0) {
self.idmap.remove(&id);
}
}
}
pub struct BlobURLStore {
entries: HashMap<Uuid, (Origin, BlobURLStoreEntry)>,
}
impl BlobURLStore {
pub fn new() -> BlobURLStore {
BlobURLStore {
entries: HashMap::new(),
}
}
fn process(&mut self, msg: BlobURLStoreMsg) {
match msg {
BlobURLStoreMsg::AddEntry(entry, origin_str, sender) => {
match Url::parse(&origin_str) {
Ok(base_url) => {
let id = Uuid::new_v4();
self.add_entry(id, base_url.origin(), entry);
let _ = sender.send(Ok(id.simple().to_string()));
match self.store.get(&id) {
Some(entry) => {
match entry.file_impl {
FileImpl::PathOnly(ref filepath) => {
if *entry.origin == origin_in {
let mut buffer = vec![];
let mut handler = try!(File::open(filepath).map_err(|_| ()));
try!(handler.read_to_end(&mut buffer).map_err(|_| ()));
Ok(buffer)
} else {
Err(())
}
},
FileImpl::Memory(ref buffered) => {
Ok(buffered.bytes.clone())
},
FileImpl::Sliced(ref id, ref _rel_pos) => {
self.try_read_file(SelectedFileId(id.simple().to_string()), origin_in)
}
Err(_) => {
let _ = sender.send(Err(BlobURLStoreError::InvalidOrigin));
}
},
None => Err(()),
}
}
fn dec_ref(&mut self, id: Uuid, origin_in: FileOrigin) {
let (is_last_ref, opt_parent_id) = match self.store.get(&id) {
Some(entry) => {
if *entry.origin == origin_in {
let r = entry.refs.get();
if r > 1 {
entry.refs.set(r - 1);
(false, None)
} else {
if let FileImpl::Sliced(ref parent_id, _) = entry.file_impl {
// if it has a reference to parent id, dec_ref on parent later
(true, Some(parent_id.clone()))
} else {
(true, None)
}
}
} else { // Invalid origin
return;
}
}
None => return, // Invalid UUID
};
if is_last_ref {
self.store.remove(&id);
if let Some(parent_id) = opt_parent_id {
self.dec_ref(parent_id, origin_in);
}
}
}
fn process_request(&self, load_data: &LoadData, consumer: LoadConsumer,
rel_pos: &RelativePos, id: &Uuid) {
let origin_in = load_data.url.origin().unicode_serialization();
match self.store.get(id) {
Some(entry) => {
match entry.file_impl {
FileImpl::Memory(ref buffered) => {
if *entry.origin == origin_in {
load_blob(&load_data, consumer, self.classifier.clone(),
None, rel_pos, buffered);
} else {
let e = format!("Invalid blob URL origin {:?}", origin_in);
send_error(load_data.url.clone(), NetworkError::Internal(e), consumer);
}
},
FileImpl::PathOnly(ref filepath) => {
let opt_filename = filepath.file_name()
.and_then(|osstr| osstr.to_str())
.map(|s| s.to_string());
if *entry.origin == origin_in {
let mut bytes = vec![];
let mut handler = File::open(filepath).unwrap();
let mime = guess_mime_type_opt(filepath);
let size = handler.read_to_end(&mut bytes).unwrap();
let entry = BlobURLStoreEntry {
type_string: match mime {
Some(x) => format!("{}", x),
None => "".to_string(),
},
size: size as u64,
bytes: bytes,
};
load_blob(&load_data, consumer, self.classifier.clone(),
opt_filename, rel_pos, &entry);
} else {
let e = format!("Invalid blob URL origin {:?}", origin_in);
send_error(load_data.url.clone(), NetworkError::Internal(e), consumer);
}
},
FileImpl::Sliced(ref id, ref rel_pos) => {
self.process_request(load_data, consumer, rel_pos, id);
}
}
}
BlobURLStoreMsg::DeleteEntry(id) => {
if let Ok(id) = Uuid::parse_str(&id) {
self.delete_entry(id);
}
},
}
}
pub fn request(&self, id: Uuid, origin: &Origin) -> Result<&BlobURLStoreEntry, BlobURLStoreError> {
match self.entries.get(&id) {
Some(ref pair) => {
if pair.0 == *origin {
Ok(&pair.1)
} else {
Err(BlobURLStoreError::InvalidOrigin)
}
_ => {
let e = format!("Invalid blob URL key {:?}", id.simple().to_string());
send_error(load_data.url.clone(), NetworkError::Internal(e), consumer);
}
None => Err(BlobURLStoreError::InvalidKey)
}
}
pub fn add_entry(&mut self, id: Uuid, origin: Origin, blob: BlobURLStoreEntry) {
self.entries.insert(id, (origin, blob));
}
fn transfer_memory(&mut self, entry: BlobURLStoreEntry, rel_pos: RelativePos,
sender: IpcSender<Result<SelectedFileId, BlobURLStoreError>>, origin: FileOrigin) {
match Url::parse(&origin) { // parse to check sanity
Ok(_) => {
let id = Uuid::new_v4();
self.store.insert(id, FileStoreEntry {
origin: origin.clone(),
file_impl: FileImpl::Memory(entry),
refs: Cell::new(1),
});
let sliced_id = SelectedFileId(id.simple().to_string());
pub fn delete_entry(&mut self, id: Uuid) {
self.entries.remove(&id);
self.add_sliced_entry(sliced_id, rel_pos, sender, origin);
}
Err(_) => {
let _ = sender.send(Err(BlobURLStoreError::InvalidOrigin));
}
}
}
}

View file

@ -18,6 +18,7 @@ hyper = { version = "0.9.9", features = [ "serde-serialization" ] }
image = "0.10"
lazy_static = "0.2"
log = "0.3.5"
num-traits = "0.1.32"
serde = "0.7.11"
serde_macros = "0.7.11"
url = {version = "1.0.0", features = ["heap_size"]}

View file

@ -2,37 +2,24 @@
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use ipc_channel::ipc::IpcSender;
use std::str::FromStr;
use url::Url;
use uuid::Uuid;
/// Errors returns to BlobURLStoreMsg::Request
#[derive(Clone, Serialize, Deserialize)]
#[derive(Clone, Debug, Serialize, Deserialize)]
pub enum BlobURLStoreError {
/// Invalid UUID key
InvalidKey,
/// Invalid File UUID
InvalidFileID,
/// Invalid URL origin
InvalidOrigin,
}
#[derive(Serialize, Deserialize)]
pub enum BlobURLStoreMsg {
/// Add an entry and send back the associated uuid
/// XXX: Second field is an unicode-serialized Origin, it is a temporary workaround
/// and should not be trusted. See issue https://github.com/servo/servo/issues/11722
AddEntry(BlobURLStoreEntry, String, IpcSender<Result<String, BlobURLStoreError>>),
/// Delete an entry by uuid
DeleteEntry(String),
}
/// Blob URL store entry, a packaged form of Blob DOM object
#[derive(Clone, Serialize, Deserialize)]
pub struct BlobURLStoreEntry {
/// MIME type string
pub type_string: String,
/// Some filename if the backend of Blob is a file
pub filename: Option<String>,
/// Size of content in bytes
pub size: u64,
/// Content of blob

View file

@ -2,11 +2,102 @@
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use blob_url_store::BlobURLStoreMsg;
use blob_url_store::{BlobURLStoreEntry, BlobURLStoreError};
use ipc_channel::ipc::IpcSender;
use num_traits::ToPrimitive;
use std::cmp::{max, min};
use std::ops::Range;
use std::path::PathBuf;
use super::{LoadConsumer, LoadData};
// HACK: We should send Origin directly instead of this in future, blocked on #11722
/// File manager store entry's origin
pub type FileOrigin = String;
/// Relative slice positions of a sequence,
/// whose semantic should be consistent with (start, end) parameters in
/// https://w3c.github.io/FileAPI/#dfn-slice
#[derive(Clone, Deserialize, Serialize)]
pub struct RelativePos {
/// Relative to first byte if non-negative,
/// relative to one past last byte if negative,
pub start: i64,
/// Relative offset from first byte if Some(non-negative),
/// relative to one past last byte if Some(negative),
/// None if one past last byte
pub end: Option<i64>,
}
impl RelativePos {
/// Full range from start to end
pub fn full_range() -> RelativePos {
RelativePos {
start: 0,
end: Some(0),
}
}
/// Instantiate optional slice position parameters
pub fn from_opts(start: Option<i64>, end: Option<i64>) -> RelativePos {
RelativePos {
start: start.unwrap_or(0),
end: end,
}
}
/// Slice the inner sliced range by repositioning
pub fn slice_inner(&self, rel_pos: &RelativePos) -> RelativePos {
RelativePos {
start: self.start + rel_pos.start,
end: match (self.end, rel_pos.end) {
(Some(old_end), Some(rel_end)) => Some(old_end + rel_end),
(old, None) => old,
(None, rel) => rel,
}
}
}
/// Compute absolute range by giving the total size
/// https://w3c.github.io/FileAPI/#slice-method-algo
pub fn to_abs_range(&self, size: usize) -> Range<usize> {
let size = size as i64;
let start = {
if self.start < 0 {
max(size + self.start, 0)
} else {
min(self.start, size)
}
};
let end = match self.end {
Some(rel_end) => {
if rel_end < 0 {
max(size + rel_end, 0)
} else {
min(rel_end, size)
}
}
None => size,
};
let span: i64 = max(end - start, 0);
Range {
start: start.to_usize().unwrap(),
end: (start + span).to_usize().unwrap(),
}
}
/// Inverse operation of to_abs_range
pub fn from_abs_range(range: Range<usize>, size: usize) -> RelativePos {
RelativePos {
start: range.start as i64,
end: Some(size as i64 - range.end as i64),
}
}
}
#[derive(Clone, Debug, Deserialize, Serialize)]
pub struct SelectedFileId(pub String);
@ -27,23 +118,29 @@ pub struct FilterPattern(pub String);
#[derive(Deserialize, Serialize)]
pub enum FileManagerThreadMsg {
/// Select a single file, return triple (FileID, FileName, lastModified)
SelectFile(Vec<FilterPattern>, IpcSender<FileManagerResult<SelectedFile>>),
SelectFile(Vec<FilterPattern>, IpcSender<FileManagerResult<SelectedFile>>, FileOrigin),
/// Select multiple files, return a vector of triples
SelectFiles(Vec<FilterPattern>, IpcSender<FileManagerResult<Vec<SelectedFile>>>),
SelectFiles(Vec<FilterPattern>, IpcSender<FileManagerResult<Vec<SelectedFile>>>, FileOrigin),
/// Read file, return the bytes
ReadFile(IpcSender<FileManagerResult<Vec<u8>>>, SelectedFileId),
/// Delete the FileID entry
DeleteFileID(SelectedFileId),
// Blob URL message
BlobURLStoreMsg(BlobURLStoreMsg),
ReadFile(IpcSender<FileManagerResult<Vec<u8>>>, SelectedFileId, FileOrigin),
/// Load resource by Blob URL
LoadBlob(LoadData, LoadConsumer),
/// Add an entry and send back the associated uuid
TransferMemory(BlobURLStoreEntry, RelativePos, IpcSender<Result<SelectedFileId, BlobURLStoreError>>, FileOrigin),
/// Add a sliced entry pointing to the parent id with a relative slicing position
AddSlicedEntry(SelectedFileId, RelativePos, IpcSender<Result<SelectedFileId, BlobURLStoreError>>, FileOrigin),
/// Decrease reference count
DecRef(SelectedFileId, FileOrigin),
/// Increase reference count
IncRef(SelectedFileId, FileOrigin),
/// Shut down this thread
Exit,
}

View file

@ -23,6 +23,7 @@ extern crate lazy_static;
#[macro_use]
extern crate log;
extern crate msg;
extern crate num_traits;
extern crate serde;
extern crate url;
extern crate util;

View file

@ -57,7 +57,7 @@ use js::jsval::JSVal;
use js::rust::Runtime;
use libc;
use msg::constellation_msg::{FrameType, PipelineId, SubpageId, WindowSizeData, WindowSizeType, ReferrerPolicy};
use net_traits::filemanager_thread::SelectedFileId;
use net_traits::filemanager_thread::{SelectedFileId, RelativePos};
use net_traits::image::base::{Image, ImageMetadata};
use net_traits::image_cache_thread::{ImageCacheChan, ImageCacheThread};
use net_traits::response::HttpsState;
@ -331,6 +331,7 @@ no_jsmanaged_fields!(ReferrerPolicy);
no_jsmanaged_fields!(ResourceThreads);
no_jsmanaged_fields!(SystemTime);
no_jsmanaged_fields!(SelectedFileId);
no_jsmanaged_fields!(RelativePos);
no_jsmanaged_fields!(OpaqueStyleAndLayoutData);
no_jsmanaged_fields!(CSSErrorReporter);
no_jsmanaged_fields!(WebGLBufferId);

View file

@ -8,17 +8,18 @@ use dom::bindings::codegen::Bindings::BlobBinding::BlobMethods;
use dom::bindings::codegen::UnionTypes::BlobOrString;
use dom::bindings::error::{Error, Fallible};
use dom::bindings::global::GlobalRef;
use dom::bindings::js::Root;
use dom::bindings::js::{JS, Root};
use dom::bindings::reflector::{Reflectable, Reflector, reflect_dom_object};
use dom::bindings::str::DOMString;
use encoding::all::UTF_8;
use encoding::types::{EncoderTrap, Encoding};
use ipc_channel::ipc;
use net_traits::filemanager_thread::{FileManagerThreadMsg, SelectedFileId};
use num_traits::ToPrimitive;
use net_traits::IpcSend;
use net_traits::blob_url_store::BlobURLStoreEntry;
use net_traits::filemanager_thread::{FileManagerThreadMsg, SelectedFileId, RelativePos};
use std::ascii::AsciiExt;
use std::cell::Cell;
use std::cmp::{max, min};
use std::ops::Range;
use std::sync::Arc;
#[derive(Clone, JSTraceable)]
@ -31,36 +32,12 @@ pub struct DataSlice {
impl DataSlice {
/// Construct DataSlice from reference counted bytes
pub fn new(bytes: Arc<Vec<u8>>, start: Option<i64>, end: Option<i64>) -> DataSlice {
let size = bytes.len() as i64;
let relativeStart: i64 = match start {
None => 0,
Some(start) => {
if start < 0 {
max(size + start, 0)
} else {
min(start, size)
}
}
};
let relativeEnd: i64 = match end {
None => size,
Some(end) => {
if end < 0 {
max(size + end, 0)
} else {
min(end, size)
}
}
};
let span: i64 = max(relativeEnd - relativeStart, 0);
let start = relativeStart.to_usize().unwrap();
let end = (relativeStart + span).to_usize().unwrap();
let range = RelativePos::from_opts(start, end).to_abs_range(bytes.len());
DataSlice {
bytes: bytes,
bytes_start: start,
bytes_end: end
bytes_start: range.start,
bytes_end: range.end,
}
}
@ -87,15 +64,30 @@ impl DataSlice {
pub fn size(&self) -> u64 {
(self.bytes_end as u64) - (self.bytes_start as u64)
}
/// Further adjust the slice range based on passed-in relative positions
pub fn slice(&self, pos: &RelativePos) -> DataSlice {
let old_size = self.size();
let range = pos.to_abs_range(old_size as usize);
DataSlice {
bytes: self.bytes.clone(),
bytes_start: self.bytes_start + range.start,
bytes_end: self.bytes_start + range.end,
}
}
}
#[derive(Clone, JSTraceable)]
#[must_root]
#[derive(JSTraceable)]
pub enum BlobImpl {
/// File-based, cached backend
/// File-based blob, including id and possibly cached content
File(SelectedFileId, DOMRefCell<Option<DataSlice>>),
/// Memory-based backend
/// Memory-based blob
Memory(DataSlice),
/// Sliced blob, including parent blob and
/// relative positions representing current slicing range,
/// it is leaf of a two-layer fat tree
Sliced(JS<Blob>, RelativePos),
}
impl BlobImpl {
@ -120,26 +112,58 @@ impl BlobImpl {
pub struct Blob {
reflector_: Reflector,
#[ignore_heap_size_of = "No clear owner"]
blob_impl: BlobImpl,
blob_impl: DOMRefCell<BlobImpl>,
typeString: String,
isClosed_: Cell<bool>,
}
impl Blob {
#[allow(unrooted_must_root)]
pub fn new(global: GlobalRef, blob_impl: BlobImpl, typeString: String) -> Root<Blob> {
let boxed_blob = box Blob::new_inherited(blob_impl, typeString);
reflect_dom_object(boxed_blob, global, BlobBinding::Wrap)
}
#[allow(unrooted_must_root)]
pub fn new_inherited(blob_impl: BlobImpl, typeString: String) -> Blob {
Blob {
reflector_: Reflector::new(),
blob_impl: blob_impl,
blob_impl: DOMRefCell::new(blob_impl),
typeString: typeString,
isClosed_: Cell::new(false),
}
}
#[allow(unrooted_must_root)]
fn new_sliced(parent: &Blob, rel_pos: RelativePos,
relativeContentType: DOMString) -> Root<Blob> {
let global = parent.global();
let blob_impl = match *parent.blob_impl.borrow() {
BlobImpl::File(ref id, _) => {
inc_ref_id(global.r(), id.clone());
// Create new parent node
BlobImpl::Sliced(JS::from_ref(parent), rel_pos)
}
BlobImpl::Memory(_) => {
// Create new parent node
BlobImpl::Sliced(JS::from_ref(parent), rel_pos)
}
BlobImpl::Sliced(ref grandparent, ref old_rel_pos) => {
// Adjust the slicing position, using same parent
let new_rel_pos = old_rel_pos.slice_inner(&rel_pos);
if let BlobImpl::File(ref id, _) = *grandparent.blob_impl.borrow() {
inc_ref_id(global.r(), id.clone());
}
BlobImpl::Sliced(grandparent.clone(), new_rel_pos)
}
};
Blob::new(global.r(), blob_impl, relativeContentType.into())
}
// https://w3c.github.io/FileAPI/#constructorBlob
pub fn Constructor(global: GlobalRef,
blobParts: Option<Vec<BlobOrString>>,
@ -160,19 +184,29 @@ impl Blob {
/// Get a slice to inner data, this might incur synchronous read and caching
pub fn get_slice(&self) -> Result<DataSlice, ()> {
match self.blob_impl {
BlobImpl::File(ref id, ref slice) => {
match *slice.borrow() {
match *self.blob_impl.borrow() {
BlobImpl::File(ref id, ref cached) => {
let buffer = match *cached.borrow() {
Some(ref s) => Ok(s.clone()),
None => {
let global = self.global();
let s = read_file(global.r(), id.clone())?;
*slice.borrow_mut() = Some(s.clone()); // Cached
Ok(s)
}
};
// Cache
if let Ok(buf) = buffer.clone() {
*cached.borrow_mut() = Some(buf);
}
buffer
}
BlobImpl::Memory(ref s) => Ok(s.clone()),
BlobImpl::Sliced(ref parent, ref rel_pos) => {
let dataslice = parent.get_slice_or_empty();
Ok(dataslice.slice(rel_pos))
}
BlobImpl::Memory(ref s) => Ok(s.clone())
}
}
@ -180,12 +214,83 @@ impl Blob {
pub fn get_slice_or_empty(&self) -> DataSlice {
self.get_slice().unwrap_or(DataSlice::empty())
}
pub fn get_id(&self) -> SelectedFileId {
match *self.blob_impl.borrow() {
BlobImpl::File(ref id, _) => id.clone(),
BlobImpl::Memory(ref slice) => self.promote_to_file(slice),
BlobImpl::Sliced(ref parent, ref rel_pos) => {
match *parent.blob_impl.borrow() {
BlobImpl::Sliced(_, _) => {
debug!("Sliced can't have a sliced parent");
// Return dummy id
SelectedFileId("".to_string())
}
BlobImpl::File(ref parent_id, _) =>
self.create_sliced_id(parent_id, rel_pos),
BlobImpl::Memory(ref parent_slice) => {
let parent_id = parent.promote_to_file(parent_slice);
*self.blob_impl.borrow_mut() = BlobImpl::Sliced(parent.clone(), rel_pos.clone());
self.create_sliced_id(&parent_id, rel_pos)
}
}
}
}
}
/// Promote memory-based Blob to file-based,
/// The bytes in data slice will be transferred to file manager thread
fn promote_to_file(&self, self_slice: &DataSlice) -> SelectedFileId {
let global = self.global();
let origin = global.r().get_url().origin().unicode_serialization();
let filemanager = global.r().resource_threads().sender();
let bytes = self_slice.get_bytes();
let rel_pos = RelativePos::from_abs_range(Range {
start: self_slice.bytes_start,
end: self_slice.bytes_end,
}, self_slice.bytes.len());
let entry = BlobURLStoreEntry {
type_string: self.typeString.clone(),
size: self.Size(),
bytes: bytes.to_vec(),
};
let (tx, rx) = ipc::channel().unwrap();
let _ = filemanager.send(FileManagerThreadMsg::TransferMemory(entry, rel_pos, tx, origin.clone()));
match rx.recv().unwrap() {
Ok(new_id) => SelectedFileId(new_id.0),
// Dummy id
Err(_) => SelectedFileId("".to_string()),
}
}
fn create_sliced_id(&self, parent_id: &SelectedFileId,
rel_pos: &RelativePos) -> SelectedFileId {
let global = self.global();
let origin = global.r().get_url().origin().unicode_serialization();
let filemanager = global.r().resource_threads().sender();
let (tx, rx) = ipc::channel().unwrap();
let msg = FileManagerThreadMsg::AddSlicedEntry(parent_id.clone(),
rel_pos.clone(),
tx, origin.clone());
let _ = filemanager.send(msg);
let new_id = rx.recv().unwrap().unwrap();
// Return the indirect id reference
SelectedFileId(new_id.0)
}
}
fn read_file(global: GlobalRef, id: SelectedFileId) -> Result<DataSlice, ()> {
let file_manager = global.filemanager_thread();
let (chan, recv) = ipc::channel().map_err(|_|())?;
let _ = file_manager.send(FileManagerThreadMsg::ReadFile(chan, id));
let origin = global.get_url().origin().unicode_serialization();
let msg = FileManagerThreadMsg::ReadFile(chan, id, origin);
let _ = file_manager.send(msg);
let result = match recv.recv() {
Ok(ret) => ret,
@ -248,10 +353,8 @@ impl BlobMethods for Blob {
}
};
let global = self.global();
let bytes = self.get_slice_or_empty().bytes.clone();
let slice = DataSlice::new(bytes, start, end);
Blob::new(global.r(), BlobImpl::new_from_slice(slice), relativeContentType.into())
let rel_pos = RelativePos::from_opts(start, end);
Blob::new_sliced(self, rel_pos, relativeContentType)
}
// https://w3c.github.io/FileAPI/#dfn-isClosed
@ -274,7 +377,6 @@ impl BlobMethods for Blob {
}
}
impl BlobBinding::BlobPropertyBag {
/// Get the normalized inner type string
/// https://w3c.github.io/FileAPI/#dfn-type
@ -292,3 +394,11 @@ fn is_ascii_printable(string: &str) -> bool {
// https://w3c.github.io/FileAPI/#constructorBlob
string.chars().all(|c| c >= '\x20' && c <= '\x7E')
}
/// Bump the reference counter in file manager thread
fn inc_ref_id(global: GlobalRef, id: SelectedFileId) {
let file_manager = global.filemanager_thread();
let origin = global.get_url().origin().unicode_serialization();
let msg = FileManagerThreadMsg::IncRef(id, origin);
let _ = file_manager.send(msg);
}

View file

@ -23,6 +23,7 @@ pub struct File {
}
impl File {
#[allow(unrooted_must_root)]
fn new_inherited(blob_impl: BlobImpl, name: DOMString,
modified: Option<i64>, typeString: &str) -> File {
File {
@ -39,6 +40,7 @@ impl File {
}
}
#[allow(unrooted_must_root)]
pub fn new(global: GlobalRef, blob_impl: BlobImpl,
name: DOMString, modified: Option<i64>, typeString: &str) -> Root<File> {
reflect_dom_object(box File::new_inherited(blob_impl, name, modified, typeString),

View file

@ -1152,6 +1152,7 @@ impl Activatable for HTMLInputElement {
InputType::InputFile => {
// https://html.spec.whatwg.org/multipage/#file-upload-state-(type=file)
let window = window_from_node(self);
let origin = window.get_url().origin().unicode_serialization();
let filemanager = window.resource_threads().sender();
let mut files: Vec<Root<File>> = vec![];
@ -1162,7 +1163,7 @@ impl Activatable for HTMLInputElement {
if self.Multiple() {
let (chan, recv) = ipc::channel().expect("Error initializing channel");
let msg = FileManagerThreadMsg::SelectFiles(filter, chan);
let msg = FileManagerThreadMsg::SelectFiles(filter, chan, origin);
let _ = filemanager.send(msg).unwrap();
match recv.recv().expect("IpcSender side error") {
@ -1182,7 +1183,7 @@ impl Activatable for HTMLInputElement {
};
} else {
let (chan, recv) = ipc::channel().expect("Error initializing channel");
let msg = FileManagerThreadMsg::SelectFile(filter, chan);
let msg = FileManagerThreadMsg::SelectFile(filter, chan, origin);
let _ = filemanager.send(msg).unwrap();
match recv.recv().expect("IpcSender side error") {

View file

@ -13,10 +13,9 @@ use dom::bindings::str::{DOMString, USVString};
use dom::blob::Blob;
use dom::urlhelper::UrlHelper;
use dom::urlsearchparams::URLSearchParams;
use ipc_channel::ipc;
use net_traits::IpcSend;
use net_traits::blob_url_store::{BlobURLStoreEntry, BlobURLStoreMsg, parse_blob_url};
use net_traits::filemanager_thread::FileManagerThreadMsg;
use net_traits::blob_url_store::parse_blob_url;
use net_traits::filemanager_thread::{SelectedFileId, FileManagerThreadMsg};
use std::borrow::ToOwned;
use std::default::Default;
use url::quirks::domain_to_unicode;
@ -125,34 +124,9 @@ impl URL {
return DOMString::from(URL::unicode_serialization_blob_url(&origin, &id));
}
let filemanager = global.resource_threads().sender();
let id = blob.get_id();
let slice = blob.get_slice_or_empty();
let bytes = slice.get_bytes();
let entry = BlobURLStoreEntry {
type_string: blob.Type().to_string(),
filename: None, // XXX: the filename is currently only in File object now
size: blob.Size(),
bytes: bytes.to_vec(),
};
let (tx, rx) = ipc::channel().unwrap();
let msg = BlobURLStoreMsg::AddEntry(entry, origin.clone(), tx);
let _ = filemanager.send(FileManagerThreadMsg::BlobURLStoreMsg(msg));
match rx.recv().unwrap() {
Ok(id) => {
DOMString::from(URL::unicode_serialization_blob_url(&origin, &id))
}
Err(_) => {
// Generate a dummy id
let id = Uuid::new_v4().simple().to_string();
DOMString::from(URL::unicode_serialization_blob_url(&origin, &id))
}
}
DOMString::from(URL::unicode_serialization_blob_url(&origin, &id.0))
}
// https://w3c.github.io/FileAPI/#dfn-revokeObjectURL
@ -166,13 +140,15 @@ impl URL {
NOTE: The first step is unnecessary, since closed blobs do not exist in the store
*/
let origin = global.get_url().origin().unicode_serialization();
match Url::parse(&url) {
Ok(url) => match parse_blob_url(&url) {
Some((id, _)) => {
let filemanager = global.resource_threads().sender();
let msg = BlobURLStoreMsg::DeleteEntry(id.simple().to_string());
let _ = filemanager.send(FileManagerThreadMsg::BlobURLStoreMsg(msg));
let id = SelectedFileId(id.simple().to_string());
let msg = FileManagerThreadMsg::DecRef(id, origin);
let _ = filemanager.send(msg);
}
None => {}
},

servo/components/servo/Cargo.lock (generated)
View file

@ -1478,6 +1478,7 @@ dependencies = [
"lazy_static 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
"log 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)",
"msg 0.0.1",
"num-traits 0.1.32 (registry+https://github.com/rust-lang/crates.io-index)",
"serde 0.7.11 (registry+https://github.com/rust-lang/crates.io-index)",
"serde_macros 0.7.11 (registry+https://github.com/rust-lang/crates.io-index)",
"url 1.1.1 (registry+https://github.com/rust-lang/crates.io-index)",

servo/ports/cef/Cargo.lock (generated)
View file

@ -1359,6 +1359,7 @@ dependencies = [
"lazy_static 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
"log 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)",
"msg 0.0.1",
"num-traits 0.1.32 (registry+https://github.com/rust-lang/crates.io-index)",
"serde 0.7.11 (registry+https://github.com/rust-lang/crates.io-index)",
"serde_macros 0.7.11 (registry+https://github.com/rust-lang/crates.io-index)",
"url 1.1.1 (registry+https://github.com/rust-lang/crates.io-index)",

View file

@ -35,12 +35,12 @@ fn test_filemanager() {
.expect("Read tests/unit/net/test.txt error");
let patterns = vec![FilterPattern(".txt".to_string())];
let origin = "test.com".to_string();
{
// Try to select a dummy file "tests/unit/net/test.txt"
let (tx, rx) = ipc::channel().unwrap();
chan.send(FileManagerThreadMsg::SelectFile(patterns.clone(), tx)).unwrap();
chan.send(FileManagerThreadMsg::SelectFile(patterns.clone(), tx, origin.clone())).unwrap();
let selected = rx.recv().expect("File manager channel is broken")
.expect("The file manager failed to find test.txt");
@ -51,7 +51,7 @@ fn test_filemanager() {
// Test by reading, expecting same content
{
let (tx2, rx2) = ipc::channel().unwrap();
chan.send(FileManagerThreadMsg::ReadFile(tx2, selected.id.clone())).unwrap();
chan.send(FileManagerThreadMsg::ReadFile(tx2, selected.id.clone(), origin.clone())).unwrap();
let msg = rx2.recv().expect("File manager channel is broken");
@ -60,12 +60,12 @@ fn test_filemanager() {
}
// Delete the id
chan.send(FileManagerThreadMsg::DeleteFileID(selected.id.clone())).unwrap();
chan.send(FileManagerThreadMsg::DecRef(selected.id.clone(), origin.clone())).unwrap();
// Test by reading again, expecting read error because we invalidated the id
{
let (tx2, rx2) = ipc::channel().unwrap();
chan.send(FileManagerThreadMsg::ReadFile(tx2, selected.id.clone())).unwrap();
chan.send(FileManagerThreadMsg::ReadFile(tx2, selected.id.clone(), origin.clone())).unwrap();
let msg = rx2.recv().expect("File manager channel is broken");
@ -82,7 +82,7 @@ fn test_filemanager() {
{
let (tx, rx) = ipc::channel().unwrap();
let _ = chan.send(FileManagerThreadMsg::SelectFile(patterns.clone(), tx));
let _ = chan.send(FileManagerThreadMsg::SelectFile(patterns.clone(), tx, origin.clone()));
assert!(rx.try_recv().is_err(), "The thread should not respond normally after exited");
}