Places uniffication (#4770)
* initial conversion of errors for places uniffication * Converted history metadata functions for places uniffication * Converted kotlin history metadata functions * Converted iOS history metadata functions * [Places uniffication] Fix places error issues (#4687) * remove uniffi-specific wrapper and updated error test * Uniffi VisitObservation and apply_observation (#4689) * uniffied VisitObservation, HistoryVisitInfo and methods using those structs * Uniffi top frecent site info (#4711) * Uniffi top frecent site info * Uniffis FrecencyThresholdOption * Uniffi Places Sync (#4714) * uniffi places sync and sync15 functions * Uniffi Search result and match url (#4720) * Uniffi query autocomplete * uniffi places_accept_result * uniffi places_match_url * cargo fmt * cargo clippy * remove accept_result from ffi crate * Adds comment on the uniffi bug with varaint shadowing type * Uniffi Places Interrupt methods (#4726) * uniffi places interrupt functions * Consolidate types (#4736) * Updates the visit observation to use URL * Uses VisitTransition for HistoryVisitInfo * updates uniffi to 0.16 * Uniffied bookmarks * switch to use uint in the apis for adding bookmarks (#4747) * Remove the manual places FFI entirely. * folders and separators now have non-nullable parents * Always insert via Insertable items, and clarify separation between node types. * Add json_tree module with all json-based types and implementation. * Fix swift warnings re non-null parentGuid * bookmarks: Remove public_node and introduce a fetch module * Update swift code to make breaking changes fixes cleaner (#4766) * rebased and added changelog entry Co-authored-by: lougeniac64 <lougeniaC64@users.noreply.github.com> Co-authored-by: Tarik Eshaq <teshaq@mozilla.com> Co-authored-by: Mark Hammond <mhammond@skippinet.com.au>
This commit is contained in:
Родитель
463b5b62a1
Коммит
73427f79a6
|
@ -18,3 +18,8 @@ Use the template below to make assigning a version number during the release cut
|
|||
- Description of the change with a link to the pull request ([#0000](https://github.com/mozilla/application-services/pull/0000))
|
||||
|
||||
-->
|
||||
|
||||
## Places
|
||||
|
||||
### ⚠️ Breaking Changes ⚠️
|
||||
- Places has been completely UniFFI-ed
|
|
@ -1508,7 +1508,6 @@ dependencies = [
|
|||
"logins",
|
||||
"nimbus-sdk",
|
||||
"places",
|
||||
"places-ffi",
|
||||
"push",
|
||||
"rc_log_ffi",
|
||||
"tabs",
|
||||
|
@ -1773,7 +1772,7 @@ dependencies = [
|
|||
"lazy_static",
|
||||
"logins",
|
||||
"nimbus-sdk",
|
||||
"places-ffi",
|
||||
"places",
|
||||
"push",
|
||||
"rc_log_ffi",
|
||||
"sync_manager",
|
||||
|
@ -1790,7 +1789,7 @@ dependencies = [
|
|||
"glean-ffi",
|
||||
"logins",
|
||||
"nimbus-sdk",
|
||||
"places-ffi",
|
||||
"places",
|
||||
"rc_log_ffi",
|
||||
"viaduct",
|
||||
"viaduct-reqwest",
|
||||
|
@ -2426,7 +2425,6 @@ dependencies = [
|
|||
"dogear",
|
||||
"env_logger 0.7.1",
|
||||
"error-support",
|
||||
"ffi-support",
|
||||
"idna",
|
||||
"interrupt-support",
|
||||
"lazy_static",
|
||||
|
@ -2435,8 +2433,6 @@ dependencies = [
|
|||
"parking_lot 0.5.5",
|
||||
"percent-encoding",
|
||||
"pretty_assertions",
|
||||
"prost",
|
||||
"prost-derive",
|
||||
"rusqlite",
|
||||
"serde",
|
||||
"serde_derive",
|
||||
|
@ -2467,27 +2463,6 @@ dependencies = [
|
|||
"url",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "places-ffi"
|
||||
version = "0.1.0"
|
||||
dependencies = [
|
||||
"ffi-support",
|
||||
"interrupt-support",
|
||||
"lazy_static",
|
||||
"log",
|
||||
"places",
|
||||
"prost",
|
||||
"serde_json",
|
||||
"sql-support",
|
||||
"sync-guid",
|
||||
"sync15",
|
||||
"types",
|
||||
"uniffi",
|
||||
"uniffi_macros",
|
||||
"url",
|
||||
"viaduct",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "places-integration-tests"
|
||||
version = "0.1.0"
|
||||
|
@ -3827,9 +3802,9 @@ checksum = "8ccb82d61f80a663efe1f787a51b16b5a51e3314d6ac365b08639f52387b33f3"
|
|||
|
||||
[[package]]
|
||||
name = "uniffi"
|
||||
version = "0.15.2"
|
||||
version = "0.16.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "15e39922a6e95a3933017766cceebdc071891d43257cae272c55028842da724a"
|
||||
checksum = "472b6bbce3490a55f4f889e382a64803693929fea11e05c1057c0363af8fe019"
|
||||
dependencies = [
|
||||
"anyhow",
|
||||
"bytes",
|
||||
|
@ -3842,9 +3817,9 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "uniffi_bindgen"
|
||||
version = "0.15.2"
|
||||
version = "0.16.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "bc0b1d54aee6bf6ab8e13d322cd9438c559e768790d356ecb54cf73f587afadc"
|
||||
checksum = "87e8e61e7f6d03d3bf70fe16c956061b7b5a5ef571171bb7bec160c86b5a5779"
|
||||
dependencies = [
|
||||
"anyhow",
|
||||
"askama",
|
||||
|
@ -3859,9 +3834,9 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "uniffi_build"
|
||||
version = "0.15.2"
|
||||
version = "0.16.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "0eef7d21f7e302fedef7e52f0dd4a5b1a636c99d4afea9f23a762106676f4960"
|
||||
checksum = "9e41292abe9503cfdc682f53566b785f322548ca728e5b3b6183b0f2694b4225"
|
||||
dependencies = [
|
||||
"anyhow",
|
||||
"uniffi_bindgen",
|
||||
|
@ -3869,9 +3844,9 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "uniffi_macros"
|
||||
version = "0.15.2"
|
||||
version = "0.16.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "16590eef444dcdd49dfbaa08f5931469375756fac5d3f831a287df65ba1d8cc8"
|
||||
checksum = "58ce535e537d6a3004d503b405bde5971a0c0952a836a4c756af23bf09acd823"
|
||||
dependencies = [
|
||||
"glob",
|
||||
"proc-macro2",
|
||||
|
|
|
@ -7,7 +7,6 @@ members = [
|
|||
"components/logins",
|
||||
"components/nimbus",
|
||||
"components/places",
|
||||
"components/places/ffi",
|
||||
"components/push",
|
||||
"components/rc_log",
|
||||
"components/support/error",
|
||||
|
@ -78,7 +77,6 @@ default-members = [
|
|||
"components/logins",
|
||||
"components/nimbus",
|
||||
"components/places",
|
||||
"components/places/ffi",
|
||||
"components/push",
|
||||
"components/rc_log",
|
||||
"components/support/error",
|
||||
|
|
|
@ -23,7 +23,7 @@ sync15 = { path = "../sync15" }
|
|||
sync15-traits = {path = "../support/sync15-traits"}
|
||||
thiserror = "1.0"
|
||||
types = { path = "../support/types" }
|
||||
uniffi = "^0.15"
|
||||
uniffi = "^0.16"
|
||||
url = { version = "2.2", features = ["serde"] }
|
||||
|
||||
[dependencies.rusqlite]
|
||||
|
@ -36,4 +36,4 @@ libsqlite3-sys = "0.20.1"
|
|||
|
||||
[build-dependencies]
|
||||
nss_build_common = { path = "../support/rc_crypto/nss/nss_build_common" }
|
||||
uniffi_build = { version = "^0.15", features = [ "builtin-bindgen" ]}
|
||||
uniffi_build = { version = "^0.16", features = [ "builtin-bindgen" ]}
|
||||
|
|
|
@ -9,8 +9,8 @@ exclude = ["/android", "/ios"]
|
|||
[dependencies]
|
||||
log = "0.4"
|
||||
thiserror = "1.0"
|
||||
uniffi = "^0.15"
|
||||
uniffi_macros = "^0.15"
|
||||
uniffi = "^0.16"
|
||||
uniffi_macros = "^0.16"
|
||||
|
||||
[build-dependencies]
|
||||
uniffi_build = { version = "^0.15", features=["builtin-bindgen"] }
|
||||
uniffi_build = { version = "^0.16", features=["builtin-bindgen"] }
|
||||
|
|
|
@ -24,11 +24,11 @@ error-support = { path = "../support/error" }
|
|||
thiserror = "1.0"
|
||||
anyhow = "1.0"
|
||||
sync-guid = { path = "../support/guid", features = ["random"] }
|
||||
uniffi = "^0.15"
|
||||
uniffi_macros = "^0.15"
|
||||
uniffi = "^0.16"
|
||||
uniffi_macros = "^0.16"
|
||||
|
||||
[build-dependencies]
|
||||
uniffi_build = { version = "^0.15", features=["builtin-bindgen"] }
|
||||
uniffi_build = { version = "^0.16", features=["builtin-bindgen"] }
|
||||
|
||||
[dev-dependencies]
|
||||
viaduct-reqwest = { path = "../support/viaduct-reqwest" }
|
||||
|
|
|
@ -26,15 +26,15 @@ error-support = { path = "../support/error" }
|
|||
sync-guid = { path = "../support/guid", features = ["rusqlite_support", "random"] }
|
||||
thiserror = "1.0"
|
||||
anyhow = "1.0"
|
||||
uniffi = "^0.15"
|
||||
uniffi_macros = "^0.15"
|
||||
uniffi = "^0.16"
|
||||
uniffi_macros = "^0.16"
|
||||
|
||||
[dependencies.rusqlite]
|
||||
version = "0.24.2"
|
||||
features = ["sqlcipher", "limits", "unlock_notify"]
|
||||
|
||||
[build-dependencies]
|
||||
uniffi_build = { version = "^0.15", features=["builtin-bindgen"] }
|
||||
uniffi_build = { version = "^0.16", features=["builtin-bindgen"] }
|
||||
|
||||
[dev-dependencies]
|
||||
more-asserts = "0.2"
|
||||
|
|
|
@ -32,11 +32,11 @@ uuid = { version = "0.8", features = ["serde", "v4"]}
|
|||
sha2 = "0.9"
|
||||
hex = "0.4"
|
||||
once_cell = "1"
|
||||
uniffi = { version = "^0.15", optional = true }
|
||||
uniffi = { version = "^0.16", optional = true }
|
||||
chrono = { version = "0.4", features = ["serde"]}
|
||||
|
||||
[build-dependencies]
|
||||
uniffi_build = { version = "^0.15", features = [ "builtin-bindgen" ], optional = true }
|
||||
uniffi_build = { version = "^0.16", features = [ "builtin-bindgen" ], optional = true }
|
||||
|
||||
[dev-dependencies]
|
||||
viaduct-reqwest = { path = "../support/viaduct-reqwest" }
|
||||
|
|
|
@ -23,20 +23,17 @@ percent-encoding = "2.1"
|
|||
caseless = "0.2"
|
||||
sql-support = { path = "../support/sql" }
|
||||
types = { path = "../support/types" }
|
||||
ffi-support = "0.4"
|
||||
bitflags = "1.2"
|
||||
idna = "0.2"
|
||||
memchr = "2.3"
|
||||
prost = "0.8"
|
||||
prost-derive = "0.8"
|
||||
dogear = "0.4"
|
||||
interrupt-support = { path = "../support/interrupt" }
|
||||
error-support = { path = "../support/error" }
|
||||
sync-guid = { path = "../support/guid", features = ["rusqlite_support", "random"]}
|
||||
thiserror = "1.0"
|
||||
anyhow = "1.0"
|
||||
uniffi = "^0.15"
|
||||
uniffi_macros = "^0.15"
|
||||
uniffi = "^0.16"
|
||||
uniffi_macros = "^0.16"
|
||||
|
||||
[dependencies.rusqlite]
|
||||
version = "0.24.2"
|
||||
|
@ -48,4 +45,4 @@ tempfile = "3.1"
|
|||
env_logger = {version = "0.7", default-features = false}
|
||||
|
||||
[build-dependencies]
|
||||
uniffi_build = { version = "^0.15", features=["builtin-bindgen"] }
|
||||
uniffi_build = { version = "^0.16", features=["builtin-bindgen"] }
|
||||
|
|
|
@ -4,7 +4,7 @@
|
|||
|
||||
package mozilla.appservices.places
|
||||
|
||||
import java.lang.RuntimeException
|
||||
import mozilla.appservices.places.uniffi.BookmarkItem
|
||||
|
||||
/**
|
||||
* Enumeration of the ids of the roots of the bookmarks tree.
|
||||
|
@ -23,140 +23,6 @@ enum class BookmarkRoot(val id: String) {
|
|||
Mobile("mobile______"),
|
||||
}
|
||||
|
||||
/**
|
||||
* Enumeration of the type of a bookmark item.
|
||||
*
|
||||
* Must match BookmarkType in the Rust code.
|
||||
*/
|
||||
enum class BookmarkType(val value: Int) {
|
||||
Bookmark(1),
|
||||
Folder(2),
|
||||
Separator(3),
|
||||
}
|
||||
|
||||
/**
|
||||
* An interface defining the set of fields common to all nodes
|
||||
* in the bookmark tree.
|
||||
*/
|
||||
sealed class BookmarkTreeNode {
|
||||
/**
|
||||
* The type of this bookmark.
|
||||
*/
|
||||
abstract val type: BookmarkType
|
||||
|
||||
/**
|
||||
* The guid of this record. Bookmark guids are always 12 characters in the url-safe
|
||||
* base64 character set.
|
||||
*/
|
||||
abstract val guid: String
|
||||
|
||||
/**
|
||||
* Creation time, in milliseconds since the unix epoch.
|
||||
*
|
||||
* May not be a local timestamp.
|
||||
*/
|
||||
abstract val dateAdded: Long
|
||||
|
||||
/**
|
||||
* Last modification time, in milliseconds since the unix epoch.
|
||||
*/
|
||||
abstract val lastModified: Long
|
||||
|
||||
/**
|
||||
* The guid of this record's parent. It should only be null for
|
||||
* [BookmarkRoot.Root].
|
||||
*/
|
||||
abstract val parentGUID: String?
|
||||
|
||||
/**
|
||||
* The (0-based) position of this record within its parent.
|
||||
*/
|
||||
abstract val position: Int
|
||||
}
|
||||
|
||||
/**
|
||||
* A bookmark tree node that represents a bookmarked URL.
|
||||
*
|
||||
* Its type is always [BookmarkType.Bookmark], and it has a `title `and `url`
|
||||
* in addition to the fields defined by [BookmarkTreeNode].
|
||||
*/
|
||||
|
||||
data class BookmarkItem(
|
||||
override val guid: String,
|
||||
override val dateAdded: Long,
|
||||
override val lastModified: Long,
|
||||
override val parentGUID: String?,
|
||||
override val position: Int,
|
||||
|
||||
/**
|
||||
* The URL of this bookmark.
|
||||
*/
|
||||
val url: String,
|
||||
|
||||
/**
|
||||
* The title of the bookmark.
|
||||
*
|
||||
* Note that the bookmark storage layer treats NULL and the
|
||||
* empty string as equivalent in titles.
|
||||
*/
|
||||
val title: String
|
||||
) : BookmarkTreeNode() {
|
||||
override val type get() = BookmarkType.Bookmark
|
||||
}
|
||||
|
||||
/**
|
||||
* A bookmark which is a folder.
|
||||
*
|
||||
* Its type is always [BookmarkType.Folder], and it has a `title`,
|
||||
* a list of `childGUIDs`, and possibly a list of `children` in
|
||||
* addition to those defined by [BookmarkTreeNode].
|
||||
*/
|
||||
data class BookmarkFolder(
|
||||
override val guid: String,
|
||||
override val dateAdded: Long,
|
||||
override val lastModified: Long,
|
||||
override val parentGUID: String?,
|
||||
override val position: Int,
|
||||
|
||||
/**
|
||||
* The title of this bookmark folder, if any was provided.
|
||||
*
|
||||
* Note that the bookmark storage layer treats NULL and the
|
||||
* empty string as equivalent in titles.
|
||||
*/
|
||||
val title: String,
|
||||
|
||||
/**
|
||||
* The GUIDs of this folder's list of children.
|
||||
*/
|
||||
val childGUIDs: List<String>,
|
||||
|
||||
/**
|
||||
* If this node was returned the [ReadableBookmarksConnection.getBookmarksTree]
|
||||
* method, then this should have the list of children.
|
||||
*/
|
||||
val children: List<BookmarkTreeNode>?
|
||||
|
||||
) : BookmarkTreeNode() {
|
||||
override val type get() = BookmarkType.Folder
|
||||
}
|
||||
|
||||
/**
|
||||
* A bookmark which is a separator.
|
||||
*
|
||||
* Its type is always [BookmarkType.Separator], and it has no fields
|
||||
* besides those defined by [BookmarkTreeNode].
|
||||
*/
|
||||
data class BookmarkSeparator(
|
||||
override val guid: String,
|
||||
override val dateAdded: Long,
|
||||
override val lastModified: Long,
|
||||
override val parentGUID: String?,
|
||||
override val position: Int
|
||||
) : BookmarkTreeNode() {
|
||||
override val type get() = BookmarkType.Separator
|
||||
}
|
||||
|
||||
/**
|
||||
* The methods provided by a read-only or a read-write bookmarks connection.
|
||||
*/
|
||||
|
@ -182,7 +48,7 @@ interface ReadableBookmarksConnection : InterruptibleConnection {
|
|||
* @throws OperationInterrupted if this database implements [InterruptibleConnection] and
|
||||
* has its `interrupt()` method called on another thread.
|
||||
*/
|
||||
fun getBookmarksTree(rootGUID: String, recursive: Boolean): BookmarkTreeNode?
|
||||
fun getBookmarksTree(rootGUID: Guid, recursive: Boolean): BookmarkItem?
|
||||
|
||||
/**
|
||||
* Returns the information about the bookmark with the provided id. This differs from
|
||||
|
@ -196,7 +62,7 @@ interface ReadableBookmarksConnection : InterruptibleConnection {
|
|||
* @throws OperationInterrupted if this database implements [InterruptibleConnection] and
|
||||
* has its `interrupt()` method called on another thread.
|
||||
*/
|
||||
fun getBookmark(guid: String): BookmarkTreeNode?
|
||||
fun getBookmark(guid: Guid): BookmarkItem?
|
||||
|
||||
/**
|
||||
* Returns the list of bookmarks with the provided URL.
|
||||
|
@ -223,7 +89,7 @@ interface ReadableBookmarksConnection : InterruptibleConnection {
|
|||
* @throws OperationInterrupted if this database implements [InterruptibleConnection] and
|
||||
* has its `interrupt()` method called on another thread.
|
||||
*/
|
||||
fun getBookmarkUrlForKeyword(keyword: String): String?
|
||||
fun getBookmarkUrlForKeyword(keyword: String): Url?
|
||||
|
||||
/**
|
||||
* Returns the list of bookmarks that match the provided search string.
|
||||
|
@ -271,7 +137,7 @@ interface WritableBookmarksConnection : ReadableBookmarksConnection {
|
|||
*
|
||||
* @throws CannotUpdateRoot If `guid` refers to a bookmark root.
|
||||
*/
|
||||
fun deleteBookmarkNode(guid: String): Boolean
|
||||
fun deleteBookmarkNode(guid: Guid): Boolean
|
||||
|
||||
/**
|
||||
* Delete all bookmarks without affecting history
|
||||
|
@ -295,10 +161,10 @@ interface WritableBookmarksConnection : ReadableBookmarksConnection {
|
|||
* @throws InvalidParent If `parentGUID` does not refer to a folder node.
|
||||
*/
|
||||
fun createFolder(
|
||||
parentGUID: String,
|
||||
parentGUID: Guid,
|
||||
title: String,
|
||||
position: Int? = null
|
||||
): String
|
||||
position: UInt? = null
|
||||
): Guid
|
||||
|
||||
/**
|
||||
* Create a bookmark separator, returning its guid.
|
||||
|
@ -315,9 +181,9 @@ interface WritableBookmarksConnection : ReadableBookmarksConnection {
|
|||
* @throws InvalidParent If `parentGUID` does not refer to a folder node.
|
||||
*/
|
||||
fun createSeparator(
|
||||
parentGUID: String,
|
||||
position: Int? = null
|
||||
): String
|
||||
parentGUID: Guid,
|
||||
position: UInt? = null
|
||||
): Guid
|
||||
|
||||
/**
|
||||
* Create a bookmark item, returning its guid.
|
||||
|
@ -338,17 +204,20 @@ interface WritableBookmarksConnection : ReadableBookmarksConnection {
|
|||
* @throws UrlTooLong if `url` exceeds the maximum length of 65536 bytes (when encoded)
|
||||
*/
|
||||
fun createBookmarkItem(
|
||||
parentGUID: String,
|
||||
url: String,
|
||||
parentGUID: Guid,
|
||||
url: Url,
|
||||
title: String,
|
||||
position: Int? = null
|
||||
): String
|
||||
position: UInt? = null
|
||||
): Guid
|
||||
|
||||
/**
|
||||
* Update a bookmark to the provided info.
|
||||
*
|
||||
* @param guid GUID of the bookmark to update
|
||||
* @param info The changes to make to the listed bookmark.
|
||||
* @param parentGuid The new parent guid for the listed bookmark.
|
||||
* @param position The new position for the listed bookmark.
|
||||
* @param title The new title for the listed bookmark.
|
||||
* @param url The new url the listed bookmark.
|
||||
*
|
||||
* @throws InvalidBookmarkUpdate If the change requested is impossible given the
|
||||
* type of the item in the DB. For example, on attempts to update the title of a separator.
|
||||
|
@ -359,184 +228,5 @@ interface WritableBookmarksConnection : ReadableBookmarksConnection {
|
|||
* @throws InvalidParent If `info.parentGUID` is specified, but does not refer to a
|
||||
* folder node.
|
||||
*/
|
||||
fun updateBookmark(guid: String, info: BookmarkUpdateInfo)
|
||||
}
|
||||
|
||||
/**
|
||||
* Information describing the changes to make in order to update a bookmark.
|
||||
*/
|
||||
data class BookmarkUpdateInfo(
|
||||
/**
|
||||
* If the record should be moved to another folder, the guid
|
||||
* of the folder it should be moved to. Interacts with
|
||||
* `position`, see its documentation for details.
|
||||
*/
|
||||
val parentGUID: String? = null,
|
||||
|
||||
/**
|
||||
* If the record should be moved, the 0-based index where it
|
||||
* should be moved to. Interacts with `parentGUID` as follows:
|
||||
*
|
||||
* - If `parentGUID` is not provided and `position` is, we treat this
|
||||
* a move within the same folder.
|
||||
*
|
||||
* - If `parentGUID` and `position` are both provided, we treat this as
|
||||
* a move to / within that folder, and we insert at the requested
|
||||
* position.
|
||||
*
|
||||
* - If `position` is not provided (and `parentGUID` is) then its
|
||||
* treated as a move to the end of that folder.
|
||||
*
|
||||
* If position is provided and is outside the range of positions currently
|
||||
* occupied by children in this folder, it is first constrained to
|
||||
* be within that range.
|
||||
*/
|
||||
val position: Int? = null,
|
||||
|
||||
/**
|
||||
* For nodes of type [BookmarkType.Bookmark] and [BookmarkType.Folder],
|
||||
* a string specifying the new title of the bookmark node.
|
||||
*/
|
||||
val title: String? = null,
|
||||
|
||||
/**
|
||||
* For nodes of type [BookmarkType.Bookmark], a string specifying
|
||||
* the new url of the bookmark node.
|
||||
*/
|
||||
val url: String? = null
|
||||
) {
|
||||
|
||||
internal fun toProtobuf(guid: String): MsgTypes.BookmarkNode {
|
||||
val builder = MsgTypes.BookmarkNode.newBuilder()
|
||||
builder.setGuid(guid)
|
||||
this.position?.let { builder.setPosition(it) }
|
||||
this.parentGUID?.let { builder.setParentGuid(it) }
|
||||
this.title?.let { builder.setTitle(it) }
|
||||
this.url?.let { builder.setUrl(it) }
|
||||
return builder.build()
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Error indicating bookmarks corruption. If this occurs, we
|
||||
* would appreciate reports.
|
||||
*
|
||||
* Eventually it should be fixed up, when detected as part of
|
||||
* `runMaintenance`.
|
||||
*/
|
||||
open class BookmarksCorruption(msg: String) : PlacesException(msg)
|
||||
|
||||
/**
|
||||
* Thrown when attempting to insert a URL greater than 65536 bytes
|
||||
* (after punycoding and percent encoding).
|
||||
*
|
||||
* Attempting to truncate the URL is difficult and subtle, and
|
||||
* is guaranteed to result in a URL different from the one the
|
||||
* user attempted to bookmark, and so an error is thrown instead.
|
||||
*/
|
||||
open class UrlTooLong(msg: String) : PlacesException(msg)
|
||||
|
||||
/**
|
||||
* Thrown when attempting to update a bookmark item in an illegal
|
||||
* way. For example, attempting to change the URL of a bookmark
|
||||
* folder, or update the title of a separator, etc.
|
||||
*/
|
||||
open class InvalidBookmarkUpdate(msg: String) : PlacesException(msg)
|
||||
|
||||
/**
|
||||
* Thrown when providing a guid to a create or update function
|
||||
* which does not refer to a known bookmark.
|
||||
*/
|
||||
open class UnknownBookmarkItem(msg: String) : PlacesException(msg)
|
||||
|
||||
/**
|
||||
* Thrown when:
|
||||
*
|
||||
* - Attempting to insert a child under BookmarkRoot.Root,
|
||||
* - Attempting to update any of the bookmark roots.
|
||||
* - Attempting to delete any of the bookmark roots.
|
||||
*/
|
||||
open class CannotUpdateRoot(msg: String) : PlacesException(msg)
|
||||
|
||||
/**
|
||||
* Thrown when providing a guid referring to a non-folder as the
|
||||
* parentGUID parameter to a create or update
|
||||
*/
|
||||
open class InvalidParent(msg: String) : PlacesException(msg)
|
||||
|
||||
/**
|
||||
* Turn the protobuf rust passes us into a BookmarkTreeNode.
|
||||
*
|
||||
* Note that we have no way to determine empty lists and lists that weren't provided, so we pass
|
||||
* in what we.
|
||||
* expect as a boolean flag (shouldHaveChildNodes).
|
||||
*/
|
||||
@Suppress("ComplexMethod", "ReturnCount", "TooGenericExceptionThrown")
|
||||
internal fun unpackProtobuf(msg: MsgTypes.BookmarkNode): BookmarkTreeNode {
|
||||
val guid = msg.guid
|
||||
val parentGUID = msg.parentGuid
|
||||
val position = msg.position
|
||||
val dateAdded = msg.dateAdded
|
||||
val lastModified = msg.lastModified
|
||||
val type = msg.nodeType
|
||||
val title = if (msg.hasTitle()) { msg.title } else { "" }
|
||||
val shouldHaveChildNodes = if (msg.hasHaveChildNodes()) { msg.haveChildNodes } else { false }
|
||||
when (type) {
|
||||
|
||||
BookmarkType.Bookmark.value -> {
|
||||
return BookmarkItem(
|
||||
guid = guid,
|
||||
parentGUID = parentGUID,
|
||||
position = position,
|
||||
dateAdded = dateAdded,
|
||||
lastModified = lastModified,
|
||||
title = title,
|
||||
url = msg.url
|
||||
)
|
||||
}
|
||||
|
||||
BookmarkType.Separator.value -> {
|
||||
return BookmarkSeparator(
|
||||
guid = guid,
|
||||
parentGUID = parentGUID,
|
||||
position = position,
|
||||
dateAdded = dateAdded,
|
||||
lastModified = lastModified
|
||||
)
|
||||
}
|
||||
|
||||
BookmarkType.Folder.value -> {
|
||||
val childNodes: List<BookmarkTreeNode> = msg.childNodesList.map {
|
||||
child -> unpackProtobuf(child)
|
||||
}
|
||||
var childGuids = msg.childGuidsList
|
||||
|
||||
// If we got child nodes instead of guids, use the nodes to get the guids.
|
||||
if (childGuids.isEmpty() && childNodes.isNotEmpty()) {
|
||||
childGuids = childNodes.map { child -> child.guid }
|
||||
}
|
||||
|
||||
return BookmarkFolder(
|
||||
guid = guid,
|
||||
parentGUID = parentGUID,
|
||||
position = position,
|
||||
dateAdded = dateAdded,
|
||||
lastModified = lastModified,
|
||||
title = title,
|
||||
childGUIDs = childGuids,
|
||||
children = if (shouldHaveChildNodes) { childNodes } else { null }
|
||||
)
|
||||
}
|
||||
|
||||
else -> {
|
||||
// Should never happen
|
||||
throw RuntimeException("Rust passed in an illegal bookmark type $type")
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Unpack results from getBookmarksWithURL and searchBookmarks. Both of these can only return
|
||||
// BookmarkItems, so we just do the cast inside the mapper.
|
||||
internal fun unpackProtobufItemList(msg: MsgTypes.BookmarkNodeList): List<BookmarkItem> {
|
||||
return msg.nodesList.map { unpackProtobuf(it) as BookmarkItem }
|
||||
fun updateBookmark(guid: Guid, parentGuid: Guid?, position: UInt?, title: String?, url: Url?)
|
||||
}
|
||||
|
|
|
@ -1,328 +0,0 @@
|
|||
@file:Suppress("MaxLineLength")
|
||||
/* This Source Code Form is subject to the terms of the Mozilla Public
|
||||
* License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
|
||||
|
||||
package mozilla.appservices.places
|
||||
|
||||
import com.sun.jna.Library
|
||||
import com.sun.jna.Pointer
|
||||
import com.sun.jna.PointerType
|
||||
import com.sun.jna.StringArray
|
||||
import mozilla.appservices.support.native.loadIndirect
|
||||
import org.mozilla.appservices.places.BuildConfig
|
||||
|
||||
import mozilla.appservices.support.native.RustBuffer
|
||||
|
||||
@Suppress("FunctionNaming", "FunctionParameterNaming", "LongParameterList", "TooGenericExceptionThrown")
|
||||
internal interface LibPlacesFFI : Library {
|
||||
companion object {
|
||||
internal var INSTANCE: LibPlacesFFI =
|
||||
loadIndirect(componentName = "places", componentVersion = BuildConfig.LIBRARY_VERSION)
|
||||
}
|
||||
|
||||
// Important: strings returned from rust as *mut char must be Pointers on this end, returning a
|
||||
// String will work but either force us to leak them, or cause us to corrupt the heap (when we
|
||||
// free them).
|
||||
|
||||
/** Create a new places api */
|
||||
fun places_api_new(
|
||||
db_path: String,
|
||||
out_err: RustError.ByReference
|
||||
): PlacesApiHandle
|
||||
|
||||
/** Create a new places connection */
|
||||
fun places_connection_new(
|
||||
handle: PlacesApiHandle,
|
||||
conn_type: Int,
|
||||
out_err: RustError.ByReference
|
||||
): PlacesConnectionHandle
|
||||
|
||||
fun places_api_register_with_sync_manager(
|
||||
handle: PlacesApiHandle,
|
||||
out_err: RustError.ByReference
|
||||
)
|
||||
|
||||
// Returns a JSON string containing bookmark import metrics
|
||||
fun places_bookmarks_import_from_fennec(
|
||||
handle: PlacesApiHandle,
|
||||
db_path: String,
|
||||
out_err: RustError.ByReference
|
||||
): Pointer?
|
||||
|
||||
fun places_pinned_sites_import_from_fennec(
|
||||
handle: PlacesApiHandle,
|
||||
db_path: String,
|
||||
out_err: RustError.ByReference
|
||||
): RustBuffer.ByValue
|
||||
|
||||
// Returns a JSON string containing import metrics
|
||||
fun places_history_import_from_fennec(
|
||||
handle: PlacesApiHandle,
|
||||
db_path: String,
|
||||
out_err: RustError.ByReference
|
||||
): Pointer?
|
||||
|
||||
fun places_note_observation(
|
||||
handle: PlacesConnectionHandle,
|
||||
json_observation_data: String,
|
||||
out_err: RustError.ByReference
|
||||
)
|
||||
|
||||
/** Returns JSON string, which you need to free with places_destroy_string */
|
||||
fun places_query_autocomplete(
|
||||
handle: PlacesConnectionHandle,
|
||||
search: String,
|
||||
limit: Int,
|
||||
out_err: RustError.ByReference
|
||||
): RustBuffer.ByValue
|
||||
|
||||
/** Returns a URL, or null if no match was found. */
|
||||
fun places_match_url(
|
||||
handle: PlacesConnectionHandle,
|
||||
search: String,
|
||||
out_err: RustError.ByReference
|
||||
): Pointer?
|
||||
|
||||
/** Note: urls_len and buffer_len must be the same length. The argument is somewhat redundant, but
|
||||
* is provided for a slight additional amount of sanity checking. These lengths are the number
|
||||
* of elements present (and not e.g. the number of bytes allocated). */
|
||||
fun places_get_visited(
|
||||
handle: PlacesConnectionHandle,
|
||||
urls: StringArray,
|
||||
urls_len: Int,
|
||||
buffer: Pointer,
|
||||
buf_len: Int,
|
||||
out_err: RustError.ByReference
|
||||
)
|
||||
|
||||
fun places_get_visited_urls_in_range(
|
||||
handle: PlacesConnectionHandle,
|
||||
start: Long,
|
||||
end: Long,
|
||||
include_remote: Byte,
|
||||
out_err: RustError.ByReference
|
||||
): Pointer?
|
||||
|
||||
fun places_new_interrupt_handle(
|
||||
conn: PlacesConnectionHandle,
|
||||
out_err: RustError.ByReference
|
||||
): RawPlacesInterruptHandle?
|
||||
|
||||
fun places_new_sync_conn_interrupt_handle(
|
||||
api: PlacesApiHandle,
|
||||
out_err: RustError.ByReference
|
||||
): RawPlacesInterruptHandle?
|
||||
|
||||
fun places_interrupt(
|
||||
conn: RawPlacesInterruptHandle,
|
||||
out_err: RustError.ByReference
|
||||
)
|
||||
|
||||
fun places_delete_visits_for(
|
||||
handle: PlacesConnectionHandle,
|
||||
url: String,
|
||||
out_err: RustError.ByReference
|
||||
)
|
||||
|
||||
fun places_delete_visits_between(
|
||||
handle: PlacesConnectionHandle,
|
||||
start: Long,
|
||||
end: Long,
|
||||
out_err: RustError.ByReference
|
||||
)
|
||||
|
||||
fun places_delete_visit(
|
||||
handle: PlacesConnectionHandle,
|
||||
visit_url: String,
|
||||
visit_timestamp: Long,
|
||||
out_err: RustError.ByReference
|
||||
)
|
||||
|
||||
fun places_wipe_local(
|
||||
handle: PlacesConnectionHandle,
|
||||
out_err: RustError.ByReference
|
||||
)
|
||||
|
||||
fun places_run_maintenance(
|
||||
handle: PlacesConnectionHandle,
|
||||
out_err: RustError.ByReference
|
||||
)
|
||||
|
||||
fun places_prune_destructively(
|
||||
handle: PlacesConnectionHandle,
|
||||
out_err: RustError.ByReference
|
||||
)
|
||||
|
||||
fun places_delete_everything(
|
||||
handle: PlacesConnectionHandle,
|
||||
out_err: RustError.ByReference
|
||||
)
|
||||
|
||||
fun places_get_top_frecent_site_infos(
|
||||
handle: PlacesConnectionHandle,
|
||||
numItems: Int,
|
||||
frecencyThreshold: Long,
|
||||
error: RustError.ByReference
|
||||
): RustBuffer.ByValue
|
||||
|
||||
fun places_get_visit_infos(
|
||||
handle: PlacesConnectionHandle,
|
||||
startDate: Long,
|
||||
endDate: Long,
|
||||
excludeTypes: Int,
|
||||
error: RustError.ByReference
|
||||
): RustBuffer.ByValue
|
||||
|
||||
fun places_get_visit_page(
|
||||
handle: PlacesConnectionHandle,
|
||||
offset: Long,
|
||||
count: Long,
|
||||
excludeTypes: Int,
|
||||
error: RustError.ByReference
|
||||
): RustBuffer.ByValue
|
||||
|
||||
fun places_get_visit_page_with_bound(
|
||||
handle: PlacesConnectionHandle,
|
||||
bound: Long,
|
||||
offset: Long,
|
||||
count: Long,
|
||||
excludeTypes: Int,
|
||||
error: RustError.ByReference
|
||||
): RustBuffer.ByValue
|
||||
|
||||
fun places_get_visit_count(
|
||||
handle: PlacesConnectionHandle,
|
||||
excludeTypes: Int,
|
||||
error: RustError.ByReference
|
||||
): Long
|
||||
|
||||
fun places_reset(
|
||||
handle: PlacesApiHandle,
|
||||
error: RustError.ByReference
|
||||
)
|
||||
|
||||
// Returns a JSON string containing a sync ping.
|
||||
fun sync15_history_sync(
|
||||
handle: PlacesApiHandle,
|
||||
key_id: String,
|
||||
access_token: String,
|
||||
sync_key: String,
|
||||
tokenserver_url: String,
|
||||
out_err: RustError.ByReference
|
||||
): Pointer?
|
||||
|
||||
// Returns a JSON string containing a sync ping.
|
||||
fun sync15_bookmarks_sync(
|
||||
handle: PlacesApiHandle,
|
||||
key_id: String,
|
||||
access_token: String,
|
||||
sync_key: String,
|
||||
tokenserver_url: String,
|
||||
out_err: RustError.ByReference
|
||||
): Pointer?
|
||||
|
||||
fun bookmarks_get_all_with_url(
|
||||
handle: PlacesConnectionHandle,
|
||||
url: String,
|
||||
error: RustError.ByReference
|
||||
): RustBuffer.ByValue
|
||||
|
||||
fun bookmarks_get_url_for_keyword(
|
||||
handle: PlacesConnectionHandle,
|
||||
keyword: String,
|
||||
error: RustError.ByReference
|
||||
): Pointer?
|
||||
|
||||
fun bookmarks_get_tree(
|
||||
handle: PlacesConnectionHandle,
|
||||
optRootId: String?,
|
||||
error: RustError.ByReference
|
||||
): RustBuffer.ByValue
|
||||
|
||||
fun bookmarks_get_by_guid(
|
||||
handle: PlacesConnectionHandle,
|
||||
optRootId: String?,
|
||||
getDirectChildren: Byte,
|
||||
error: RustError.ByReference
|
||||
): RustBuffer.ByValue
|
||||
|
||||
fun bookmarks_search(
|
||||
handle: PlacesConnectionHandle,
|
||||
search: String,
|
||||
limit: Int,
|
||||
error: RustError.ByReference
|
||||
): RustBuffer.ByValue
|
||||
|
||||
fun bookmarks_get_recent(
|
||||
handle: PlacesConnectionHandle,
|
||||
limit: Int,
|
||||
error: RustError.ByReference
|
||||
): RustBuffer.ByValue
|
||||
|
||||
// Returns newly inserted guid
|
||||
fun bookmarks_insert(
|
||||
handle: PlacesConnectionHandle,
|
||||
data: Pointer,
|
||||
len: Int,
|
||||
error: RustError.ByReference
|
||||
): Pointer?
|
||||
|
||||
fun bookmarks_update(
|
||||
handle: PlacesConnectionHandle,
|
||||
data: Pointer,
|
||||
len: Int,
|
||||
error: RustError.ByReference
|
||||
)
|
||||
|
||||
// Returns 1 if the item existed and was deleted.
|
||||
fun bookmarks_delete(
|
||||
handle: PlacesConnectionHandle,
|
||||
id: String,
|
||||
error: RustError.ByReference
|
||||
): Byte
|
||||
|
||||
fun bookmarks_delete_everything(
|
||||
handle: PlacesConnectionHandle,
|
||||
error: RustError.ByReference
|
||||
)
|
||||
|
||||
fun bookmarks_reset(
|
||||
handle: PlacesApiHandle,
|
||||
error: RustError.ByReference
|
||||
)
|
||||
|
||||
/** Destroy strings returned from libplaces_ffi calls. */
|
||||
fun places_destroy_string(s: Pointer)
|
||||
|
||||
fun places_api_return_write_conn(
|
||||
apiHandle: PlacesApiHandle,
|
||||
writeHandle: PlacesConnectionHandle,
|
||||
err: RustError.ByReference
|
||||
)
|
||||
|
||||
/** Destroy connection created using `places_connection_new` */
|
||||
fun places_connection_destroy(handle: PlacesConnectionHandle, out_err: RustError.ByReference)
|
||||
|
||||
/** Destroy api created using `places_api_new` */
|
||||
fun places_api_destroy(handle: PlacesApiHandle, out_err: RustError.ByReference)
|
||||
|
||||
/** Destroy handle created using `places_new_interrupt_handle` */
|
||||
fun places_interrupt_handle_destroy(obj: RawPlacesInterruptHandle)
|
||||
|
||||
fun places_destroy_bytebuffer(bb: RustBuffer.ByValue)
|
||||
|
||||
fun places_accept_result(
|
||||
handle: PlacesConnectionHandle,
|
||||
search_string: String,
|
||||
url: String,
|
||||
out_err: RustError.ByReference
|
||||
)
|
||||
}
|
||||
|
||||
internal typealias PlacesConnectionHandle = Long
|
||||
internal typealias PlacesApiHandle = Long
|
||||
|
||||
// This doesn't use a handle to avoid unnecessary locking and
|
||||
// because the type is panic safe, sync, and send.
|
||||
class RawPlacesInterruptHandle : PointerType()
|
Разница между файлами не показана из-за своего большого размера
Загрузить разницу
|
@ -1,89 +0,0 @@
|
|||
/* Copyright 2018 Mozilla
|
||||
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use
|
||||
* this file except in compliance with the License. You may obtain a copy of the
|
||||
* License at http://www.apache.org/licenses/LICENSE-2.0
|
||||
* Unless required by applicable law or agreed to in writing, software distributed
|
||||
* under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
|
||||
* CONDITIONS OF ANY KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations under the License. */
|
||||
package mozilla.appservices.places
|
||||
|
||||
import com.sun.jna.Pointer
|
||||
import com.sun.jna.Structure
|
||||
|
||||
@Structure.FieldOrder("code", "message")
|
||||
internal open class RustError : Structure() {
|
||||
|
||||
class ByReference : RustError(), Structure.ByReference
|
||||
|
||||
companion object {
|
||||
fun makeException(code: Int, message: String): PlacesException {
|
||||
return when (code) {
|
||||
2 -> UrlParseFailed(message)
|
||||
3 -> PlacesConnectionBusy(message)
|
||||
4 -> OperationInterrupted(message)
|
||||
5 -> BookmarksCorruption(message)
|
||||
|
||||
64 -> InvalidParent(message)
|
||||
65 -> UnknownBookmarkItem(message)
|
||||
66 -> UrlTooLong(message)
|
||||
67 -> InvalidBookmarkUpdate(message)
|
||||
68 -> CannotUpdateRoot(message)
|
||||
|
||||
-1 -> InternalPanic(message)
|
||||
// Note: `1` is used as a generic catch all, but we
|
||||
// might as well handle the others the same way.
|
||||
else -> PlacesException(message)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@JvmField var code: Int = 0
|
||||
@JvmField var message: Pointer? = null
|
||||
/**
|
||||
* Does this represent success?
|
||||
*/
|
||||
fun isSuccess(): Boolean {
|
||||
return code == 0
|
||||
}
|
||||
|
||||
/**
|
||||
* Does this represent failure?
|
||||
*/
|
||||
fun isFailure(): Boolean {
|
||||
return code != 0
|
||||
}
|
||||
|
||||
@Suppress("ComplexMethod", "ReturnCount", "TooGenericExceptionThrown")
|
||||
fun intoException(): PlacesException {
|
||||
if (!isFailure()) {
|
||||
// It's probably a bad idea to throw here! We're probably leaking something if this is
|
||||
// ever hit! (But we shouldn't ever hit it?)
|
||||
throw RuntimeException("[Bug] intoException called on non-failure!")
|
||||
}
|
||||
val message = this.consumeErrorMessage()
|
||||
return makeException(code, message)
|
||||
}
|
||||
|
||||
/**
|
||||
* Get and consume the error message, or null if there is none.
|
||||
*/
|
||||
fun consumeErrorMessage(): String {
|
||||
val result = this.getMessage()
|
||||
if (this.message != null) {
|
||||
LibPlacesFFI.INSTANCE.places_destroy_string(this.message!!)
|
||||
this.message = null
|
||||
}
|
||||
if (result == null) {
|
||||
throw NullPointerException("consumeErrorMessage called with null message!")
|
||||
}
|
||||
return result
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the error message or null if there is none.
|
||||
*/
|
||||
fun getMessage(): String? {
|
||||
return this.message?.getString(0, "utf8")
|
||||
}
|
||||
}
|
|
@ -7,7 +7,12 @@ import androidx.test.core.app.ApplicationProvider
|
|||
import kotlinx.coroutines.runBlocking
|
||||
import mozilla.appservices.Megazord
|
||||
import mozilla.appservices.places.uniffi.DocumentType
|
||||
import mozilla.appservices.places.uniffi.VisitObservation
|
||||
import mozilla.appservices.places.uniffi.VisitTransition
|
||||
import mozilla.appservices.places.uniffi.FrecencyThresholdOption
|
||||
import mozilla.appservices.syncmanager.SyncManager
|
||||
import mozilla.appservices.places.uniffi.PlacesException
|
||||
import mozilla.appservices.places.uniffi.BookmarkItem
|
||||
import mozilla.components.service.glean.testing.GleanTestRule
|
||||
import org.junit.After
|
||||
import org.junit.Assert.assertEquals
|
||||
|
@ -75,7 +80,7 @@ class PlacesConnectionTest {
|
|||
)
|
||||
|
||||
for (url in toAdd) {
|
||||
db.noteObservation(VisitObservation(url = url, visitType = VisitType.LINK))
|
||||
db.noteObservation(VisitObservation(url = url, visitType = VisitTransition.LINK))
|
||||
}
|
||||
|
||||
val toSearch = listOf(
|
||||
|
@ -124,9 +129,9 @@ class PlacesConnectionTest {
|
|||
@Test
|
||||
fun testNoteObservationBadUrl() {
|
||||
try {
|
||||
db.noteObservation(VisitObservation(url = "http://www.[].com", visitType = VisitType.LINK))
|
||||
db.noteObservation(VisitObservation(url = "http://www.[].com", visitType = VisitTransition.LINK))
|
||||
} catch (e: PlacesException) {
|
||||
assert(e is UrlParseFailed)
|
||||
assert(e is PlacesException.UrlParseFailed)
|
||||
}
|
||||
}
|
||||
// Basically equivalent to test_get_visited in rust, but exercises the FFI,
|
||||
|
@ -146,7 +151,7 @@ class PlacesConnectionTest {
|
|||
)
|
||||
|
||||
for (url in toAdd) {
|
||||
db.noteObservation(VisitObservation(url = url, visitType = VisitType.LINK))
|
||||
db.noteObservation(VisitObservation(url = url, visitType = VisitTransition.LINK))
|
||||
}
|
||||
// Should use the origin search
|
||||
assertEquals("https://www.example.com/", db.matchUrl("example.com"))
|
||||
|
@ -184,25 +189,25 @@ class PlacesConnectionTest {
|
|||
fun testObservingPreviewImage() {
|
||||
db.noteObservation(VisitObservation(
|
||||
url = "https://www.example.com/0",
|
||||
visitType = VisitType.LINK)
|
||||
visitType = VisitTransition.LINK)
|
||||
)
|
||||
|
||||
db.noteObservation(VisitObservation(
|
||||
url = "https://www.example.com/1",
|
||||
visitType = VisitType.LINK)
|
||||
visitType = VisitTransition.LINK)
|
||||
)
|
||||
|
||||
// Can change preview image.
|
||||
db.noteObservation(VisitObservation(
|
||||
url = "https://www.example.com/1",
|
||||
visitType = VisitType.LINK,
|
||||
visitType = VisitTransition.LINK,
|
||||
previewImageUrl = "https://www.example.com/1/previewImage.png")
|
||||
)
|
||||
|
||||
// Can make an initial observation with the preview image.
|
||||
db.noteObservation(VisitObservation(
|
||||
url = "https://www.example.com/2",
|
||||
visitType = VisitType.LINK,
|
||||
visitType = VisitTransition.LINK,
|
||||
previewImageUrl = "https://www.example.com/2/previewImage.png")
|
||||
)
|
||||
|
||||
|
@ -216,12 +221,12 @@ class PlacesConnectionTest {
|
|||
|
||||
@Test
|
||||
fun testGetTopFrecentSiteInfos() {
|
||||
db.noteObservation(VisitObservation(url = "https://www.example.com/1", visitType = VisitType.DOWNLOAD))
|
||||
db.noteObservation(VisitObservation(url = "https://www.example.com/1", visitType = VisitType.EMBED))
|
||||
db.noteObservation(VisitObservation(url = "https://www.example.com/1", visitType = VisitType.REDIRECT_PERMANENT))
|
||||
db.noteObservation(VisitObservation(url = "https://www.example.com/1", visitType = VisitType.REDIRECT_TEMPORARY))
|
||||
db.noteObservation(VisitObservation(url = "https://www.example.com/1", visitType = VisitType.FRAMED_LINK))
|
||||
db.noteObservation(VisitObservation(url = "https://www.example.com/1", visitType = VisitType.RELOAD))
|
||||
db.noteObservation(VisitObservation(url = "https://www.example.com/1", visitType = VisitTransition.DOWNLOAD))
|
||||
db.noteObservation(VisitObservation(url = "https://www.example.com/1", visitType = VisitTransition.EMBED))
|
||||
db.noteObservation(VisitObservation(url = "https://www.example.com/1", visitType = VisitTransition.REDIRECT_PERMANENT))
|
||||
db.noteObservation(VisitObservation(url = "https://www.example.com/1", visitType = VisitTransition.REDIRECT_TEMPORARY))
|
||||
db.noteObservation(VisitObservation(url = "https://www.example.com/1", visitType = VisitTransition.FRAMED_LINK))
|
||||
db.noteObservation(VisitObservation(url = "https://www.example.com/1", visitType = VisitTransition.RELOAD))
|
||||
|
||||
val toAdd = listOf(
|
||||
"https://www.example.com/123",
|
||||
|
@ -235,7 +240,7 @@ class PlacesConnectionTest {
|
|||
)
|
||||
|
||||
for (url in toAdd) {
|
||||
db.noteObservation(VisitObservation(url = url, visitType = VisitType.LINK))
|
||||
db.noteObservation(VisitObservation(url = url, visitType = VisitTransition.LINK))
|
||||
}
|
||||
|
||||
var infos = db.getTopFrecentSiteInfos(numItems = 0, frecencyThreshold = FrecencyThresholdOption.NONE)
|
||||
|
@ -279,11 +284,11 @@ class PlacesConnectionTest {
|
|||
// as well as the handling of invalid urls.
|
||||
@Test
|
||||
fun testGetVisitInfos() {
|
||||
db.noteObservation(VisitObservation(url = "https://www.example.com/1", visitType = VisitType.LINK, at = 100000))
|
||||
db.noteObservation(VisitObservation(url = "https://www.example.com/2a", visitType = VisitType.REDIRECT_TEMPORARY, at = 130000))
|
||||
db.noteObservation(VisitObservation(url = "https://www.example.com/2b", visitType = VisitType.LINK, at = 150000))
|
||||
db.noteObservation(VisitObservation(url = "https://www.example.com/3", visitType = VisitType.LINK, at = 200000))
|
||||
db.noteObservation(VisitObservation(url = "https://www.example.com/4", visitType = VisitType.LINK, at = 250000))
|
||||
db.noteObservation(VisitObservation(url = "https://www.example.com/1", visitType = VisitTransition.LINK, at = 100000))
|
||||
db.noteObservation(VisitObservation(url = "https://www.example.com/2a", visitType = VisitTransition.REDIRECT_TEMPORARY, at = 130000))
|
||||
db.noteObservation(VisitObservation(url = "https://www.example.com/2b", visitType = VisitTransition.LINK, at = 150000))
|
||||
db.noteObservation(VisitObservation(url = "https://www.example.com/3", visitType = VisitTransition.LINK, at = 200000))
|
||||
db.noteObservation(VisitObservation(url = "https://www.example.com/4", visitType = VisitTransition.LINK, at = 250000))
|
||||
var infos = db.getVisitInfos(125000, 225000, excludeTypes = listOf(VisitType.REDIRECT_TEMPORARY))
|
||||
assertEquals(2, infos.size)
|
||||
assertEquals("https://www.example.com/2b", infos[0].url)
|
||||
|
@ -297,15 +302,15 @@ class PlacesConnectionTest {
|
|||
|
||||
@Test
|
||||
fun testGetVisitPage() {
|
||||
db.noteObservation(VisitObservation(url = "https://www.example.com/1", visitType = VisitType.LINK, at = 100000))
|
||||
db.noteObservation(VisitObservation(url = "https://www.example.com/2", visitType = VisitType.LINK, at = 110000))
|
||||
db.noteObservation(VisitObservation(url = "https://www.example.com/3a", visitType = VisitType.REDIRECT_TEMPORARY, at = 120000))
|
||||
db.noteObservation(VisitObservation(url = "https://www.example.com/3b", visitType = VisitType.REDIRECT_TEMPORARY, at = 130000))
|
||||
db.noteObservation(VisitObservation(url = "https://www.example.com/4", visitType = VisitType.LINK, at = 140000))
|
||||
db.noteObservation(VisitObservation(url = "https://www.example.com/5", visitType = VisitType.LINK, at = 150000))
|
||||
db.noteObservation(VisitObservation(url = "https://www.example.com/6", visitType = VisitType.LINK, at = 160000))
|
||||
db.noteObservation(VisitObservation(url = "https://www.example.com/7", visitType = VisitType.LINK, at = 170000))
|
||||
db.noteObservation(VisitObservation(url = "https://www.example.com/8", visitType = VisitType.LINK, at = 180000))
|
||||
db.noteObservation(VisitObservation(url = "https://www.example.com/1", visitType = VisitTransition.LINK, at = 100000))
|
||||
db.noteObservation(VisitObservation(url = "https://www.example.com/2", visitType = VisitTransition.LINK, at = 110000))
|
||||
db.noteObservation(VisitObservation(url = "https://www.example.com/3a", visitType = VisitTransition.REDIRECT_TEMPORARY, at = 120000))
|
||||
db.noteObservation(VisitObservation(url = "https://www.example.com/3b", visitType = VisitTransition.REDIRECT_TEMPORARY, at = 130000))
|
||||
db.noteObservation(VisitObservation(url = "https://www.example.com/4", visitType = VisitTransition.LINK, at = 140000))
|
||||
db.noteObservation(VisitObservation(url = "https://www.example.com/5", visitType = VisitTransition.LINK, at = 150000))
|
||||
db.noteObservation(VisitObservation(url = "https://www.example.com/6", visitType = VisitTransition.LINK, at = 160000))
|
||||
db.noteObservation(VisitObservation(url = "https://www.example.com/7", visitType = VisitTransition.LINK, at = 170000))
|
||||
db.noteObservation(VisitObservation(url = "https://www.example.com/8", visitType = VisitTransition.LINK, at = 180000))
|
||||
|
||||
assertEquals(9, db.getVisitCount())
|
||||
assertEquals(7, db.getVisitCount(excludeTypes = listOf(VisitType.REDIRECT_TEMPORARY)))
|
||||
|
@ -347,7 +352,7 @@ class PlacesConnectionTest {
|
|||
|
||||
val sepGUID = db.createSeparator(
|
||||
parentGUID = BookmarkRoot.Unfiled.id,
|
||||
position = 0
|
||||
position = 0u
|
||||
)
|
||||
|
||||
val folderGUID = db.createFolder(
|
||||
|
@ -355,25 +360,21 @@ class PlacesConnectionTest {
|
|||
title = "example folder"
|
||||
)
|
||||
|
||||
val item = db.getBookmark(itemGUID)!! as BookmarkItem
|
||||
val sep = db.getBookmark(sepGUID)!! as BookmarkSeparator
|
||||
val folder = db.getBookmark(folderGUID)!! as BookmarkFolder
|
||||
val item = db.getBookmark(itemGUID)!! as BookmarkItem.Bookmark
|
||||
val sep = db.getBookmark(sepGUID)!! as BookmarkItem.Separator
|
||||
val folder = db.getBookmark(folderGUID)!! as BookmarkItem.Folder
|
||||
|
||||
assertEquals(item.type, BookmarkType.Bookmark)
|
||||
assertEquals(sep.type, BookmarkType.Separator)
|
||||
assertEquals(folder.type, BookmarkType.Folder)
|
||||
assertEquals(item.b.title, "example")
|
||||
assertEquals(item.b.url, "https://www.example.com/")
|
||||
assertEquals(item.b.position, 1u)
|
||||
assertEquals(item.b.parentGuid, BookmarkRoot.Unfiled.id)
|
||||
|
||||
assertEquals(item.title, "example")
|
||||
assertEquals(item.url, "https://www.example.com/")
|
||||
assertEquals(item.position, 1)
|
||||
assertEquals(item.parentGUID, BookmarkRoot.Unfiled.id)
|
||||
assertEquals(sep.s.position, 0u)
|
||||
assertEquals(sep.s.parentGuid, BookmarkRoot.Unfiled.id)
|
||||
|
||||
assertEquals(sep.position, 0)
|
||||
assertEquals(sep.parentGUID, BookmarkRoot.Unfiled.id)
|
||||
|
||||
assertEquals(folder.title, "example folder")
|
||||
assertEquals(folder.position, 2)
|
||||
assertEquals(folder.parentGUID, BookmarkRoot.Unfiled.id)
|
||||
assertEquals(folder.f.title, "example folder")
|
||||
assertEquals(folder.f.position, 2u)
|
||||
assertEquals(folder.f.parentGuid, BookmarkRoot.Unfiled.id)
|
||||
}
|
||||
|
||||
@Test
|
||||
|
@ -381,17 +382,17 @@ class PlacesConnectionTest {
|
|||
assert(!PlacesManagerMetrics.writeQueryCount.testHasValue())
|
||||
assert(!PlacesManagerMetrics.writeQueryErrorCount["url_parse_failed"].testHasValue())
|
||||
|
||||
db.noteObservation(VisitObservation(url = "https://www.example.com/2a", visitType = VisitType.REDIRECT_TEMPORARY, at = 130000))
|
||||
db.noteObservation(VisitObservation(url = "https://www.example.com/2b", visitType = VisitType.LINK, at = 150000))
|
||||
db.noteObservation(VisitObservation(url = "https://www.example.com/3", visitType = VisitType.LINK, at = 200000))
|
||||
db.noteObservation(VisitObservation(url = "https://www.example.com/2a", visitType = VisitTransition.REDIRECT_TEMPORARY, at = 130000))
|
||||
db.noteObservation(VisitObservation(url = "https://www.example.com/2b", visitType = VisitTransition.LINK, at = 150000))
|
||||
db.noteObservation(VisitObservation(url = "https://www.example.com/3", visitType = VisitTransition.LINK, at = 200000))
|
||||
|
||||
assertEquals(3, PlacesManagerMetrics.writeQueryCount.testGetValue())
|
||||
assert(!PlacesManagerMetrics.writeQueryErrorCount["__other__"].testHasValue())
|
||||
|
||||
try {
|
||||
db.noteObservation(VisitObservation(url = "4", visitType = VisitType.REDIRECT_TEMPORARY, at = 160000))
|
||||
db.noteObservation(VisitObservation(url = "4", visitType = VisitTransition.REDIRECT_TEMPORARY, at = 160000))
|
||||
fail("Should have thrown")
|
||||
} catch (e: UrlParseFailed) {
|
||||
} catch (e: PlacesException.UrlParseFailed) {
|
||||
// nothing to do here
|
||||
}
|
||||
|
||||
|
@ -437,7 +438,7 @@ class PlacesConnectionTest {
|
|||
title = "example"
|
||||
)
|
||||
fail("Should have thrown")
|
||||
} catch (e: UrlParseFailed) {
|
||||
} catch (e: PlacesException.UrlParseFailed) {
|
||||
// nothing to do here
|
||||
}
|
||||
|
||||
|
@ -490,7 +491,7 @@ class PlacesConnectionTest {
|
|||
url = "https://www.ifixit.com/News/35377/which-wireless-earbuds-are-the-least-evil",
|
||||
title = "Are All Wireless Earbuds As Evil As AirPods?",
|
||||
previewImageUrl = "https://valkyrie.cdn.ifixit.com/media/2020/02/03121341/bose_soundsport_13.jpg",
|
||||
visitType = VisitType.LINK
|
||||
visitType = VisitTransition.LINK
|
||||
)
|
||||
)
|
||||
|
||||
|
@ -591,7 +592,7 @@ class PlacesConnectionTest {
|
|||
db.noteHistoryMetadataObservationViewTime(metaKeyBad, 200)
|
||||
assert(false) // should fail
|
||||
} catch (e: PlacesException) {
|
||||
assert(e is UrlParseFailed)
|
||||
assert(e is PlacesException.UrlParseFailed)
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -1,34 +0,0 @@
|
|||
[package]
|
||||
name = "places-ffi"
|
||||
edition = "2018"
|
||||
version = "0.1.0"
|
||||
authors = ["Thom Chiovoloni <tchiovoloni@mozilla.com>"]
|
||||
license = "MPL-2.0"
|
||||
|
||||
[lib]
|
||||
name = "places_ffi"
|
||||
crate-type = ["lib"]
|
||||
|
||||
[features]
|
||||
default = []
|
||||
|
||||
[dependencies]
|
||||
serde_json = "1"
|
||||
log = "0.4"
|
||||
url = "2.2"
|
||||
ffi-support = "0.4"
|
||||
lazy_static = "1.4"
|
||||
prost = "0.8"
|
||||
viaduct = { path = "../../viaduct" }
|
||||
interrupt-support = { path = "../../support/interrupt" }
|
||||
sql-support = { path = "../../support/sql" }
|
||||
sync-guid = { path = "../../support/guid" }
|
||||
types = { path = "../../support/types" }
|
||||
uniffi = "^0.15"
|
||||
uniffi_macros = "^0.15"
|
||||
|
||||
[dependencies.sync15]
|
||||
path = "../../sync15"
|
||||
|
||||
[dependencies.places]
|
||||
path = ".."
|
|
@ -1,725 +0,0 @@
|
|||
/* This Source Code Form is subject to the terms of the Mozilla Public
|
||||
* License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
|
||||
|
||||
#![allow(unknown_lints)]
|
||||
#![warn(rust_2018_idioms)]
|
||||
// Let's allow these in the FFI code, since it's usually just a coincidence if
|
||||
// the closure is small.
|
||||
#![allow(clippy::redundant_closure)]
|
||||
|
||||
use ffi_support::{
|
||||
define_box_destructor, define_bytebuffer_destructor, define_handle_map_deleter,
|
||||
define_string_destructor, ByteBuffer, ExternError, FfiStr,
|
||||
};
|
||||
use places::error::*;
|
||||
pub use places::ffi::{APIS, CONNECTIONS};
|
||||
use places::msg_types::{BookmarkNodeList, SearchResultList};
|
||||
use places::storage::bookmarks;
|
||||
use places::types::VisitTransitionSet;
|
||||
use places::{storage, ConnectionType, PlacesApi};
|
||||
use sql_support::SqlInterruptHandle;
|
||||
use std::os::raw::c_char;
|
||||
use sync_guid::Guid as SyncGuid;
|
||||
|
||||
use places::api::matcher::{self, match_url, search_frecent, SearchParams};
|
||||
|
||||
// indirection to help `?` figure out the target error type
|
||||
fn parse_url(url: &str) -> places::Result<url::Url> {
|
||||
Ok(url::Url::parse(url)?)
|
||||
}
|
||||
|
||||
/// Instantiate a places API. Returned api must be freed with
|
||||
/// `places_api_destroy`. Returns null and logs on errors (for now).
|
||||
#[no_mangle]
|
||||
pub extern "C" fn places_api_new(db_path: FfiStr<'_>, error: &mut ExternError) -> u64 {
|
||||
log::debug!("places_api_new");
|
||||
APIS.insert_with_result(error, || {
|
||||
let path = db_path.as_str();
|
||||
PlacesApi::new(path)
|
||||
})
|
||||
}
|
||||
|
||||
/// Get an interrupt handle for the PlacesApi's sync connection.
|
||||
#[no_mangle]
|
||||
pub extern "C" fn places_new_sync_conn_interrupt_handle(
|
||||
handle: u64,
|
||||
error: &mut ExternError,
|
||||
) -> *mut SqlInterruptHandle {
|
||||
log::debug!("places_new_sync_conn_interrupt_handle");
|
||||
APIS.call_with_result(error, handle, |api| -> places::Result<_> {
|
||||
api.new_sync_conn_interrupt_handle()
|
||||
})
|
||||
}
|
||||
|
||||
#[no_mangle]
|
||||
pub extern "C" fn places_connection_new(
|
||||
handle: u64,
|
||||
conn_type_val: u8,
|
||||
error: &mut ExternError,
|
||||
) -> u64 {
|
||||
log::debug!("places_connection_new");
|
||||
APIS.call_with_result(error, handle, |api| -> places::Result<_> {
|
||||
let conn_type = match ConnectionType::from_primitive(conn_type_val) {
|
||||
// You can't open a sync connection using this method.
|
||||
None | Some(ConnectionType::Sync) => {
|
||||
return Err(ErrorKind::InvalidConnectionType.into());
|
||||
}
|
||||
Some(val) => val,
|
||||
};
|
||||
Ok(CONNECTIONS.insert(api.open_connection(conn_type)?))
|
||||
})
|
||||
}
|
||||
|
||||
#[no_mangle]
|
||||
pub extern "C" fn places_bookmarks_import_from_ios(
|
||||
api_handle: u64,
|
||||
db_path: FfiStr<'_>,
|
||||
error: &mut ExternError,
|
||||
) {
|
||||
log::debug!("places_bookmarks_import_from_ios");
|
||||
APIS.call_with_result(error, api_handle, |api| -> places::Result<_> {
|
||||
places::import::import_ios_bookmarks(api, db_path.as_str())?;
|
||||
Ok(())
|
||||
})
|
||||
}
|
||||
|
||||
#[no_mangle]
|
||||
pub extern "C" fn places_bookmarks_import_from_fennec(
|
||||
api_handle: u64,
|
||||
db_path: FfiStr<'_>,
|
||||
error: &mut ExternError,
|
||||
) -> *mut c_char {
|
||||
log::debug!("places_bookmarks_import_from_fennec");
|
||||
APIS.call_with_result(error, api_handle, |api| -> places::Result<_> {
|
||||
let import_metrics = places::import::import_fennec_bookmarks(api, db_path.as_str())?;
|
||||
let result = serde_json::to_string(&import_metrics)?;
|
||||
Ok(result)
|
||||
})
|
||||
}
|
||||
|
||||
#[no_mangle]
|
||||
pub extern "C" fn places_pinned_sites_import_from_fennec(
|
||||
api_handle: u64,
|
||||
db_path: FfiStr<'_>,
|
||||
error: &mut ExternError,
|
||||
) -> ByteBuffer {
|
||||
log::debug!("places_pinned_sites_import_from_fennec");
|
||||
APIS.call_with_result(error, api_handle, |api| -> places::Result<_> {
|
||||
Ok(BookmarkNodeList::from(
|
||||
places::import::import_fennec_pinned_sites(api, db_path.as_str())?,
|
||||
))
|
||||
})
|
||||
}
|
||||
|
||||
#[no_mangle]
|
||||
pub extern "C" fn places_history_import_from_fennec(
|
||||
api_handle: u64,
|
||||
db_path: FfiStr<'_>,
|
||||
error: &mut ExternError,
|
||||
) -> *mut c_char {
|
||||
log::debug!("places_history_import_from_fennec");
|
||||
APIS.call_with_result(error, api_handle, |api| -> places::Result<String> {
|
||||
let import_metrics = places::import::import_fennec_history(api, db_path.as_str())?;
|
||||
let result = serde_json::to_string(&import_metrics)?;
|
||||
Ok(result)
|
||||
})
|
||||
}
|
||||
|
||||
// Best effort, ignores failure.
|
||||
#[no_mangle]
|
||||
pub extern "C" fn places_api_return_write_conn(
|
||||
api_handle: u64,
|
||||
write_handle: u64,
|
||||
error: &mut ExternError,
|
||||
) {
|
||||
log::debug!("places_api_return_write_conn");
|
||||
APIS.call_with_result(error, api_handle, |api| -> places::Result<_> {
|
||||
let write_conn = if let Ok(Some(conn)) = CONNECTIONS.remove_u64(write_handle) {
|
||||
conn
|
||||
} else {
|
||||
log::warn!("Can't return connection to PlacesApi because it does not exist");
|
||||
return Ok(());
|
||||
};
|
||||
if let Err(e) = api.close_connection(write_conn) {
|
||||
log::warn!("Failed to close connection: {}", e);
|
||||
}
|
||||
Ok(())
|
||||
})
|
||||
}
|
||||
|
||||
/// Get the interrupt handle for a connection. Must be destroyed with
|
||||
/// `places_interrupt_handle_destroy`.
|
||||
#[no_mangle]
|
||||
pub extern "C" fn places_new_interrupt_handle(
|
||||
handle: u64,
|
||||
error: &mut ExternError,
|
||||
) -> *mut SqlInterruptHandle {
|
||||
CONNECTIONS.call_with_output(error, handle, |conn| conn.new_interrupt_handle())
|
||||
}
|
||||
|
||||
#[no_mangle]
|
||||
pub extern "C" fn places_interrupt(handle: &SqlInterruptHandle, error: &mut ExternError) {
|
||||
ffi_support::call_with_output(error, || handle.interrupt())
|
||||
}
|
||||
|
||||
/// Add an observation to the database. The observation is a VisitObservation represented as JSON.
|
||||
/// Errors are logged.
|
||||
#[no_mangle]
|
||||
pub extern "C" fn places_note_observation(
|
||||
handle: u64,
|
||||
json_observation: FfiStr<'_>,
|
||||
error: &mut ExternError,
|
||||
) {
|
||||
log::debug!("places_note_observation");
|
||||
CONNECTIONS.call_with_result_mut(error, handle, |conn| {
|
||||
let json = json_observation.as_str();
|
||||
let visit: places::VisitObservation = serde_json::from_str(json)?;
|
||||
places::api::apply_observation(conn, visit)
|
||||
})
|
||||
}
|
||||
|
||||
/// Execute a query, returning a `Vec<SearchResult>` as a JSON string. Returned string must be freed
|
||||
/// using `places_destroy_string`. Returns null and logs on errors (for now).
|
||||
#[no_mangle]
|
||||
pub extern "C" fn places_query_autocomplete(
|
||||
handle: u64,
|
||||
search: FfiStr<'_>,
|
||||
limit: u32,
|
||||
error: &mut ExternError,
|
||||
) -> ByteBuffer {
|
||||
log::debug!("places_query_autocomplete");
|
||||
CONNECTIONS.call_with_result(error, handle, |conn| -> places::Result<_> {
|
||||
let results = search_frecent(
|
||||
conn,
|
||||
SearchParams {
|
||||
search_string: search.into_string(),
|
||||
limit,
|
||||
},
|
||||
)?
|
||||
.into_iter()
|
||||
.map(|r| r.into())
|
||||
.collect();
|
||||
Ok(SearchResultList { results })
|
||||
})
|
||||
}
|
||||
|
||||
/// Execute a query, returning a URL string or null. Returned string must be freed
|
||||
/// using `places_destroy_string`. Returns null if no match is found.
|
||||
#[no_mangle]
|
||||
pub extern "C" fn places_match_url(
|
||||
handle: u64,
|
||||
search: FfiStr<'_>,
|
||||
error: &mut ExternError,
|
||||
) -> *mut c_char {
|
||||
log::debug!("places_match_url");
|
||||
CONNECTIONS.call_with_result(error, handle, |conn| match_url(conn, search.as_str()))
|
||||
}
|
||||
|
||||
/// # Safety
|
||||
/// This takes a bunch of pointers and dereferences all of them. It was written
|
||||
/// this way to avoid unnecessary overhead, but should really be rewritten to
|
||||
/// use protobufs.
|
||||
#[no_mangle]
|
||||
pub unsafe extern "C" fn places_get_visited(
|
||||
handle: u64,
|
||||
urls: *const *const c_char,
|
||||
urls_len: i32,
|
||||
byte_buffer: *mut bool,
|
||||
byte_buffer_len: i32,
|
||||
error: &mut ExternError,
|
||||
) {
|
||||
log::debug!("places_get_visited");
|
||||
// This function has a dumb amount of overhead and copying...
|
||||
CONNECTIONS.call_with_result(error, handle, |conn| -> places::Result<()> {
|
||||
assert!(
|
||||
urls_len >= 0,
|
||||
"Negative array length provided to places_get_visited {}",
|
||||
urls_len
|
||||
);
|
||||
assert_eq!(byte_buffer_len, urls_len);
|
||||
let url_ptrs = std::slice::from_raw_parts(urls, urls_len as usize);
|
||||
let output = std::slice::from_raw_parts_mut(byte_buffer, byte_buffer_len as usize);
|
||||
let urls = url_ptrs
|
||||
.iter()
|
||||
.enumerate()
|
||||
.filter_map(|(idx, &p)| {
|
||||
let s = FfiStr::from_raw(p).as_str();
|
||||
url::Url::parse(s).ok().map(|url| (idx, url))
|
||||
})
|
||||
.collect::<Vec<_>>();
|
||||
storage::history::get_visited_into(conn, &urls, output)?;
|
||||
Ok(())
|
||||
})
|
||||
}
|
||||
|
||||
#[no_mangle]
|
||||
pub extern "C" fn places_get_visited_urls_in_range(
|
||||
handle: u64,
|
||||
start: i64,
|
||||
end: i64,
|
||||
include_remote: u8, // JNA has issues with bools...
|
||||
error: &mut ExternError,
|
||||
) -> *mut c_char {
|
||||
log::debug!("places_get_visited_in_range");
|
||||
CONNECTIONS.call_with_result(error, handle, |conn| -> places::Result<_> {
|
||||
let visited = storage::history::get_visited_urls(
|
||||
conn,
|
||||
// Probably should allow into()...
|
||||
types::Timestamp(start.max(0) as u64),
|
||||
types::Timestamp(end.max(0) as u64),
|
||||
include_remote != 0,
|
||||
)?;
|
||||
Ok(serde_json::to_string(&visited)?)
|
||||
})
|
||||
}
|
||||
|
||||
#[no_mangle]
|
||||
pub extern "C" fn places_delete_visits_for(handle: u64, url: FfiStr<'_>, error: &mut ExternError) {
|
||||
log::debug!("places_delete_visits_for");
|
||||
CONNECTIONS.call_with_result(error, handle, |conn| -> places::Result<_> {
|
||||
let url = parse_url(url.as_str())?;
|
||||
let guid = match parse_url(url.as_str()) {
|
||||
Ok(url) => storage::history::url_to_guid(conn, &url)?,
|
||||
Err(e) => {
|
||||
log::warn!("Invalid URL passed to places_delete_visits_for, {}", e);
|
||||
storage::history::href_to_guid(conn, url.clone().as_str())?
|
||||
}
|
||||
};
|
||||
if let Some(guid) = guid {
|
||||
storage::history::delete_visits_for(conn, &guid)?;
|
||||
}
|
||||
Ok(())
|
||||
})
|
||||
}
|
||||
|
||||
#[no_mangle]
|
||||
pub extern "C" fn places_delete_visits_between(
|
||||
handle: u64,
|
||||
start: i64,
|
||||
end: i64,
|
||||
error: &mut ExternError,
|
||||
) {
|
||||
log::debug!("places_delete_visits_between");
|
||||
CONNECTIONS.call_with_result(error, handle, |conn| -> places::Result<_> {
|
||||
storage::history::delete_visits_between(
|
||||
conn,
|
||||
types::Timestamp(start.max(0) as u64),
|
||||
types::Timestamp(end.max(0) as u64),
|
||||
)?;
|
||||
Ok(())
|
||||
})
|
||||
}
|
||||
|
||||
/// Deletes the single visit at `timestamp` for `url`.
///
/// If the URL parses, the typed-URL deletion path is used; otherwise we fall
/// back to deleting by the raw href string (with a warning).
#[no_mangle]
pub extern "C" fn places_delete_visit(
    handle: u64,
    url: FfiStr<'_>,
    timestamp: i64,
    error: &mut ExternError,
) {
    log::debug!("places_delete_visit");
    CONNECTIONS.call_with_result(error, handle, |conn| -> places::Result<_> {
        match parse_url(url.as_str()) {
            Ok(url) => {
                storage::history::delete_place_visit_at_time(
                    conn,
                    &url,
                    // Negative timestamps are clamped to the epoch.
                    types::Timestamp(timestamp.max(0) as u64),
                )?;
            }
            Err(e) => {
                // Unparseable URL: try the href-based lookup instead of
                // reporting an error, so best-effort deletion still works.
                log::warn!("Invalid URL passed to places_delete_visit, {}", e);
                storage::history::delete_place_visit_at_time_by_href(
                    conn,
                    url.as_str(),
                    types::Timestamp(timestamp.max(0) as u64),
                )?;
            }
        };
        Ok(())
    })
}
|
||||
|
||||
/// Wipes local history data (see `storage::history::wipe_local`).
#[no_mangle]
pub extern "C" fn places_wipe_local(handle: u64, error: &mut ExternError) {
    log::debug!("places_wipe_local");
    CONNECTIONS.call_with_result(error, handle, |conn| storage::history::wipe_local(conn))
}

/// Runs database maintenance on this connection
/// (see `storage::run_maintenance`).
#[no_mangle]
pub extern "C" fn places_run_maintenance(handle: u64, error: &mut ExternError) {
    log::debug!("places_run_maintenance");
    CONNECTIONS.call_with_result(error, handle, |conn| storage::run_maintenance(conn))
}

/// Aggressively prunes history storage
/// (see `storage::history::prune_destructively`).
#[no_mangle]
pub extern "C" fn places_prune_destructively(handle: u64, error: &mut ExternError) {
    log::debug!("places_prune_destructively");
    CONNECTIONS.call_with_result(error, handle, |conn| {
        storage::history::prune_destructively(conn)
    })
}

/// Deletes all history (see `storage::history::delete_everything`).
#[no_mangle]
pub extern "C" fn places_delete_everything(handle: u64, error: &mut ExternError) {
    log::debug!("places_delete_everything");
    CONNECTIONS.call_with_result(error, handle, |conn| {
        storage::history::delete_everything(conn)
    })
}
|
||||
|
||||
/// Returns up to `num_items` of the most "frecent" sites whose frecency is
/// at least `frecency_threshold`, serialized into the returned ByteBuffer
/// by `call_with_result`.
#[no_mangle]
pub extern "C" fn places_get_top_frecent_site_infos(
    handle: u64,
    num_items: i32,
    frecency_threshold: i64,
    error: &mut ExternError,
) -> ByteBuffer {
    log::debug!("places_get_top_frecent_site_infos");
    CONNECTIONS.call_with_result(error, handle, |conn| -> places::Result<_> {
        storage::history::get_top_frecent_site_infos(conn, num_items, frecency_threshold)
    })
}
|
||||
|
||||
/// Fetches visit infos between `start_date` and `end_date`, excluding visit
/// types encoded in the `exclude_types` bitmask.
#[no_mangle]
pub extern "C" fn places_get_visit_infos(
    handle: u64,
    start_date: i64,
    end_date: i64,
    exclude_types: i32,
    error: &mut ExternError,
) -> ByteBuffer {
    log::debug!("places_get_visit_infos");
    CONNECTIONS.call_with_result(error, handle, |conn| -> places::Result<_> {
        storage::history::get_visit_infos(
            conn,
            // Negative timestamps from the FFI are clamped to the epoch.
            types::Timestamp(start_date.max(0) as u64),
            types::Timestamp(end_date.max(0) as u64),
            // Note: it's a bug in our FFI android (or swift, eventually) code
            // if this expect fires (same contract as the other entry points
            // taking an exclude_types bitmask).
            VisitTransitionSet::from_u16(exclude_types as u16)
                .expect("Bug: Invalid VisitTransitionSet"),
        )
    })
}
|
||||
|
||||
#[no_mangle]
|
||||
pub extern "C" fn places_get_visit_count(
|
||||
handle: u64,
|
||||
exclude_types: i32,
|
||||
error: &mut ExternError,
|
||||
) -> i64 {
|
||||
log::debug!("places_get_visit_count");
|
||||
CONNECTIONS.call_with_result(error, handle, |conn| -> places::Result<_> {
|
||||
storage::history::get_visit_count(
|
||||
conn,
|
||||
// Note: it's a bug in our FFI android (or swift, eventually) code
|
||||
// if this expect fires.
|
||||
VisitTransitionSet::from_u16(exclude_types as u16)
|
||||
.expect("Bug: Invalid VisitTransitionSet"),
|
||||
)
|
||||
})
|
||||
}
|
||||
|
||||
/// Returns one page of visit infos using offset/count pagination, excluding
/// the visit types encoded in the `exclude_types` bitmask.
#[no_mangle]
pub extern "C" fn places_get_visit_page(
    handle: u64,
    offset: i64,
    count: i64,
    exclude_types: i32,
    error: &mut ExternError,
) -> ByteBuffer {
    log::debug!("places_get_visit_page");
    CONNECTIONS.call_with_result(error, handle, |conn| -> places::Result<_> {
        storage::history::get_visit_page(
            conn,
            offset,
            count,
            // Note: it's a bug in our FFI android (or swift, eventually) code
            // if this expect fires.
            VisitTransitionSet::from_u16(exclude_types as u16)
                .expect("Bug: Invalid VisitTransitionSet"),
        )
    })
}

/// Like `places_get_visit_page`, but additionally constrained by `bound`,
/// which is passed through to `storage::history::get_visit_page_with_bound`
/// (presumably a timestamp bound for stable pagination — confirm against the
/// storage layer).
#[no_mangle]
pub extern "C" fn places_get_visit_page_with_bound(
    handle: u64,
    bound: i64,
    offset: i64,
    count: i64,
    exclude_types: i32,
    error: &mut ExternError,
) -> ByteBuffer {
    log::debug!("places_get_visit_page_with_bound");
    CONNECTIONS.call_with_result(error, handle, |conn| -> places::Result<_> {
        storage::history::get_visit_page_with_bound(
            conn,
            bound,
            offset,
            count,
            // Note: it's a bug in our FFI android (or swift, eventually) code
            // if this expect fires.
            VisitTransitionSet::from_u16(exclude_types as u16)
                .expect("Bug: Invalid VisitTransitionSet"),
        )
    })
}
|
||||
|
||||
#[no_mangle]
|
||||
pub extern "C" fn places_accept_result(
|
||||
handle: u64,
|
||||
search_string: FfiStr<'_>,
|
||||
url: FfiStr<'_>,
|
||||
error: &mut ExternError,
|
||||
) {
|
||||
log::debug!("places_accept_result");
|
||||
CONNECTIONS.call_with_result(error, handle, |conn| -> places::Result<_> {
|
||||
let search_string = search_string.as_str();
|
||||
let url = if let Ok(url) = parse_url(url.as_str()) {
|
||||
url
|
||||
} else {
|
||||
log::warn!("Ignoring invalid URL in places_accept_result");
|
||||
return Ok(());
|
||||
};
|
||||
matcher::accept_result(conn, search_string, &url)?;
|
||||
Ok(())
|
||||
})
|
||||
}
|
||||
|
||||
#[no_mangle]
|
||||
pub extern "C" fn places_api_register_with_sync_manager(handle: u64, error: &mut ExternError) {
|
||||
log::debug!("register_with_sync_manager");
|
||||
APIS.call_with_output(error, handle, |api| {
|
||||
api.clone().register_with_sync_manager()
|
||||
})
|
||||
}
|
||||
|
||||
/// Resets history sync state on the API (see `PlacesApi::reset_history`).
#[no_mangle]
pub extern "C" fn places_reset(handle: u64, error: &mut ExternError) {
    log::debug!("places_reset");
    APIS.call_with_result(error, handle, |api| -> places::Result<_> {
        api.reset_history()?;
        Ok(())
    })
}

/// Resets bookmark sync state on the API (see `PlacesApi::reset_bookmarks`).
#[no_mangle]
pub extern "C" fn bookmarks_reset(handle: u64, error: &mut ExternError) {
    log::debug!("bookmarks_reset");
    APIS.call_with_result(error, handle, |api| -> places::Result<_> {
        api.reset_bookmarks()?;
        Ok(())
    })
}
|
||||
|
||||
/// Performs a history sync via sync15.
///
/// `key_id`, `access_token` and `tokenserver_url` configure the
/// `Sync15StorageClientInit`; `sync_key` is a base64 ksync key used to build
/// the `KeyBundle`. Returns the `ping` produced by `sync_history`,
/// serialized for the FFI by `call_with_result`.
#[no_mangle]
pub extern "C" fn sync15_history_sync(
    handle: u64,
    key_id: FfiStr<'_>,
    access_token: FfiStr<'_>,
    sync_key: FfiStr<'_>,
    tokenserver_url: FfiStr<'_>,
    error: &mut ExternError,
) -> *mut c_char {
    log::debug!("sync15_history_sync");
    APIS.call_with_result(error, handle, |api| -> places::Result<_> {
        let ping = api.sync_history(
            &sync15::Sync15StorageClientInit {
                key_id: key_id.into_string(),
                access_token: access_token.into_string(),
                tokenserver_url: parse_url(tokenserver_url.as_str())?,
            },
            &sync15::KeyBundle::from_ksync_base64(sync_key.as_str())?,
        )?;
        Ok(ping)
    })
}

/// Performs a bookmark sync via sync15; parameters and return value are the
/// same as for `sync15_history_sync` above, delegating to `sync_bookmarks`.
#[no_mangle]
pub extern "C" fn sync15_bookmarks_sync(
    handle: u64,
    key_id: FfiStr<'_>,
    access_token: FfiStr<'_>,
    sync_key: FfiStr<'_>,
    tokenserver_url: FfiStr<'_>,
    error: &mut ExternError,
) -> *mut c_char {
    log::debug!("sync15_bookmarks_sync");
    APIS.call_with_result(error, handle, |api| -> places::Result<_> {
        let ping = api.sync_bookmarks(
            &sync15::Sync15StorageClientInit {
                key_id: key_id.into_string(),
                access_token: access_token.into_string(),
                tokenserver_url: parse_url(tokenserver_url.as_str())?,
            },
            &sync15::KeyBundle::from_ksync_base64(sync_key.as_str())?,
        )?;
        Ok(ping)
    })
}
|
||||
|
||||
#[no_mangle]
|
||||
pub extern "C" fn bookmarks_get_tree(
|
||||
handle: u64,
|
||||
guid: FfiStr<'_>,
|
||||
error: &mut ExternError,
|
||||
) -> ByteBuffer {
|
||||
log::debug!("bookmarks_get_tree");
|
||||
CONNECTIONS.call_with_result(error, handle, |conn| -> places::Result<_> {
|
||||
let root_id = SyncGuid::from(guid.as_str());
|
||||
bookmarks::public_node::fetch_public_tree(conn, &root_id)
|
||||
})
|
||||
}
|
||||
|
||||
#[no_mangle]
|
||||
pub extern "C" fn bookmarks_delete_everything(handle: u64, error: &mut ExternError) {
|
||||
log::debug!("bookmarks_delete_everything");
|
||||
CONNECTIONS.call_with_result(error, handle, |conn| -> places::Result<_> {
|
||||
bookmarks::delete_everything(conn)?;
|
||||
Ok(())
|
||||
})
|
||||
}
|
||||
|
||||
#[no_mangle]
|
||||
pub extern "C" fn bookmarks_get_by_guid(
|
||||
handle: u64,
|
||||
guid: FfiStr<'_>,
|
||||
get_direct_children: u8,
|
||||
error: &mut ExternError,
|
||||
) -> ByteBuffer {
|
||||
log::debug!("bookmarks_get_by_guid");
|
||||
CONNECTIONS.call_with_result(error, handle, |conn| -> places::Result<_> {
|
||||
let guid = SyncGuid::from(guid.as_str());
|
||||
bookmarks::public_node::fetch_bookmark(conn, &guid, get_direct_children != 0)
|
||||
})
|
||||
}
|
||||
|
||||
/// Borrows `len` bytes starting at `data` as a slice for lifetime `'a`.
///
/// # Safety
///
/// When `len > 0`, `data` must be non-null, valid for reads of `len` bytes,
/// and not mutated while the returned slice is alive. Panics on a negative
/// `len`, or on a null `data` with a non-zero `len`.
unsafe fn get_buffer<'a>(data: *const u8, len: i32) -> &'a [u8] {
    assert!(len >= 0, "Bad buffer len: {}", len);
    if len == 0 {
        // This will still fail, but as a bad protobuf format.
        &[]
    } else {
        assert!(!data.is_null(), "Unexpected null data pointer");
        std::slice::from_raw_parts(data, len as usize)
    }
}
|
||||
/// Inserts a bookmark node decoded from the protobuf bytes at `data`/`len`
/// and returns the new item's GUID as a C string.
///
/// # Safety
/// Deref pointer, thus unsafe
#[no_mangle]
pub unsafe extern "C" fn bookmarks_insert(
    handle: u64,
    data: *const u8,
    len: i32,
    error: &mut ExternError,
) -> *mut c_char {
    log::debug!("bookmarks_insert");
    use places::msg_types::BookmarkNode;
    CONNECTIONS.call_with_result(error, handle, |conn| -> places::Result<_> {
        // Decode the protobuf message, then convert it to the strongly-typed
        // insertable representation before touching storage.
        let buffer = get_buffer(data, len);
        let bookmark: BookmarkNode = prost::Message::decode(buffer)?;
        let insertable = bookmark.into_insertable()?;
        let guid = bookmarks::insert_bookmark(conn, &insertable)?;
        Ok(guid.into_string())
    })
}

/// Updates an existing bookmark from the protobuf bytes at `data`/`len`.
///
/// # Safety
/// Deref pointer, thus unsafe
#[no_mangle]
pub unsafe extern "C" fn bookmarks_update(
    handle: u64,
    data: *const u8,
    len: i32,
    error: &mut ExternError,
) {
    log::debug!("bookmarks_update");
    use places::msg_types::BookmarkNode;
    CONNECTIONS.call_with_result(error, handle, |conn| -> places::Result<_> {
        let buffer = get_buffer(data, len);
        let bookmark: BookmarkNode = prost::Message::decode(buffer)?;
        bookmarks::public_node::update_bookmark_from_message(conn, bookmark)?;
        Ok(())
    })
}
|
||||
|
||||
#[no_mangle]
|
||||
pub extern "C" fn bookmarks_delete(handle: u64, id: FfiStr<'_>, error: &mut ExternError) -> u8 {
|
||||
log::debug!("bookmarks_delete");
|
||||
CONNECTIONS.call_with_result(error, handle, |conn| -> places::Result<_> {
|
||||
let guid = SyncGuid::from(id.as_str());
|
||||
let did_delete = bookmarks::delete_bookmark(conn, &guid)?;
|
||||
Ok(did_delete)
|
||||
})
|
||||
}
|
||||
|
||||
/// Returns all bookmarks pointing at `url`. An unparseable URL yields an
/// empty list rather than an error.
#[no_mangle]
pub extern "C" fn bookmarks_get_all_with_url(
    handle: u64,
    url: FfiStr<'_>,
    error: &mut ExternError,
) -> ByteBuffer {
    log::debug!("bookmarks_get_all_with_url");
    CONNECTIONS.call_with_result(error, handle, |conn| -> places::Result<_> {
        Ok(match parse_url(url.as_str()) {
            Ok(url) => {
                BookmarkNodeList::from(bookmarks::public_node::fetch_bookmarks_by_url(conn, &url)?)
            }
            Err(e) => {
                // There are no bookmarks with the URL if it's invalid.
                log::warn!("Invalid URL passed to bookmarks_get_all_with_url, {}", e);
                BookmarkNodeList::from(Vec::<bookmarks::public_node::PublicNode>::new())
            }
        })
    })
}
|
||||
|
||||
/// # Safety
|
||||
/// Deref pointer, thus unsafe
|
||||
#[no_mangle]
|
||||
pub unsafe extern "C" fn bookmarks_get_url_for_keyword(
|
||||
handle: u64,
|
||||
keyword: FfiStr<'_>,
|
||||
error: &mut ExternError,
|
||||
) -> *mut c_char {
|
||||
log::debug!("bookmarks_get_url_for_keyword");
|
||||
CONNECTIONS.call_with_result(error, handle, |conn| -> places::Result<_> {
|
||||
let url = bookmarks::bookmarks_get_url_for_keyword(conn, keyword.as_str())?;
|
||||
Ok(url.map(String::from))
|
||||
})
|
||||
}
|
||||
|
||||
/// Searches bookmarks matching `query`, returning at most `limit` results.
#[no_mangle]
pub extern "C" fn bookmarks_search(
    handle: u64,
    query: FfiStr<'_>,
    limit: i32,
    error: &mut ExternError,
) -> ByteBuffer {
    log::debug!("bookmarks_search");
    CONNECTIONS.call_with_result(error, handle, |conn| -> places::Result<_> {
        Ok(BookmarkNodeList::from(
            // Note: a negative `limit` wraps around in this `as u32` cast.
            bookmarks::public_node::search_bookmarks(conn, query.as_str(), limit as u32)?,
        ))
    })
}

/// Returns the `limit` most recently added bookmarks.
#[no_mangle]
pub extern "C" fn bookmarks_get_recent(
    handle: u64,
    limit: i32,
    error: &mut ExternError,
) -> ByteBuffer {
    log::debug!("bookmarks_get_recent");
    CONNECTIONS.call_with_result(error, handle, |conn| -> places::Result<_> {
        Ok(BookmarkNodeList::from(
            // Note: a negative `limit` wraps around in this `as u32` cast.
            bookmarks::public_node::recent_bookmarks(conn, limit as u32)?,
        ))
    })
}
|
||||
|
||||
// FFI destructors: the consumer must call these to free values previously
// returned across the FFI boundary.
define_string_destructor!(places_destroy_string);
define_bytebuffer_destructor!(places_destroy_bytebuffer);
define_handle_map_deleter!(APIS, places_api_destroy);

define_handle_map_deleter!(CONNECTIONS, places_connection_destroy);
define_box_destructor!(SqlInterruptHandle, places_interrupt_handle_destroy);
|
|
@ -25,9 +25,8 @@ public enum BookmarkRoots {
|
|||
])
|
||||
}
|
||||
|
||||
/**
|
||||
* Enumeration of the type of a bookmark item.
|
||||
*/
|
||||
// Keeping `BookmarkNodeType` in the swift wrapper because the iOS code relies on the raw value of the variants of
|
||||
// this enum.
|
||||
public enum BookmarkNodeType: Int32 {
|
||||
// Note: these values need to match the Rust BookmarkType
|
||||
// enum in types.rs
|
||||
|
@ -43,7 +42,7 @@ public enum BookmarkNodeType: Int32 {
|
|||
* A base class containing the set of fields common to all nodes
|
||||
* in the bookmark tree.
|
||||
*/
|
||||
public class BookmarkNode {
|
||||
public class BookmarkNodeData {
|
||||
/**
|
||||
* The type of this bookmark.
|
||||
*/
|
||||
|
@ -94,7 +93,6 @@ public class BookmarkNode {
|
|||
}
|
||||
|
||||
// swiftformat:enable redundantFileprivate
|
||||
|
||||
/**
|
||||
* Returns true if this record is a bookmark root.
|
||||
*
|
||||
|
@ -105,13 +103,64 @@ public class BookmarkNode {
|
|||
}
|
||||
}
|
||||
|
||||
public extension BookmarkItem {
    /// Bridges the uniffi-generated `BookmarkItem` enum variants to the
    /// hand-written `BookmarkNodeData` class hierarchy used by the existing
    /// iOS API surface.
    var asBookmarkNodeData: BookmarkNodeData {
        switch self {
        case let .separator(s):
            return BookmarkSeparatorData(guid: s.guid,
                                         dateAdded: s.dateAdded,
                                         lastModified: s.lastModified,
                                         parentGUID: s.parentGuid,
                                         position: s.position)
        case let .bookmark(b):
            // A nil title is surfaced to consumers as the empty string.
            return BookmarkItemData(guid: b.guid,
                                    dateAdded: b.dateAdded,
                                    lastModified: b.lastModified,
                                    parentGUID: b.parentGuid,
                                    position: b.position,
                                    url: b.url,
                                    title: b.title ?? "")
        case let .folder(f):
            // Children are converted recursively; a nil `childNodes` maps to
            // nil `children` (i.e. "children not fetched").
            return BookmarkFolderData(guid: f.guid,
                                      dateAdded: f.dateAdded,
                                      lastModified: f.lastModified,
                                      parentGUID: f.parentGuid,
                                      position: f.position,
                                      title: f.title ?? "",
                                      childGUIDs: f.childGuids ?? [String](),
                                      children: f.childNodes?.map { child in child.asBookmarkNodeData })
        }
    }
}

// XXX - This function exists to convert the return types of the `bookmarksGetAllWithUrl`,
// `bookmarksSearch`, and `bookmarksGetRecent` functions which will always return the `BookmarkData`
// variant of the `BookmarkItem` enum. This function should be removed once the return types of the
// backing rust functions have been converted from `BookmarkItem`.
func toBookmarkItemDataList(items: [BookmarkItem]) -> [BookmarkItemData] {
    // Converts one item, returning nil for non-bookmark variants.
    func asBookmarkItemData(item: BookmarkItem) -> BookmarkItemData? {
        if case let .bookmark(b) = item {
            return BookmarkItemData(guid: b.guid,
                                    dateAdded: b.dateAdded,
                                    lastModified: b.lastModified,
                                    parentGUID: b.parentGuid,
                                    position: b.position,
                                    url: b.url,
                                    title: b.title ?? "")
        }
        return nil
    }

    // The force-unwrap is intentional per the XXX note above: the backing
    // rust functions only return the `.bookmark` variant, so a crash here
    // indicates a broken contract, not expected input.
    return items.map { asBookmarkItemData(item: $0)! }
}
|
||||
|
||||
/**
|
||||
* A bookmark which is a separator.
|
||||
*
|
||||
* It's type is always `BookmarkNodeType.separator`, and it has no fields
|
||||
* besides those defined by `BookmarkNode`.
|
||||
* besides those defined by `BookmarkNodeData`.
|
||||
*/
|
||||
public class BookmarkSeparator: BookmarkNode {
|
||||
public class BookmarkSeparatorData: BookmarkNodeData {
|
||||
public init(guid: String, dateAdded: Int64, lastModified: Int64, parentGUID: String?, position: UInt32) {
|
||||
super.init(
|
||||
type: .separator,
|
||||
|
@ -128,9 +177,9 @@ public class BookmarkSeparator: BookmarkNode {
|
|||
* A bookmark tree node that actually represents a bookmark.
|
||||
*
|
||||
* It's type is always `BookmarkNodeType.bookmark`, and in addition to the
|
||||
* fields provided by `BookmarkNode`, it has a `title` and a `url`.
|
||||
* fields provided by `BookmarkNodeData`, it has a `title` and a `url`.
|
||||
*/
|
||||
public class BookmarkItem: BookmarkNode {
|
||||
public class BookmarkItemData: BookmarkNodeData {
|
||||
/**
|
||||
* The URL of this bookmark.
|
||||
*/
|
||||
|
@ -169,10 +218,10 @@ public class BookmarkItem: BookmarkNode {
|
|||
* A bookmark which is a folder.
|
||||
*
|
||||
* It's type is always `BookmarkNodeType.folder`, and in addition to the
|
||||
* fields provided by `BookmarkNode`, it has a `title`, a list of `childGUIDs`,
|
||||
* fields provided by `BookmarkNodeData`, it has a `title`, a list of `childGUIDs`,
|
||||
* and possibly a list of `children`.
|
||||
*/
|
||||
public class BookmarkFolder: BookmarkNode {
|
||||
public class BookmarkFolderData: BookmarkNodeData {
|
||||
/**
|
||||
* The title of this bookmark folder.
|
||||
*
|
||||
|
@ -194,7 +243,7 @@ public class BookmarkFolder: BookmarkNode {
|
|||
* this is a child (or grandchild, etc) of the directly returned node, then `children`
|
||||
* will *not* be present (as that is the point of `recursive = false`).
|
||||
*/
|
||||
public let children: [BookmarkNode]?
|
||||
public let children: [BookmarkNodeData]?
|
||||
|
||||
public init(guid: String,
|
||||
dateAdded: Int64,
|
||||
|
@ -203,7 +252,7 @@ public class BookmarkFolder: BookmarkNode {
|
|||
position: UInt32,
|
||||
title: String,
|
||||
childGUIDs: [String],
|
||||
children: [BookmarkNode]?)
|
||||
children: [BookmarkNodeData]?)
|
||||
{
|
||||
self.title = title
|
||||
self.childGUIDs = childGUIDs
|
||||
|
@ -218,73 +267,3 @@ public class BookmarkFolder: BookmarkNode {
|
|||
)
|
||||
}
|
||||
}
|
||||
|
||||
// We pass in whether or not we expect children, because we don't have a way
// of distinguishing 'empty folder' from 'this API does not return children'.
/// Converts a protobuf `MsgTypes_BookmarkNode` into the appropriate
/// `BookmarkNode` subclass (`BookmarkItem`, `BookmarkSeparator`, or
/// `BookmarkFolder`), recursing into folder children.
internal func unpackProtobuf(msg: MsgTypes_BookmarkNode) -> BookmarkNode {
    // Should never fail unless BookmarkNodeType in this file and
    // BookmarkType in rust get out of sync
    let type = BookmarkNodeType(rawValue: msg.nodeType)!
    let guid = msg.guid
    let parentGUID = msg.parentGuid
    let position = msg.position
    let dateAdded = msg.dateAdded
    let lastModified = msg.lastModified

    // An absent title is surfaced as the empty string.
    let title = msg.hasTitle ? msg.title : ""
    switch type {
    case .bookmark:
        return BookmarkItem(
            guid: guid,
            dateAdded: dateAdded,
            lastModified: lastModified,
            parentGUID: parentGUID,
            position: position,
            url: msg.url,
            title: title
        )
    case .separator:
        return BookmarkSeparator(
            guid: guid,
            dateAdded: dateAdded,
            lastModified: lastModified,
            parentGUID: parentGUID,
            position: position
        )
    case .folder:
        // Recursively unpack whatever child nodes were sent.
        let childNodes = msg.childNodes.map { child in
            unpackProtobuf(msg: child)
        }
        var childGUIDs = msg.childGuids
        // We don't bother sending both the guids and the child nodes over
        // the FFI as it's redundant.
        if childGUIDs.isEmpty, !childNodes.isEmpty {
            childGUIDs = childNodes.map { node in node.guid }
        }
        // `haveChildNodes` is the flag distinguishing 'empty folder' from
        // 'this API does not return children' (see comment above).
        let childrenExpected = msg.hasHaveChildNodes ? msg.haveChildNodes : false
        return BookmarkFolder(
            guid: guid,
            dateAdded: dateAdded,
            lastModified: lastModified,
            parentGUID: parentGUID,
            position: position,
            title: title,
            childGUIDs: childGUIDs,
            children: childrenExpected ? childNodes : nil
        )
    }
}

/// Converts a protobuf node list where every node is known to be a bookmark
/// (not a folder or separator) into a `[BookmarkItem]`.
internal func unpackProtobufItemList(msg: MsgTypes_BookmarkNodeList) -> [BookmarkItem] {
    return msg.nodes.map { node in
        BookmarkItem(
            guid: node.guid,
            dateAdded: node.dateAdded,
            lastModified: node.lastModified,
            parentGUID: node.parentGuid,
            position: node.position,
            url: node.url,
            title: node.hasTitle ? node.title : ""
        )
    }
}
|
||||
|
|
|
@ -1,189 +0,0 @@
|
|||
/* This Source Code Form is subject to the terms of the Mozilla Public
|
||||
* License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
|
||||
|
||||
import Foundation
|
||||
import os.log
|
||||
#if canImport(MozillaRustComponents)
|
||||
import MozillaRustComponents
|
||||
#endif
|
||||
|
||||
/// Indicates an error occurred while calling into the places storage layer
|
||||
public enum PlacesError: LocalizedError {
|
||||
/// This indicates an attempt to use a connection after the PlacesAPI
|
||||
/// it came from is destroyed. This indicates a usage error of this library.
|
||||
case connUseAfterAPIClosed
|
||||
|
||||
/// This is a catch-all error code used for errors not yet exposed to consumers,
|
||||
/// typically since it doesn't seem like there's a sane way for them to be handled.
|
||||
case unexpected(message: String)
|
||||
|
||||
/// The rust code implementing places storage paniced. This always indicates a bug.
|
||||
case panic(message: String)
|
||||
|
||||
/// The place we were given is invalid.
|
||||
case invalidPlace(message: String)
|
||||
|
||||
/// We failed to parse the provided URL.
|
||||
case urlParseError(message: String)
|
||||
|
||||
/// The requested operation failed because the database was busy
|
||||
/// performing operations on a separate connection to the same DB.
|
||||
case databaseBusy(message: String)
|
||||
|
||||
/// The requested operation failed because it was interrupted
|
||||
case databaseInterrupted(message: String)
|
||||
|
||||
/// The requested operation failed because the store is corrupt
|
||||
case databaseCorrupt(message: String)
|
||||
|
||||
/// Thrown on insertions and updates that specify a parent which
|
||||
/// is not a folder
|
||||
case invalidParent(message: String)
|
||||
|
||||
/// Thrown on insertions and updates that specify a GUID which
|
||||
/// does not exist.
|
||||
case noSuchItem(message: String)
|
||||
|
||||
/// Thrown on insertions and updates that attempt to insert or
|
||||
/// update a bookmark URL beyond the maximum length of
|
||||
/// 65536 bytes.
|
||||
case urlTooLong(message: String)
|
||||
|
||||
/// Thrown when attempting to update a bookmark in an illegal way,
|
||||
/// for example, trying to set the URL of a folder, the title of
|
||||
/// a separator, etc.
|
||||
case illegalChange(message: String)
|
||||
|
||||
/// Thrown when attempting to update or delete a root, or
|
||||
/// insert a new item as a child of root________.
|
||||
case cannotUpdateRoot(message: String)
|
||||
|
||||
/// Our implementation of the localizedError protocol -- (This shows up in Sentry)
|
||||
public var errorDescription: String? {
|
||||
switch self {
|
||||
case .connUseAfterAPIClosed:
|
||||
return "PlacesError.connUseAfterAPIClosed"
|
||||
case let .unexpected(message):
|
||||
return "PlacesError.unexpected: \(message)"
|
||||
case let .panic(message):
|
||||
return "PlacesError.panic: \(message)"
|
||||
case let .invalidPlace(message):
|
||||
return "PlacesError.invalidPlace: \(message)"
|
||||
case let .urlParseError(message):
|
||||
return "PlacesError.urlParseError: \(message)"
|
||||
case let .databaseBusy(message):
|
||||
return "PlacesError.databaseBusy: \(message)"
|
||||
case let .databaseInterrupted(message):
|
||||
return "PlacesError.databaseInterrupted: \(message)"
|
||||
case let .databaseCorrupt(message):
|
||||
return "PlacesError.databaseCorrupt: \(message)"
|
||||
case let .invalidParent(message):
|
||||
return "PlacesError.invalidParent: \(message)"
|
||||
case let .noSuchItem(message):
|
||||
return "PlacesError.noSuchItem: \(message)"
|
||||
case let .urlTooLong(message):
|
||||
return "PlacesError.urlTooLong: \(message)"
|
||||
case let .illegalChange(message):
|
||||
return "PlacesError.illegalChange: \(message)"
|
||||
case let .cannotUpdateRoot(message):
|
||||
return "PlacesError.cannotUpdateRoot: \(message)"
|
||||
}
|
||||
}
|
||||
|
||||
// The name is attempting to indicate that we free rustError.message if it
|
||||
// existed, and that it's a very bad idea to touch it after you call this
|
||||
// function
|
||||
static func fromConsuming(_ rustError: PlacesRustError) -> PlacesError? {
|
||||
let message = rustError.message == nil ? "" : String(freeingPlacesString: rustError.message!)
|
||||
return makeException(code: rustError.code, message: message)
|
||||
}
|
||||
|
||||
static func makeException(code: PlacesErrorCode, message: String) -> PlacesError? {
|
||||
switch code {
|
||||
case Places_NoError:
|
||||
return nil
|
||||
case Places_UrlParseError:
|
||||
return .urlParseError(message: message)
|
||||
case Places_DatabaseBusy:
|
||||
return .databaseBusy(message: message)
|
||||
case Places_DatabaseInterrupted:
|
||||
return .databaseInterrupted(message: message)
|
||||
case Places_Corrupt:
|
||||
return .databaseCorrupt(message: message)
|
||||
|
||||
case Places_InvalidPlace_InvalidParent:
|
||||
return .invalidParent(message: message)
|
||||
case Places_InvalidPlace_NoSuchItem:
|
||||
return .noSuchItem(message: message)
|
||||
case Places_InvalidPlace_UrlTooLong:
|
||||
return .urlTooLong(message: message)
|
||||
case Places_InvalidPlace_IllegalChange:
|
||||
return .illegalChange(message: message)
|
||||
case Places_InvalidPlace_CannotUpdateRoot:
|
||||
return .cannotUpdateRoot(message: message)
|
||||
|
||||
case Places_Panic:
|
||||
return .panic(message: message)
|
||||
// Note: `1` is used as a generic catch all, but we
|
||||
// might as well handle the others the same way.
|
||||
default:
|
||||
return .unexpected(message: message)
|
||||
}
|
||||
}
|
||||
|
||||
@discardableResult
|
||||
static func tryUnwrap<T>(_ callback: (UnsafeMutablePointer<PlacesRustError>) throws -> T?) throws -> T? {
|
||||
var err = PlacesRustError(code: Places_NoError, message: nil)
|
||||
let returnedVal = try callback(&err)
|
||||
if let placesErr = PlacesError.fromConsuming(err) {
|
||||
throw placesErr
|
||||
}
|
||||
guard let result = returnedVal else {
|
||||
return nil
|
||||
}
|
||||
return result
|
||||
}
|
||||
|
||||
@discardableResult
|
||||
static func unwrap<T>(_ callback: (UnsafeMutablePointer<PlacesRustError>) throws -> T?) throws -> T {
|
||||
guard let result = try PlacesError.tryUnwrap(callback) else {
|
||||
throw PlacesError.unexpected(message: "Unexpected error after unwrapping")
|
||||
}
|
||||
return result
|
||||
}
|
||||
|
||||
// Same as `tryUnwrap`, but instead of erroring, just logs. Useful for cases like destructors where we
|
||||
// cannot throw.
|
||||
@discardableResult
|
||||
static func unwrapOrLog<T>(_ callback: (UnsafeMutablePointer<PlacesRustError>) throws -> T?) -> T? {
|
||||
do {
|
||||
let result = try PlacesError.tryUnwrap(callback)
|
||||
return result
|
||||
} catch let e {
|
||||
// Can't log what the error is without jumping through hoops apparently, oh well...
|
||||
os_log("Hit places error when throwing is impossible %{public}@", type: .error, "\(e)")
|
||||
return nil
|
||||
}
|
||||
}
|
||||
|
||||
@discardableResult
|
||||
static func unwrapWithUniffi<T>(_ callback: (UnsafeMutablePointer<PlacesRustError>) throws -> T?) throws -> T? {
|
||||
do {
|
||||
var err = PlacesRustError(code: Places_NoError, message: nil)
|
||||
return try callback(&err)
|
||||
} catch let errorWrapper as ErrorWrapper {
|
||||
switch errorWrapper {
|
||||
case let .Wrapped(message):
|
||||
let splitError = message.components(separatedBy: "|")
|
||||
|
||||
// If we couldn't get the right code, default to unexpected error
|
||||
let code = Int32(splitError[0]) ?? 1
|
||||
let message = splitError[1]
|
||||
throw makeException(code: PlacesErrorCode(code), message: message)!
|
||||
default:
|
||||
throw PlacesError.unexpected(message: "Unexpected Error")
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
|
@ -1,14 +0,0 @@
|
|||
/* This Source Code Form is subject to the terms of the Mozilla Public
|
||||
* License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
|
||||
|
||||
import Foundation
|
||||
#if canImport(MozillaRustComponents)
|
||||
import MozillaRustComponents
|
||||
#endif
|
||||
|
||||
extension Data {
|
||||
init(placesRustBuffer: PlacesRustBuffer) {
|
||||
self.init(bytes: placesRustBuffer.data!, count: Int(placesRustBuffer.len))
|
||||
}
|
||||
}
|
|
@ -1,15 +0,0 @@
|
|||
/* This Source Code Form is subject to the terms of the Mozilla Public
|
||||
* License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
|
||||
|
||||
import Foundation
|
||||
#if canImport(MozillaRustComponents)
|
||||
import MozillaRustComponents
|
||||
#endif
|
||||
|
||||
extension String {
|
||||
init(freeingPlacesString rustString: UnsafeMutablePointer<CChar>) {
|
||||
defer { places_destroy_string(rustString) }
|
||||
self.init(cString: rustString)
|
||||
}
|
||||
}
|
|
@ -11,8 +11,18 @@ import os.log
|
|||
import MozillaRustComponents
|
||||
#endif
|
||||
|
||||
internal typealias APIHandle = UInt64
|
||||
internal typealias ConnectionHandle = UInt64
|
||||
internal typealias UniffiPlacesApi = PlacesApi
|
||||
internal typealias UniffiPlacesConnection = PlacesConnection
|
||||
public typealias Url = String
|
||||
public typealias Guid = String
|
||||
|
||||
/**
|
||||
* This is specifically for throwing when there is
|
||||
* API misuse and/or connection issues with PlacesReadConnection
|
||||
*/
|
||||
public enum PlacesApiError: Error {
|
||||
case connUseAfterApiClosed
|
||||
}
|
||||
|
||||
/**
|
||||
* This is something like a places connection manager. It primarialy exists to
|
||||
|
@ -22,10 +32,11 @@ internal typealias ConnectionHandle = UInt64
|
|||
* (although it does not actually perform any pooling).
|
||||
*/
|
||||
public class PlacesAPI {
|
||||
private let handle: APIHandle
|
||||
private let writeConn: PlacesWriteConnection
|
||||
private let api: UniffiPlacesApi
|
||||
|
||||
private let queue = DispatchQueue(label: "com.mozilla.places.api")
|
||||
private let interruptHandle: InterruptHandle
|
||||
private let interruptHandle: SqlInterruptHandle
|
||||
|
||||
/**
|
||||
* Initialize a PlacesAPI
|
||||
|
@ -35,59 +46,14 @@ public class PlacesAPI {
|
|||
* - Throws: `PlacesError` if initializing the database failed.
|
||||
*/
|
||||
public init(path: String) throws {
|
||||
let handle = try PlacesError.unwrap { error in
|
||||
places_api_new(path, error)
|
||||
}
|
||||
self.handle = handle
|
||||
do {
|
||||
let writeHandle = try PlacesError.unwrap { error in
|
||||
places_connection_new(handle, Int32(PlacesConn_ReadWrite), error)
|
||||
}
|
||||
writeConn = try PlacesWriteConnection(handle: writeHandle)
|
||||
try api = placesApiNew(dbPath: path)
|
||||
|
||||
interruptHandle = InterruptHandle(ptr: try PlacesError.unwrap { error in
|
||||
places_new_sync_conn_interrupt_handle(handle, error)
|
||||
})
|
||||
let uniffiConn = try api.newConnection(connType: ConnectionType.readWrite)
|
||||
writeConn = try PlacesWriteConnection(conn: uniffiConn)
|
||||
|
||||
writeConn.api = self
|
||||
} catch let e {
|
||||
// We failed to open the write connection (or the interrupt handle),
|
||||
// even though the API was opened. This is... strange, but possible.
|
||||
// Anyway, we want to clean up our API if this happens.
|
||||
//
|
||||
// If closing the API fails, it's probably caused by the same
|
||||
// underlying problem as whatever made us fail to open the write
|
||||
// connection, so we'd rather use the first error, since it's
|
||||
// hopefully more descriptive.
|
||||
PlacesError.unwrapOrLog { error in
|
||||
places_api_destroy(handle, error)
|
||||
}
|
||||
// Note: We don't need to explicitly clean up `self.writeConn` in
|
||||
// the case that it gets opened successfully, but initializing
|
||||
// `self.interruptHandle` fails -- the `PlacesWriteConnection`
|
||||
// `deinit` should still run and do the right thing.
|
||||
throw e
|
||||
}
|
||||
}
|
||||
interruptHandle = try api.newSyncConnInterruptHandle()
|
||||
|
||||
deinit {
|
||||
// Note: we shouldn't need to queue.sync with our queue in deinit (no more references
|
||||
// exist to us), however we still need to sync with the write conn's queue, since it
|
||||
// could still be in use.
|
||||
|
||||
self.writeConn.queue.sync {
|
||||
// If the writer is still around (it should be), return it to the api.
|
||||
let writeHandle = self.writeConn.takeHandle()
|
||||
if writeHandle != 0 {
|
||||
PlacesError.unwrapOrLog { error in
|
||||
places_api_return_write_conn(self.handle, writeHandle, error)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
PlacesError.unwrapOrLog { error in
|
||||
places_api_destroy(self.handle, error)
|
||||
}
|
||||
writeConn.api = self
|
||||
}
|
||||
|
||||
/**
|
||||
|
@ -117,9 +83,7 @@ public class PlacesAPI {
|
|||
*/
|
||||
open func migrateBookmarksFromBrowserDb(path: String) throws {
|
||||
try queue.sync {
|
||||
try PlacesError.unwrap { error in
|
||||
places_bookmarks_import_from_ios(handle, path, error)
|
||||
}
|
||||
try self.api.placesBookmarksImportFromIos(dbPath: path)
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -130,10 +94,8 @@ public class PlacesAPI {
|
|||
*/
|
||||
open func openReader() throws -> PlacesReadConnection {
|
||||
return try queue.sync {
|
||||
let conn = try PlacesError.unwrap { error in
|
||||
places_connection_new(handle, Int32(PlacesConn_ReadOnly), error)
|
||||
}
|
||||
return try PlacesReadConnection(handle: conn, api: self)
|
||||
let uniffiConn = try api.newConnection(connType: ConnectionType.readOnly)
|
||||
return try PlacesReadConnection(conn: uniffiConn, api: self)
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -168,15 +130,12 @@ public class PlacesAPI {
|
|||
*/
|
||||
open func syncBookmarks(unlockInfo: SyncUnlockInfo) throws -> String {
|
||||
return try queue.sync {
|
||||
let pingStr = try PlacesError.unwrap { err in
|
||||
sync15_bookmarks_sync(handle,
|
||||
unlockInfo.kid,
|
||||
unlockInfo.fxaAccessToken,
|
||||
unlockInfo.syncKey,
|
||||
unlockInfo.tokenserverURL,
|
||||
err)
|
||||
}
|
||||
return String(freeingPlacesString: pingStr)
|
||||
return try self.api.bookmarksSync(
|
||||
keyId: unlockInfo.kid,
|
||||
accessToken: unlockInfo.fxaAccessToken,
|
||||
syncKey: unlockInfo.syncKey,
|
||||
tokenserverUrl: unlockInfo.tokenserverURL
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -201,9 +160,7 @@ public class PlacesAPI {
|
|||
*/
|
||||
open func resetHistorySyncMetadata() throws {
|
||||
return try queue.sync {
|
||||
try PlacesError.unwrap { err in
|
||||
places_reset(handle, err)
|
||||
}
|
||||
try self.api.resetHistory()
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -223,9 +180,7 @@ public class PlacesAPI {
|
|||
*/
|
||||
open func resetBookmarkSyncMetadata() throws {
|
||||
return try queue.sync {
|
||||
try PlacesError.unwrap { err in
|
||||
bookmarks_reset(handle, err)
|
||||
}
|
||||
return try self.api.bookmarksReset()
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -247,40 +202,20 @@ public class PlacesAPI {
|
|||
*/
|
||||
public class PlacesReadConnection {
|
||||
fileprivate let queue = DispatchQueue(label: "com.mozilla.places.conn")
|
||||
fileprivate var handle: ConnectionHandle
|
||||
fileprivate var conn: UniffiPlacesConnection
|
||||
fileprivate weak var api: PlacesAPI?
|
||||
fileprivate let interruptHandle: InterruptHandle
|
||||
fileprivate let interruptHandle: SqlInterruptHandle
|
||||
|
||||
fileprivate init(handle: ConnectionHandle, api: PlacesAPI? = nil) throws {
|
||||
self.handle = handle
|
||||
fileprivate init(conn: UniffiPlacesConnection, api: PlacesAPI? = nil) throws {
|
||||
self.conn = conn
|
||||
self.api = api
|
||||
interruptHandle = InterruptHandle(ptr: try PlacesError.unwrap { error in
|
||||
places_new_interrupt_handle(handle, error)
|
||||
})
|
||||
interruptHandle = try self.conn.newInterruptHandle()
|
||||
}
|
||||
|
||||
// Note: caller synchronizes!
|
||||
fileprivate func checkApi() throws {
|
||||
if api == nil {
|
||||
throw PlacesError.connUseAfterAPIClosed
|
||||
}
|
||||
}
|
||||
|
||||
// Note: caller synchronizes!
|
||||
fileprivate func takeHandle() -> ConnectionHandle {
|
||||
let handle = self.handle
|
||||
self.handle = 0
|
||||
return handle
|
||||
}
|
||||
|
||||
deinit {
|
||||
// Note: don't need to queue.sync in deinit -- no more references exist to us.
|
||||
let handle = self.takeHandle()
|
||||
if handle != 0 {
|
||||
// In practice this can only fail if the rust code panics.
|
||||
PlacesError.unwrapOrLog { err in
|
||||
places_connection_destroy(handle, err)
|
||||
}
|
||||
throw PlacesApiError.connUseAfterApiClosed
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -319,23 +254,14 @@ public class PlacesReadConnection {
|
|||
* - `PlacesError.panic`: If the rust code panics while completing this
|
||||
* operation. (If this occurs, please let us know).
|
||||
*/
|
||||
open func getBookmarksTree(rootGUID: String, recursive: Bool) throws -> BookmarkNode? {
|
||||
open func getBookmarksTree(rootGUID: Guid, recursive: Bool) throws -> BookmarkNodeData? {
|
||||
return try queue.sync {
|
||||
try self.checkApi()
|
||||
let buffer = try PlacesError.unwrap { (error: UnsafeMutablePointer<PlacesRustError>) -> PlacesRustBuffer in
|
||||
if recursive {
|
||||
return bookmarks_get_tree(self.handle, rootGUID, error)
|
||||
} else {
|
||||
return bookmarks_get_by_guid(self.handle, rootGUID, 1, error)
|
||||
}
|
||||
if recursive {
|
||||
return try self.conn.bookmarksGetTree(itemGuid: rootGUID)?.asBookmarkNodeData
|
||||
} else {
|
||||
return try self.conn.bookmarksGetByGuid(guid: rootGUID, getDirectChildren: true)?.asBookmarkNodeData
|
||||
}
|
||||
if buffer.data == nil {
|
||||
return nil
|
||||
}
|
||||
defer { places_destroy_bytebuffer(buffer) }
|
||||
// This should never fail, since we encoded it on the other side with Rust
|
||||
let msg = try MsgTypes_BookmarkNode(serializedData: Data(placesRustBuffer: buffer))
|
||||
return unpackProtobuf(msg: msg)
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -363,19 +289,10 @@ public class PlacesReadConnection {
|
|||
* - `PlacesError.panic`: If the rust code panics while completing this
|
||||
* operation. (If this occurs, please let us know).
|
||||
*/
|
||||
open func getBookmark(guid: String) throws -> BookmarkNode? {
|
||||
open func getBookmark(guid: Guid) throws -> BookmarkNodeData? {
|
||||
return try queue.sync {
|
||||
try self.checkApi()
|
||||
let buffer = try PlacesError.unwrap { error in
|
||||
bookmarks_get_by_guid(self.handle, guid, 0, error)
|
||||
}
|
||||
if buffer.data == nil {
|
||||
return nil
|
||||
}
|
||||
defer { places_destroy_bytebuffer(buffer) }
|
||||
// This could probably be try!
|
||||
let msg = try MsgTypes_BookmarkNode(serializedData: Data(placesRustBuffer: buffer))
|
||||
return unpackProtobuf(msg: msg)
|
||||
return try self.conn.bookmarksGetByGuid(guid: guid, getDirectChildren: false)?.asBookmarkNodeData
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -404,16 +321,11 @@ public class PlacesReadConnection {
|
|||
* - `PlacesError.panic`: If the rust code panics while completing this
|
||||
* operation. (If this occurs, please let us know).
|
||||
*/
|
||||
open func getBookmarksWithURL(url: String) throws -> [BookmarkItem] {
|
||||
open func getBookmarksWithURL(url: Url) throws -> [BookmarkItemData] {
|
||||
return try queue.sync {
|
||||
try self.checkApi()
|
||||
let buffer = try PlacesError.unwrap { error in
|
||||
bookmarks_get_all_with_url(self.handle, url, error)
|
||||
}
|
||||
defer { places_destroy_bytebuffer(buffer) }
|
||||
// This could probably be try!
|
||||
let msg = try MsgTypes_BookmarkNodeList(serializedData: Data(placesRustBuffer: buffer))
|
||||
return unpackProtobufItemList(msg: msg)
|
||||
let items = try self.conn.bookmarksGetAllWithUrl(url: url)
|
||||
return toBookmarkItemDataList(items: items)
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -435,16 +347,10 @@ public class PlacesReadConnection {
|
|||
* - `PlacesError.panic`: If the rust code panics while completing this
|
||||
* operation. (If this occurs, please let us know).
|
||||
*/
|
||||
open func getBookmarkURLForKeyword(keyword: String) throws -> String? {
|
||||
open func getBookmarkURLForKeyword(keyword: String) throws -> Url? {
|
||||
return try queue.sync {
|
||||
try self.checkApi()
|
||||
let maybeURL = try PlacesError.tryUnwrap { error in
|
||||
bookmarks_get_url_for_keyword(self.handle, keyword, error)
|
||||
}
|
||||
guard let url = maybeURL else {
|
||||
return nil
|
||||
}
|
||||
return String(freeingPlacesString: url)
|
||||
return try self.conn.bookmarksGetUrlForKeyword(keyword: keyword)
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -471,16 +377,11 @@ public class PlacesReadConnection {
|
|||
* - `PlacesError.panic`: If the rust code panics while completing this
|
||||
* operation. (If this occurs, please let us know).
|
||||
*/
|
||||
open func searchBookmarks(query: String, limit: UInt) throws -> [BookmarkItem] {
|
||||
open func searchBookmarks(query: String, limit: UInt) throws -> [BookmarkItemData] {
|
||||
return try queue.sync {
|
||||
try self.checkApi()
|
||||
let buffer = try PlacesError.unwrap { error in
|
||||
bookmarks_search(self.handle, query, Int32(limit), error)
|
||||
}
|
||||
defer { places_destroy_bytebuffer(buffer) }
|
||||
// This could probably be try!
|
||||
let msg = try MsgTypes_BookmarkNodeList(serializedData: Data(placesRustBuffer: buffer))
|
||||
return unpackProtobufItemList(msg: msg)
|
||||
let items = try self.conn.bookmarksSearch(query: query, limit: Int32(limit))
|
||||
return toBookmarkItemDataList(items: items)
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -510,64 +411,46 @@ public class PlacesReadConnection {
|
|||
* operation. (If this occurs, please let us
|
||||
* know).
|
||||
*/
|
||||
open func getRecentBookmarks(limit: UInt) throws -> [BookmarkItem] {
|
||||
open func getRecentBookmarks(limit: UInt) throws -> [BookmarkItemData] {
|
||||
return try queue.sync {
|
||||
try self.checkApi()
|
||||
let buffer = try PlacesError.unwrap { error in
|
||||
bookmarks_get_recent(self.handle, Int32(limit), error)
|
||||
}
|
||||
defer { places_destroy_bytebuffer(buffer) }
|
||||
let msg = try MsgTypes_BookmarkNodeList(serializedData: Data(placesRustBuffer: buffer))
|
||||
return unpackProtobufItemList(msg: msg)
|
||||
let items = try self.conn.bookmarksGetRecent(limit: Int32(limit))
|
||||
return toBookmarkItemDataList(items: items)
|
||||
}
|
||||
}
|
||||
|
||||
open func getLatestHistoryMetadataForUrl(url: String) throws -> HistoryMetadata? {
|
||||
open func getLatestHistoryMetadataForUrl(url: Url) throws -> HistoryMetadata? {
|
||||
return try queue.sync {
|
||||
try self.checkApi()
|
||||
return try PlacesError.unwrapWithUniffi { _ in
|
||||
try placesGetLatestHistoryMetadataForUrl(handle: Int64(self.handle), url: url)
|
||||
}
|
||||
return try self.conn.getLatestHistoryMetadataForUrl(url: url)
|
||||
}
|
||||
}
|
||||
|
||||
open func getHistoryMetadataSince(since: Int64) throws -> [HistoryMetadata] {
|
||||
return try queue.sync {
|
||||
try self.checkApi()
|
||||
let result = try PlacesError.unwrapWithUniffi { _ in
|
||||
try placesGetHistoryMetadataSince(handle: Int64(self.handle), start: since)
|
||||
}
|
||||
return result ?? []
|
||||
return try self.conn.getHistoryMetadataSince(since: since)
|
||||
}
|
||||
}
|
||||
|
||||
open func getHistoryMetadataBetween(start: Int64, end: Int64) throws -> [HistoryMetadata] {
|
||||
return try queue.sync {
|
||||
try self.checkApi()
|
||||
let result = try PlacesError.unwrapWithUniffi { _ in
|
||||
try placesGetHistoryMetadataBetween(handle: Int64(self.handle), start: start, end: end)
|
||||
}
|
||||
return result ?? []
|
||||
return try self.conn.getHistoryMetadataBetween(start: start, end: end)
|
||||
}
|
||||
}
|
||||
|
||||
open func getHighlights(weights: HistoryHighlightWeights, limit: Int32) throws -> [HistoryHighlight] {
|
||||
return try queue.sync {
|
||||
try self.checkApi()
|
||||
let result = try PlacesError.unwrapWithUniffi { _ in
|
||||
try placesGetHistoryHighlights(handle: Int64(self.handle), weights: weights, limit: limit)
|
||||
}
|
||||
return result ?? []
|
||||
return try self.conn.getHistoryHighlights(weights: weights, limit: limit)
|
||||
}
|
||||
}
|
||||
|
||||
open func queryHistoryMetadata(query: String, limit: Int32) throws -> [HistoryMetadata] {
|
||||
return try queue.sync {
|
||||
try self.checkApi()
|
||||
let result = try PlacesError.unwrapWithUniffi { _ in
|
||||
try placesQueryHistoryMetadata(handle: Int64(self.handle), query: query, limit: limit)
|
||||
}
|
||||
return result ?? []
|
||||
return try self.conn.queryHistoryMetadata(query: query, limit: limit)
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -616,9 +499,7 @@ public class PlacesWriteConnection: PlacesReadConnection {
|
|||
open func runMaintenance() throws {
|
||||
return try queue.sync {
|
||||
try self.checkApi()
|
||||
try PlacesError.unwrap { error in
|
||||
places_run_maintenance(self.handle, error)
|
||||
}
|
||||
try self.conn.runMaintenance()
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -644,13 +525,10 @@ public class PlacesWriteConnection: PlacesReadConnection {
|
|||
* operation. (If this occurs, please let us know).
|
||||
*/
|
||||
@discardableResult
|
||||
open func deleteBookmarkNode(guid: String) throws -> Bool {
|
||||
open func deleteBookmarkNode(guid: Guid) throws -> Bool {
|
||||
return try queue.sync {
|
||||
try self.checkApi()
|
||||
let resByte = try PlacesError.unwrap { error in
|
||||
bookmarks_delete(self.handle, guid, error)
|
||||
}
|
||||
return resByte != 0
|
||||
return try self.conn.bookmarksDelete(id: guid)
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -682,15 +560,15 @@ public class PlacesWriteConnection: PlacesReadConnection {
|
|||
* operation. (If this occurs, please let us know).
|
||||
*/
|
||||
@discardableResult
|
||||
open func createFolder(parentGUID: String,
|
||||
open func createFolder(parentGUID: Guid,
|
||||
title: String,
|
||||
position: UInt32? = nil) throws -> String
|
||||
position: UInt32? = nil) throws -> Guid
|
||||
{
|
||||
return try queue.sync {
|
||||
try self.checkApi()
|
||||
var msg = insertionMsg(type: .folder, parentGUID: parentGUID, position: position)
|
||||
msg.title = title
|
||||
return try doInsert(msg: msg)
|
||||
let p = position == nil ? BookmarkPosition.append : BookmarkPosition.specific(pos: position ?? 0)
|
||||
let f = InsertableBookmarkFolder(parentGuid: parentGUID, position: p, title: title, children: [])
|
||||
return try doInsert(item: InsertableBookmarkItem.folder(f: f))
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -719,11 +597,12 @@ public class PlacesWriteConnection: PlacesReadConnection {
|
|||
* operation. (If this occurs, please let us know).
|
||||
*/
|
||||
@discardableResult
|
||||
open func createSeparator(parentGUID: String, position: UInt32? = nil) throws -> String {
|
||||
open func createSeparator(parentGUID: Guid, position: UInt32? = nil) throws -> Guid {
|
||||
return try queue.sync {
|
||||
try self.checkApi()
|
||||
let msg = insertionMsg(type: .separator, parentGUID: parentGUID, position: position)
|
||||
return try doInsert(msg: msg)
|
||||
let p = position == nil ? BookmarkPosition.append : BookmarkPosition.specific(pos: position ?? 0)
|
||||
let s = InsertableBookmarkSeparator(parentGuid: parentGUID, position: p)
|
||||
return try doInsert(item: InsertableBookmarkItem.separator(s: s))
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -763,16 +642,13 @@ public class PlacesWriteConnection: PlacesReadConnection {
|
|||
open func createBookmark(parentGUID: String,
|
||||
url: String,
|
||||
title: String?,
|
||||
position: UInt32? = nil) throws -> String
|
||||
position: UInt32? = nil) throws -> Guid
|
||||
{
|
||||
return try queue.sync {
|
||||
try self.checkApi()
|
||||
var msg = insertionMsg(type: .bookmark, parentGUID: parentGUID, position: position)
|
||||
msg.url = url
|
||||
if let t = title {
|
||||
msg.title = t
|
||||
}
|
||||
return try doInsert(msg: msg)
|
||||
let p = position == nil ? BookmarkPosition.append : BookmarkPosition.specific(pos: position ?? 0)
|
||||
let bm = InsertableBookmark(parentGuid: parentGUID, position: p, url: url, title: title)
|
||||
return try doInsert(item: InsertableBookmarkItem.bookmark(b: bm))
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -827,64 +703,29 @@ public class PlacesWriteConnection: PlacesReadConnection {
|
|||
* - `PlacesError.panic`: If the rust code panics while completing this
|
||||
* operation. (If this occurs, please let us know).
|
||||
*/
|
||||
open func updateBookmarkNode(guid: String,
|
||||
parentGUID: String? = nil,
|
||||
open func updateBookmarkNode(guid: Guid,
|
||||
parentGUID: Guid? = nil,
|
||||
position: UInt32? = nil,
|
||||
title: String? = nil,
|
||||
url: String? = nil) throws
|
||||
url: Url? = nil) throws
|
||||
{
|
||||
try queue.sync {
|
||||
try self.checkApi()
|
||||
var msg = MsgTypes_BookmarkNode()
|
||||
msg.guid = guid
|
||||
if let parent = parentGUID {
|
||||
msg.parentGuid = parent
|
||||
}
|
||||
if let pos = position {
|
||||
msg.position = pos
|
||||
}
|
||||
if let t = title {
|
||||
msg.title = t
|
||||
}
|
||||
if let u = url {
|
||||
msg.url = u
|
||||
}
|
||||
let data = try! msg.serializedData()
|
||||
let size = Int32(data.count)
|
||||
try data.withUnsafeBytes { bytes in
|
||||
try PlacesError.unwrap { error in
|
||||
bookmarks_update(self.handle, bytes.bindMemory(to: UInt8.self).baseAddress!, size, error)
|
||||
}
|
||||
}
|
||||
let data = BookmarkUpdateInfo(
|
||||
guid: guid,
|
||||
title: title,
|
||||
url: url,
|
||||
parentGuid: parentGUID,
|
||||
position: position
|
||||
)
|
||||
try self.conn.bookmarksUpdate(data: data)
|
||||
}
|
||||
}
|
||||
|
||||
// Helper for the various creation functions.
|
||||
// Note: Caller synchronizes
|
||||
private func doInsert(msg: MsgTypes_BookmarkNode) throws -> String {
|
||||
// This can only fail if we failed to set the `type` of the msg
|
||||
let data = try! msg.serializedData()
|
||||
let size = Int32(data.count)
|
||||
return try data.withUnsafeBytes { bytes -> String in
|
||||
let idStr = try PlacesError.unwrap { error in
|
||||
bookmarks_insert(self.handle, bytes.bindMemory(to: UInt8.self).baseAddress!, size, error)
|
||||
}
|
||||
return String(freeingPlacesString: idStr)
|
||||
}
|
||||
}
|
||||
|
||||
// Remove the boilerplate common for all insertion messages
|
||||
private func insertionMsg(type: BookmarkNodeType,
|
||||
parentGUID: String,
|
||||
position: UInt32?) -> MsgTypes_BookmarkNode
|
||||
{
|
||||
var msg = MsgTypes_BookmarkNode()
|
||||
msg.nodeType = type.rawValue
|
||||
msg.parentGuid = parentGUID
|
||||
if let pos = position {
|
||||
msg.position = pos
|
||||
}
|
||||
return msg
|
||||
private func doInsert(item: InsertableBookmarkItem) throws -> Guid {
|
||||
return try conn.bookmarksInsert(bookmark: item)
|
||||
}
|
||||
|
||||
open func noteHistoryMetadataObservation(
|
||||
|
@ -892,7 +733,7 @@ public class PlacesWriteConnection: PlacesReadConnection {
|
|||
) throws {
|
||||
try queue.sync {
|
||||
try self.checkApi()
|
||||
try placesNoteHistoryMetadataObservation(handle: Int64(self.handle), data: observation)
|
||||
try self.conn.noteHistoryMetadataObservation(data: observation)
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -931,41 +772,18 @@ public class PlacesWriteConnection: PlacesReadConnection {
|
|||
open func deleteHistoryMetadataOlderThan(olderThan: Int64) throws {
|
||||
try queue.sync {
|
||||
try self.checkApi()
|
||||
try PlacesError.unwrapWithUniffi { _ in
|
||||
try placesMetadataDeleteOlderThan(handle: Int64(self.handle), olderThan: olderThan)
|
||||
}
|
||||
try self.conn.metadataDeleteOlderThan(olderThan: olderThan)
|
||||
}
|
||||
}
|
||||
|
||||
open func deleteHistoryMetadata(key: HistoryMetadataKey) throws {
|
||||
try queue.sync {
|
||||
try self.checkApi()
|
||||
try PlacesError.unwrapWithUniffi { _ in
|
||||
try placesMetadataDelete(
|
||||
handle: Int64(self.handle),
|
||||
url: key.url,
|
||||
referrerUrl: key.referrerUrl,
|
||||
searchTerm: key.searchTerm
|
||||
)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Wrapper around rust interrupt handle.
|
||||
private class InterruptHandle {
|
||||
let ptr: OpaquePointer
|
||||
init(ptr: OpaquePointer) {
|
||||
self.ptr = ptr
|
||||
}
|
||||
|
||||
deinit {
|
||||
places_interrupt_handle_destroy(self.ptr)
|
||||
}
|
||||
|
||||
func interrupt() {
|
||||
PlacesError.unwrapOrLog { error in
|
||||
places_interrupt(self.ptr, error)
|
||||
try self.conn.metadataDelete(
|
||||
url: key.url,
|
||||
referrerUrl: key.referrerUrl,
|
||||
searchTerm: key.searchTerm
|
||||
)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -1,256 +0,0 @@
|
|||
/* This Source Code Form is subject to the terms of the Mozilla Public
|
||||
* License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
|
||||
#pragma once
|
||||
|
||||
#include <stdint.h>
|
||||
|
||||
typedef uint64_t PlacesAPIHandle;
|
||||
typedef uint64_t PlacesConnectionHandle;
|
||||
|
||||
typedef enum PlacesErrorCode {
|
||||
Places_Panic = -1,
|
||||
Places_NoError = 0,
|
||||
Places_UnexpectedError = 1,
|
||||
Places_UrlParseError = 2,
|
||||
Places_DatabaseBusy = 3,
|
||||
Places_DatabaseInterrupted = 4,
|
||||
Places_Corrupt = 5,
|
||||
|
||||
Places_InvalidPlace_InvalidParent = 64 + 0,
|
||||
Places_InvalidPlace_NoSuchItem = 64 + 1,
|
||||
Places_InvalidPlace_UrlTooLong = 64 + 2,
|
||||
Places_InvalidPlace_IllegalChange = 64 + 3,
|
||||
Places_InvalidPlace_CannotUpdateRoot = 64 + 4,
|
||||
} PlacesErrorCode;
|
||||
|
||||
typedef struct PlacesRustError {
|
||||
PlacesErrorCode code;
|
||||
char *_Nullable message;
|
||||
} PlacesRustError;
|
||||
|
||||
typedef struct PlacesRustBuffer {
|
||||
int64_t len;
|
||||
uint8_t *_Nullable data;
|
||||
} PlacesRustBuffer;
|
||||
|
||||
typedef struct RawPlacesInterruptHandle RawPlacesInterruptHandle;
|
||||
|
||||
// Not a named enum because we need int32_t ABI in `places_connection_new`,
|
||||
// and using a named enum would be `int` (which usually is 32 bits these
|
||||
// days, but it's not guaranteed)
|
||||
enum {
|
||||
PlacesConn_ReadOnly = 1,
|
||||
PlacesConn_ReadWrite = 2,
|
||||
// Not exposed.
|
||||
// PlacesConn_Sync = 3,
|
||||
};
|
||||
|
||||
PlacesAPIHandle places_api_new(const char *_Nonnull db_path,
|
||||
PlacesRustError *_Nonnull out_err);
|
||||
|
||||
|
||||
PlacesConnectionHandle places_connection_new(PlacesAPIHandle handle,
|
||||
int32_t type,
|
||||
PlacesRustError *_Nonnull out_err);
|
||||
|
||||
// MARK: History APIs
|
||||
|
||||
void places_note_observation(PlacesConnectionHandle handle,
|
||||
const char *_Nonnull observation_json,
|
||||
PlacesRustError *_Nonnull out_err);
|
||||
|
||||
char *_Nullable places_query_autocomplete(PlacesConnectionHandle handle,
|
||||
const char *_Nonnull search,
|
||||
int32_t limit,
|
||||
PlacesRustError *_Nonnull out_err);
|
||||
|
||||
char *_Nullable places_match_url(PlacesConnectionHandle handle,
|
||||
const char *_Nonnull search,
|
||||
PlacesRustError *_Nonnull out_err);
|
||||
|
||||
void places_bookmarks_import_from_ios(PlacesAPIHandle handle,
|
||||
const char *_Nonnull db_path,
|
||||
PlacesRustError *_Nonnull out_err);
|
||||
|
||||
// XXX we should move this to protobufs rather than port it to swift.
|
||||
// char *_Nullable places_get_visited(PlacesConnectionHandle handle,
|
||||
// char const *_Nonnull const *_Nonnull urls,
|
||||
// int32_t urls_len,
|
||||
// uint8_t *_Nonnull results,
|
||||
// int32_t results_len,
|
||||
// PlacesRustError *_Nonnull out_err);
|
||||
|
||||
char *_Nullable places_get_visited_urls_in_range(PlacesConnectionHandle handle,
|
||||
int64_t start,
|
||||
int64_t end,
|
||||
uint8_t include_remote,
|
||||
PlacesRustError *_Nonnull out_err);
|
||||
|
||||
RawPlacesInterruptHandle *_Nullable places_new_interrupt_handle(PlacesConnectionHandle handle,
|
||||
PlacesRustError *_Nonnull out_err);
|
||||
|
||||
void places_interrupt(RawPlacesInterruptHandle *_Nonnull interrupt,
|
||||
PlacesRustError *_Nonnull out_err);
|
||||
|
||||
void places_delete_visits_for(PlacesConnectionHandle handle,
|
||||
const char *_Nonnull place_url,
|
||||
PlacesRustError *_Nonnull out_err);
|
||||
|
||||
void places_delete_visit(PlacesConnectionHandle handle,
|
||||
const char *_Nonnull place_url,
|
||||
int64_t visit_timestamp,
|
||||
PlacesRustError *_Nonnull out_err);
|
||||
|
||||
void places_delete_visits_between(PlacesConnectionHandle handle,
|
||||
int64_t start,
|
||||
int64_t end,
|
||||
PlacesRustError *_Nonnull out_err);
|
||||
|
||||
void places_wipe_local(PlacesConnectionHandle handle,
|
||||
PlacesRustError *_Nonnull out_err);
|
||||
|
||||
void places_run_maintenance(PlacesConnectionHandle handle,
|
||||
PlacesRustError *_Nonnull out_err);
|
||||
|
||||
void places_prune_destructively(PlacesConnectionHandle handle,
|
||||
PlacesRustError *_Nonnull out_err);
|
||||
|
||||
void places_delete_everything(PlacesConnectionHandle handle,
|
||||
PlacesRustError *_Nonnull out_err);
|
||||
|
||||
PlacesRustBuffer places_get_visit_infos(PlacesConnectionHandle handle,
|
||||
int64_t start_date,
|
||||
int64_t end_date,
|
||||
int32_t exclude_types,
|
||||
PlacesRustError *_Nonnull out_err);
|
||||
|
||||
void places_reset(PlacesAPIHandle handle,
|
||||
PlacesRustError *_Nonnull out_err);
|
||||
|
||||
char *_Nonnull sync15_history_sync(PlacesAPIHandle handle,
|
||||
char const *_Nonnull key_id,
|
||||
char const *_Nonnull access_token,
|
||||
char const *_Nonnull sync_key,
|
||||
char const *_Nonnull tokenserver_url,
|
||||
PlacesRustError *_Nonnull out_err);
|
||||
|
||||
char *_Nonnull sync15_bookmarks_sync(PlacesAPIHandle handle,
|
||||
char const *_Nonnull key_id,
|
||||
char const *_Nonnull access_token,
|
||||
char const *_Nonnull sync_key,
|
||||
char const *_Nonnull tokenserver_url,
|
||||
PlacesRustError *_Nonnull out_err);
|
||||
|
||||
RawPlacesInterruptHandle *_Nullable
|
||||
places_new_sync_conn_interrupt_handle(PlacesAPIHandle handle,
|
||||
PlacesRustError *_Nonnull out_err);
|
||||
|
||||
// MARK: Bookmarks APIs
|
||||
|
||||
PlacesRustBuffer bookmarks_get_by_guid(PlacesConnectionHandle handle,
|
||||
char const *_Nonnull guid,
|
||||
uint8_t getDirectChildren,
|
||||
PlacesRustError *_Nonnull out_err);
|
||||
|
||||
PlacesRustBuffer bookmarks_get_all_with_url(PlacesConnectionHandle handle,
|
||||
char const *_Nonnull url,
|
||||
PlacesRustError *_Nonnull out_err);
|
||||
|
||||
char *_Nullable bookmarks_get_url_for_keyword(PlacesConnectionHandle handle,
|
||||
char const *_Nonnull keyword,
|
||||
PlacesRustError *_Nonnull out_err);
|
||||
|
||||
PlacesRustBuffer bookmarks_search(PlacesConnectionHandle handle,
|
||||
char const *_Nonnull query,
|
||||
int32_t limit,
|
||||
PlacesRustError *_Nonnull out_err);
|
||||
|
||||
PlacesRustBuffer bookmarks_get_recent(PlacesConnectionHandle handle,
|
||||
int32_t limit,
|
||||
PlacesRustError *_Nonnull out_err);
|
||||
|
||||
PlacesRustBuffer bookmarks_get_tree(PlacesConnectionHandle handle,
|
||||
char const *_Nullable root_guid,
|
||||
PlacesRustError *_Nonnull out_err);
|
||||
|
||||
char *_Nullable bookmarks_insert(PlacesConnectionHandle handle,
|
||||
uint8_t const *_Nonnull data,
|
||||
int32_t len,
|
||||
PlacesRustError *_Nonnull out_err);
|
||||
|
||||
void bookmarks_update(PlacesConnectionHandle handle,
|
||||
uint8_t const *_Nonnull data,
|
||||
int32_t len,
|
||||
PlacesRustError *_Nonnull out_err);
|
||||
|
||||
uint8_t bookmarks_delete(PlacesConnectionHandle handle,
|
||||
char const *_Nonnull guid_to_delete,
|
||||
PlacesRustError *_Nonnull out_err);
|
||||
|
||||
void bookmarks_reset(PlacesAPIHandle handle,
|
||||
PlacesRustError *_Nonnull out_err);
|
||||
|
||||
// MARK: memory/lifecycle management
|
||||
|
||||
void places_api_return_write_conn(PlacesAPIHandle api,
|
||||
PlacesConnectionHandle conn,
|
||||
PlacesRustError *_Nonnull out_err);
|
||||
|
||||
void places_destroy_bytebuffer(PlacesRustBuffer bb);
|
||||
|
||||
void places_destroy_string(char const *_Nonnull s);
|
||||
|
||||
void places_interrupt_handle_destroy(RawPlacesInterruptHandle *_Nonnull handle);
|
||||
|
||||
void places_connection_destroy(PlacesConnectionHandle conn,
|
||||
PlacesRustError *_Nonnull out_err);
|
||||
|
||||
void places_api_destroy(PlacesAPIHandle api,
|
||||
PlacesRustError *_Nonnull out_err);
|
||||
|
||||
// MARK: History metadata storage
|
||||
PlacesRustBuffer places_get_latest_history_metadata_for_url(
|
||||
PlacesConnectionHandle handle,
|
||||
char const *_Nonnull url,
|
||||
PlacesRustError *_Nonnull out_err);
|
||||
|
||||
PlacesRustBuffer places_get_history_metadata_between(
|
||||
PlacesConnectionHandle handle,
|
||||
int64_t start,
|
||||
int64_t end,
|
||||
PlacesRustError *_Nonnull out_err);
|
||||
|
||||
PlacesRustBuffer places_get_history_metadata_since(
|
||||
PlacesConnectionHandle handle,
|
||||
int64_t since,
|
||||
PlacesRustError *_Nonnull out_err);
|
||||
|
||||
PlacesRustBuffer places_get_history_highlights(
|
||||
PlacesConnectionHandle handle,
|
||||
int32_t limit,
|
||||
PlacesRustError *_Nonnull out_err);
|
||||
|
||||
PlacesRustBuffer places_query_history_metadata(
|
||||
PlacesConnectionHandle handle,
|
||||
char const *_Nonnull query,
|
||||
int32_t limit,
|
||||
PlacesRustError *_Nonnull out_err);
|
||||
|
||||
void places_note_history_metadata_observation(
|
||||
PlacesConnectionHandle handle,
|
||||
uint8_t const *_Nonnull data,
|
||||
int32_t len,
|
||||
PlacesRustError *_Nonnull out_err);
|
||||
|
||||
void places_metadata_delete_older_than(
|
||||
PlacesConnectionHandle handle,
|
||||
int64_t olderThan,
|
||||
PlacesRustError *_Nonnull out_err);
|
||||
|
||||
void places_metadata_delete(
|
||||
PlacesConnectionHandle handle,
|
||||
char const *_Nonnull url,
|
||||
char const *_Nonnull referrer_url,
|
||||
char const *_Nonnull search_term,
|
||||
PlacesRustError *_Nonnull out_err);
|
|
@ -4,8 +4,8 @@
|
|||
|
||||
use crate::db::PlacesDb;
|
||||
use crate::error::Result;
|
||||
use crate::ffi::{MatchReason as FfiMatchReason, SearchResult as FfiSearchResult};
|
||||
pub use crate::match_impl::{MatchBehavior, SearchBehavior};
|
||||
use crate::msg_types::{SearchResultMessage, SearchResultReason};
|
||||
use rusqlite::{types::ToSql, Row};
|
||||
use serde_derive::*;
|
||||
use sql_support::{maybe_log_plan, ConnExt};
|
||||
|
@ -85,7 +85,7 @@ pub fn search_frecent(conn: &PlacesDb, params: SearchParams) -> Result<Vec<Searc
|
|||
Ok(matches)
|
||||
}
|
||||
|
||||
pub fn match_url(conn: &PlacesDb, query: impl AsRef<str>) -> Result<Option<String>> {
|
||||
pub fn match_url(conn: &PlacesDb, query: impl AsRef<str>) -> Result<Option<Url>> {
|
||||
let scope = conn.begin_interrupt_scope();
|
||||
let matcher = OriginOrUrl::new(query.as_ref());
|
||||
// Note: The matcher ignores the limit argument (it's a trait method)
|
||||
|
@ -94,7 +94,7 @@ pub fn match_url(conn: &PlacesDb, query: impl AsRef<str>) -> Result<Option<Strin
|
|||
// Doing it like this lets us move the result, avoiding a copy (which almost
|
||||
// certainly doesn't matter but whatever)
|
||||
if let Some(res) = results.into_iter().next() {
|
||||
Ok(Some(res.url.into()))
|
||||
Ok(Some(res.url))
|
||||
} else {
|
||||
Ok(None)
|
||||
}
|
||||
|
@ -339,30 +339,26 @@ impl SearchResult {
|
|||
}
|
||||
}
|
||||
|
||||
impl From<SearchResult> for SearchResultMessage {
|
||||
impl From<SearchResult> for FfiSearchResult {
|
||||
fn from(res: SearchResult) -> Self {
|
||||
Self {
|
||||
url: res.url.into(),
|
||||
url: res.url,
|
||||
title: res.title,
|
||||
frecency: res.frecency,
|
||||
reasons: res
|
||||
.reasons
|
||||
.into_iter()
|
||||
.map(|r| Into::<SearchResultReason>::into(r) as i32)
|
||||
.collect::<Vec<i32>>(),
|
||||
reasons: res.reasons.into_iter().map(Into::into).collect::<Vec<_>>(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl From<MatchReason> for SearchResultReason {
|
||||
impl From<MatchReason> for FfiMatchReason {
|
||||
fn from(mr: MatchReason) -> Self {
|
||||
match mr {
|
||||
MatchReason::Keyword => SearchResultReason::Keyword,
|
||||
MatchReason::Origin => SearchResultReason::Origin,
|
||||
MatchReason::Url => SearchResultReason::Url,
|
||||
MatchReason::PreviousUse => SearchResultReason::PreviousUse,
|
||||
MatchReason::Bookmark => SearchResultReason::Bookmark,
|
||||
MatchReason::Tags(_) => SearchResultReason::Tag,
|
||||
MatchReason::Keyword => FfiMatchReason::Keyword,
|
||||
MatchReason::Origin => FfiMatchReason::Origin,
|
||||
MatchReason::Url => FfiMatchReason::UrlMatch,
|
||||
MatchReason::PreviousUse => FfiMatchReason::PreviousUse,
|
||||
MatchReason::Bookmark => FfiMatchReason::Bookmark,
|
||||
MatchReason::Tags(_) => FfiMatchReason::Tags,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -106,6 +106,13 @@ pub struct SyncState {
|
|||
pub disk_cached_state: Cell<Option<String>>,
|
||||
}
|
||||
|
||||
/// For uniffi we need to expose our `Arc` returning constructor as a global function :(
|
||||
/// https://github.com/mozilla/uniffi-rs/pull/1063 would fix this, but got some pushback
|
||||
/// meaning we are forced into this unfortunate workaround.
|
||||
pub fn places_api_new(db_name: impl AsRef<Path>) -> Result<Arc<PlacesApi>> {
|
||||
PlacesApi::new(db_name)
|
||||
}
|
||||
|
||||
/// The entry-point to the places API. This object gives access to database
|
||||
/// connections and other helpers. It enforces that only 1 write connection
|
||||
/// can exist to the database at once.
|
||||
|
@ -452,21 +459,23 @@ impl PlacesApi {
|
|||
SqlInterruptScope::new(Arc::new(AtomicUsize::new(0)))
|
||||
}
|
||||
|
||||
// Deprecated/Broken interrupt handler method, let's try to replace it with the above methods
|
||||
// ASAP
|
||||
// Deprecated/Broken interrupt handler method
|
||||
// This should be removed as part of https://github.com/mozilla/application-services/issues/1684
|
||||
//
|
||||
// There are two issues with this one:
|
||||
// - As soon as this method returns, the sync connection will be dropped, which means the
|
||||
// SqlInterruptHandle will not work.
|
||||
// - We want the sync connection to be lazy, but we call this on initialization and force a
|
||||
// connection to be created.
|
||||
pub fn new_sync_conn_interrupt_handle(&self) -> Result<SqlInterruptHandle> {
|
||||
// We have to use Arc in the return type to be able to properly
|
||||
// pass the SqlInterruptHandle as an object through Uniffi
|
||||
pub fn new_sync_conn_interrupt_handle(&self) -> Result<Arc<SqlInterruptHandle>> {
|
||||
// Probably not necessary to lock here, since this should only get
|
||||
// called in startup.
|
||||
let _guard = self.sync_state.lock();
|
||||
let conn = self.get_sync_connection()?;
|
||||
let db = conn.lock();
|
||||
Ok(db.new_interrupt_handle())
|
||||
Ok(Arc::new(db.new_interrupt_handle()))
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -2158,8 +2158,8 @@ mod tests {
|
|||
// Insert two local bookmarks with the same URL A (so they'll have
|
||||
// identical tags) and a third with a different URL B, but one same
|
||||
// tag as A.
|
||||
let local_bookmarks = &[
|
||||
&InsertableBookmark {
|
||||
let local_bookmarks = vec![
|
||||
InsertableBookmark {
|
||||
parent_guid: BookmarkRootGuid::Unfiled.as_guid(),
|
||||
position: BookmarkPosition::Append,
|
||||
date_added: None,
|
||||
|
@ -2169,7 +2169,7 @@ mod tests {
|
|||
title: Some("A1".into()),
|
||||
}
|
||||
.into(),
|
||||
&InsertableBookmark {
|
||||
InsertableBookmark {
|
||||
parent_guid: BookmarkRootGuid::Menu.as_guid(),
|
||||
position: BookmarkPosition::Append,
|
||||
date_added: None,
|
||||
|
@ -2179,7 +2179,7 @@ mod tests {
|
|||
title: Some("A2".into()),
|
||||
}
|
||||
.into(),
|
||||
&InsertableBookmark {
|
||||
InsertableBookmark {
|
||||
parent_guid: BookmarkRootGuid::Unfiled.as_guid(),
|
||||
position: BookmarkPosition::Append,
|
||||
date_added: None,
|
||||
|
@ -2198,7 +2198,7 @@ mod tests {
|
|||
vec!["two", "three", "three", "four"],
|
||||
),
|
||||
];
|
||||
for bm in local_bookmarks {
|
||||
for bm in local_bookmarks.into_iter() {
|
||||
insert_bookmark(&writer, bm)?;
|
||||
}
|
||||
for (url, tags) in local_tags {
|
||||
|
@ -2556,7 +2556,7 @@ mod tests {
|
|||
// Insert local item with tagged URL.
|
||||
insert_bookmark(
|
||||
&writer,
|
||||
&InsertableBookmark {
|
||||
InsertableBookmark {
|
||||
parent_guid: BookmarkRootGuid::Unfiled.as_guid(),
|
||||
position: BookmarkPosition::Append,
|
||||
date_added: None,
|
||||
|
|
|
@ -64,9 +64,6 @@ pub enum ErrorKind {
|
|||
#[error("Illegal database path: {0:?}")]
|
||||
IllegalDatabasePath(std::path::PathBuf),
|
||||
|
||||
#[error("Protobuf decode error: {0}")]
|
||||
ProtobufDecodeError(#[from] prost::DecodeError),
|
||||
|
||||
#[error("UTF8 Error: {0}")]
|
||||
Utf8Error(#[from] std::str::Utf8Error),
|
||||
|
||||
|
@ -82,6 +79,9 @@ pub enum ErrorKind {
|
|||
InvalidMetadataObservation(InvalidMetadataObservation),
|
||||
}
|
||||
|
||||
// This defines the `Error` and `Result` types exported by this module.
|
||||
// These errors do not make it across the FFI, so can be considered "private" to the
|
||||
// Rust side of the world.
|
||||
error_support::define_error! {
|
||||
ErrorKind {
|
||||
(SyncAdapterError, sync15::Error),
|
||||
|
@ -92,7 +92,6 @@ error_support::define_error! {
|
|||
(Corruption, Corruption),
|
||||
(IoError, std::io::Error),
|
||||
(MergeError, dogear::Error),
|
||||
(ProtobufDecodeError, prost::DecodeError),
|
||||
(InterruptedError, Interrupted),
|
||||
(Utf8Error, std::str::Utf8Error),
|
||||
(OpenDatabaseError, sql_support::open_database::Error),
|
||||
|
@ -163,3 +162,151 @@ pub enum InvalidMetadataObservation {
|
|||
#[error("Observed view time is invalid (too long)")]
|
||||
ViewTimeTooLong,
|
||||
}
|
||||
|
||||
// This is the error object thrown over the FFI.
|
||||
#[derive(Debug, thiserror::Error)]
|
||||
pub enum PlacesError {
|
||||
#[error("Unexpected error: {0}")]
|
||||
UnexpectedPlacesException(String),
|
||||
|
||||
#[error("UrlParseFailed: {0}")]
|
||||
UrlParseFailed(String),
|
||||
|
||||
#[error("JsonParseFailed: {0}")]
|
||||
JsonParseFailed(String),
|
||||
|
||||
#[error("PlacesConnectionBusy error: {0}")]
|
||||
PlacesConnectionBusy(String),
|
||||
|
||||
#[error("Operation Interrupted: {0}")]
|
||||
OperationInterrupted(String),
|
||||
|
||||
/// Error indicating bookmarks corruption. If this occurs, we
|
||||
/// would appreciate reports.
|
||||
///
|
||||
/// Eventually it should be fixed up, when detected as part of
|
||||
/// `runMaintenance`.
|
||||
#[error("BookmarksCorruption error: {0}")]
|
||||
BookmarksCorruption(String),
|
||||
|
||||
/// Thrown when providing a guid referring to a non-folder as the
|
||||
/// parentGUID parameter to a create or update
|
||||
#[error("Invalid Parent: {0}")]
|
||||
InvalidParent(String),
|
||||
|
||||
/// Thrown when providing a guid to a create or update function
|
||||
/// which does not refer to a known bookmark.
|
||||
#[error("Unknown bookmark: {0}")]
|
||||
UnknownBookmarkItem(String),
|
||||
|
||||
/// Thrown when attempting to insert a URL greater than 65536 bytes
|
||||
/// (after punycoding and percent encoding).
|
||||
///
|
||||
/// Attempting to truncate the URL is difficult and subtle, and
|
||||
/// is guaranteed to result in a URL different from the one the
|
||||
/// user attempted to bookmark, and so an error is thrown instead.
|
||||
#[error("URL too long: {0}")]
|
||||
UrlTooLong(String),
|
||||
|
||||
/// Thrown when attempting to update a bookmark item in an illegal
|
||||
/// way. For example, attempting to change the URL of a bookmark
|
||||
/// folder, or update the title of a separator, etc.
|
||||
#[error("Invalid Bookmark: {0}")]
|
||||
InvalidBookmarkUpdate(String),
|
||||
|
||||
/// Thrown when:
|
||||
/// - Attempting to insert a child under BookmarkRoot.Root,
|
||||
/// - Attempting to update any of the bookmark roots.
|
||||
/// - Attempting to delete any of the bookmark roots.
|
||||
#[error("CannotUpdateRoot error: {0}")]
|
||||
CannotUpdateRoot(String),
|
||||
|
||||
#[error("Unexpected error: {0}")]
|
||||
InternalPanic(String),
|
||||
}
|
||||
|
||||
// A port of the error conversion stuff that was in ffi.rs - it turns our
|
||||
// "internal" errors into "public" ones.
|
||||
fn make_places_error(error: &Error) -> PlacesError {
|
||||
let label = error.to_string();
|
||||
let kind = error.kind();
|
||||
match kind {
|
||||
ErrorKind::InvalidPlaceInfo(info) => {
|
||||
log::error!("Invalid place info: {}", info);
|
||||
let label = info.to_string();
|
||||
match &info {
|
||||
InvalidPlaceInfo::InvalidParent(..) | InvalidPlaceInfo::UrlTooLong => {
|
||||
PlacesError::InvalidParent(label)
|
||||
}
|
||||
InvalidPlaceInfo::NoSuchGuid(..) => PlacesError::UnknownBookmarkItem(label),
|
||||
InvalidPlaceInfo::IllegalChange(..) => PlacesError::InvalidBookmarkUpdate(label),
|
||||
InvalidPlaceInfo::CannotUpdateRoot(..) => PlacesError::CannotUpdateRoot(label),
|
||||
_ => PlacesError::UnexpectedPlacesException(label),
|
||||
}
|
||||
}
|
||||
ErrorKind::UrlParseError(e) => {
|
||||
log::error!("URL parse error: {}", e);
|
||||
PlacesError::UrlParseFailed(e.to_string())
|
||||
}
|
||||
// Can't pattern match on `err` without adding a dep on the sqlite3-sys crate,
|
||||
// so we just use a `if` guard.
|
||||
ErrorKind::SqlError(rusqlite::Error::SqliteFailure(err, msg))
|
||||
if err.code == rusqlite::ErrorCode::DatabaseBusy =>
|
||||
{
|
||||
log::error!("Database busy: {:?} {:?}", err, msg);
|
||||
PlacesError::PlacesConnectionBusy(label)
|
||||
}
|
||||
ErrorKind::SqlError(rusqlite::Error::SqliteFailure(err, _))
|
||||
if err.code == rusqlite::ErrorCode::OperationInterrupted =>
|
||||
{
|
||||
log::info!("Operation interrupted");
|
||||
PlacesError::OperationInterrupted(label)
|
||||
}
|
||||
ErrorKind::InterruptedError(err) => {
|
||||
// Can't unify with the above ... :(
|
||||
log::info!("Operation interrupted");
|
||||
PlacesError::OperationInterrupted(err.to_string())
|
||||
}
|
||||
ErrorKind::Corruption(e) => {
|
||||
log::info!("The store is corrupt: {}", e);
|
||||
PlacesError::BookmarksCorruption(e.to_string())
|
||||
}
|
||||
ErrorKind::SyncAdapterError(e) => {
|
||||
use sync15::ErrorKind;
|
||||
match e.kind() {
|
||||
ErrorKind::StoreError(store_error) => {
|
||||
// If it's a type-erased version of one of our errors, try
|
||||
// and resolve it.
|
||||
if let Some(places_err) = store_error.downcast_ref::<Error>() {
|
||||
log::info!("Recursing to resolve places error");
|
||||
make_places_error(places_err)
|
||||
} else {
|
||||
log::error!("Unexpected sync error: {:?}", label);
|
||||
PlacesError::UnexpectedPlacesException(label)
|
||||
}
|
||||
}
|
||||
_ => {
|
||||
log::error!("Unexpected sync error: {:?}", label);
|
||||
PlacesError::UnexpectedPlacesException(label)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
err => {
|
||||
log::error!("Unexpected error: {:?}", err);
|
||||
PlacesError::InternalPanic(label)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl From<Error> for PlacesError {
|
||||
fn from(e: Error) -> PlacesError {
|
||||
make_places_error(&e)
|
||||
}
|
||||
}
|
||||
|
||||
impl From<serde_json::Error> for PlacesError {
|
||||
fn from(e: serde_json::Error) -> PlacesError {
|
||||
PlacesError::JsonParseFailed(format!("{}", e))
|
||||
}
|
||||
}
|
||||
|
|
|
@ -4,322 +4,557 @@
|
|||
|
||||
// This module implement the traits that make the FFI code easier to manage.
|
||||
|
||||
use crate::error::{Error, ErrorKind, InvalidPlaceInfo};
|
||||
use crate::msg_types;
|
||||
use crate::api::matcher::{self, search_frecent, SearchParams};
|
||||
use crate::api::places_api::places_api_new;
|
||||
use crate::error::PlacesError;
|
||||
use crate::import::fennec::import_bookmarks;
|
||||
use crate::import::fennec::import_history;
|
||||
use crate::import::fennec::import_pinned_sites;
|
||||
use crate::storage;
|
||||
use crate::storage::bookmarks;
|
||||
use crate::storage::bookmarks::BookmarkPosition;
|
||||
use crate::storage::history_metadata::{
|
||||
DocumentType, HistoryHighlight, HistoryHighlightWeights, HistoryMetadata,
|
||||
HistoryMetadataObservation,
|
||||
};
|
||||
use crate::storage::{history, history_metadata};
|
||||
use crate::types::VisitTransitionSet;
|
||||
use crate::ConnectionType;
|
||||
use crate::VisitObservation;
|
||||
use crate::VisitTransition;
|
||||
use crate::{PlacesApi, PlacesDb};
|
||||
use ffi_support::{
|
||||
implement_into_ffi_by_delegation, implement_into_ffi_by_protobuf, ConcurrentHandleMap,
|
||||
ErrorCode, ExternError, Handle, HandleError,
|
||||
};
|
||||
use parking_lot::Mutex;
|
||||
use sql_support::SqlInterruptHandle;
|
||||
use std::sync::Arc;
|
||||
use sync_guid::Guid;
|
||||
use types::Timestamp;
|
||||
use url::Url;
|
||||
|
||||
lazy_static::lazy_static! {
|
||||
pub static ref APIS: ConcurrentHandleMap<Arc<PlacesApi>> = ConcurrentHandleMap::new();
|
||||
pub static ref CONNECTIONS: ConcurrentHandleMap<PlacesDb> = ConcurrentHandleMap::new();
|
||||
// From https://searchfox.org/mozilla-central/rev/1674b86019a96f076e0f98f1d0f5f3ab9d4e9020/browser/components/newtab/lib/TopSitesFeed.jsm#87
|
||||
const SKIP_ONE_PAGE_FRECENCY_THRESHOLD: i64 = 101 + 1;
|
||||
|
||||
// All of our functions in this module use a `Result` type with the error we throw over
|
||||
// the FFI.
|
||||
type Result<T> = std::result::Result<T, PlacesError>;
|
||||
|
||||
// `bookmarks::InsertableItem` is clear for Rust code, but just `InsertableItem` is less
|
||||
// clear in the UDL - so change some of the type names.
|
||||
type InsertableBookmarkItem = crate::storage::bookmarks::InsertableItem;
|
||||
type InsertableBookmarkFolder = crate::storage::bookmarks::InsertableFolder;
|
||||
type InsertableBookmarkSeparator = crate::storage::bookmarks::InsertableSeparator;
|
||||
use crate::storage::bookmarks::InsertableBookmark;
|
||||
|
||||
use crate::storage::bookmarks::BookmarkUpdateInfo;
|
||||
|
||||
// And types used when fetching items.
|
||||
type BookmarkItem = crate::storage::bookmarks::fetch::Item;
|
||||
type BookmarkFolder = crate::storage::bookmarks::fetch::Folder;
|
||||
type BookmarkSeparator = crate::storage::bookmarks::fetch::Separator;
|
||||
use crate::storage::bookmarks::fetch::BookmarkData;
|
||||
|
||||
impl UniffiCustomTypeWrapper for Url {
|
||||
type Wrapped = String;
|
||||
|
||||
fn wrap(val: Self::Wrapped) -> uniffi::Result<url::Url> {
|
||||
match Url::parse(val.as_str()) {
|
||||
Ok(url) => Ok(url),
|
||||
Err(e) => Err(PlacesError::UrlParseFailed(e.to_string()).into()),
|
||||
}
|
||||
}
|
||||
|
||||
fn unwrap(obj: Self) -> Self::Wrapped {
|
||||
obj.into()
|
||||
}
|
||||
}
|
||||
|
||||
fn parse_url(url: &str) -> crate::Result<url::Url> {
|
||||
Ok(url::Url::parse(url)?)
|
||||
impl UniffiCustomTypeWrapper for Timestamp {
|
||||
type Wrapped = i64;
|
||||
|
||||
fn wrap(val: Self::Wrapped) -> uniffi::Result<Self> {
|
||||
Ok(Timestamp(val as u64))
|
||||
}
|
||||
|
||||
fn unwrap(obj: Self) -> Self::Wrapped {
|
||||
obj.as_millis() as i64
|
||||
}
|
||||
}
|
||||
|
||||
fn places_get_latest_history_metadata_for_url(
|
||||
handle: i64,
|
||||
url: String,
|
||||
) -> Result<Option<HistoryMetadata>, ErrorWrapper> {
|
||||
CONNECTIONS.get(
|
||||
Handle::from_u64(handle as u64)?,
|
||||
|conn| -> Result<_, ErrorWrapper> {
|
||||
let url = parse_url(url.as_str())?;
|
||||
let metadata = crate::storage::history_metadata::get_latest_for_url(conn, &url)?;
|
||||
Ok(metadata)
|
||||
},
|
||||
)
|
||||
impl UniffiCustomTypeWrapper for VisitTransitionSet {
|
||||
type Wrapped = i32;
|
||||
|
||||
fn wrap(val: Self::Wrapped) -> uniffi::Result<Self> {
|
||||
Ok(VisitTransitionSet::from_u16(val as u16).expect("Bug: Invalid VisitTransitionSet"))
|
||||
}
|
||||
|
||||
fn unwrap(obj: Self) -> Self::Wrapped {
|
||||
VisitTransitionSet::into_u16(obj) as i32
|
||||
}
|
||||
}
|
||||
|
||||
fn places_get_history_metadata_between(
|
||||
handle: i64,
|
||||
start: i64,
|
||||
end: i64,
|
||||
) -> Result<Vec<HistoryMetadata>, ErrorWrapper> {
|
||||
log::debug!("places_get_history_metadata_between");
|
||||
CONNECTIONS.get(
|
||||
Handle::from_u64(handle as u64)?,
|
||||
|conn| -> Result<_, ErrorWrapper> {
|
||||
let between = crate::storage::history_metadata::get_between(conn, start, end)?;
|
||||
Ok(between)
|
||||
},
|
||||
)
|
||||
impl UniffiCustomTypeWrapper for Guid {
|
||||
type Wrapped = String;
|
||||
|
||||
fn wrap(val: Self::Wrapped) -> uniffi::Result<Guid> {
|
||||
Ok(Guid::new(val.as_str()))
|
||||
}
|
||||
|
||||
fn unwrap(obj: Self) -> Self::Wrapped {
|
||||
obj.into()
|
||||
}
|
||||
}
|
||||
|
||||
fn places_get_history_metadata_since(
|
||||
handle: i64,
|
||||
start: i64,
|
||||
) -> Result<Vec<HistoryMetadata>, ErrorWrapper> {
|
||||
log::debug!("places_get_history_metadata_since");
|
||||
CONNECTIONS.get(
|
||||
Handle::from_u64(handle as u64)?,
|
||||
|conn| -> Result<_, ErrorWrapper> {
|
||||
let since = crate::storage::history_metadata::get_since(conn, start)?;
|
||||
Ok(since)
|
||||
},
|
||||
)
|
||||
impl PlacesApi {
|
||||
fn new_connection(&self, conn_type: ConnectionType) -> Result<Arc<PlacesConnection>> {
|
||||
let db = self.open_connection(conn_type)?;
|
||||
let connection = PlacesConnection { db: Mutex::new(db) };
|
||||
Ok(Arc::new(connection))
|
||||
}
|
||||
|
||||
// NOTE: These methods are unused on Android but will remain needed for
|
||||
// iOS until we can move them to the sync manager and replace their existing
|
||||
// sync engines with ours
|
||||
fn history_sync(
|
||||
&self,
|
||||
key_id: String,
|
||||
access_token: String,
|
||||
sync_key: String,
|
||||
tokenserver_url: Url,
|
||||
) -> Result<String> {
|
||||
let root_sync_key = match sync15::KeyBundle::from_ksync_base64(sync_key.as_str()) {
|
||||
Ok(key) => Ok(key),
|
||||
Err(err) => Err(PlacesError::UnexpectedPlacesException(err.to_string())),
|
||||
}?;
|
||||
let ping = self.sync_history(
|
||||
&sync15::Sync15StorageClientInit {
|
||||
key_id,
|
||||
access_token,
|
||||
tokenserver_url,
|
||||
},
|
||||
&root_sync_key,
|
||||
)?;
|
||||
Ok(serde_json::to_string(&ping).unwrap())
|
||||
}
|
||||
|
||||
fn bookmarks_sync(
|
||||
&self,
|
||||
key_id: String,
|
||||
access_token: String,
|
||||
sync_key: String,
|
||||
tokenserver_url: Url,
|
||||
) -> Result<String> {
|
||||
let root_sync_key = match sync15::KeyBundle::from_ksync_base64(sync_key.as_str()) {
|
||||
Ok(key) => Ok(key),
|
||||
Err(err) => Err(PlacesError::UnexpectedPlacesException(err.to_string())),
|
||||
}?;
|
||||
let ping = self.sync_bookmarks(
|
||||
&sync15::Sync15StorageClientInit {
|
||||
key_id,
|
||||
access_token,
|
||||
tokenserver_url,
|
||||
},
|
||||
&root_sync_key,
|
||||
)?;
|
||||
Ok(serde_json::to_string(&ping).unwrap())
|
||||
}
|
||||
|
||||
fn places_pinned_sites_import_from_fennec(&self, db_path: String) -> Result<Vec<BookmarkItem>> {
|
||||
let sites = import_pinned_sites(self, db_path.as_str())?
|
||||
.into_iter()
|
||||
.map(BookmarkItem::from)
|
||||
.collect();
|
||||
Ok(sites)
|
||||
}
|
||||
|
||||
fn places_history_import_from_fennec(&self, db_path: String) -> Result<String> {
|
||||
let metrics = import_history(self, db_path.as_str())?;
|
||||
Ok(serde_json::to_string(&metrics)?)
|
||||
}
|
||||
|
||||
fn places_bookmarks_import_from_fennec(&self, db_path: String) -> Result<String> {
|
||||
let metrics = import_bookmarks(self, db_path.as_str())?;
|
||||
Ok(serde_json::to_string(&metrics)?)
|
||||
}
|
||||
|
||||
fn places_bookmarks_import_from_ios(&self, db_path: String) -> Result<()> {
|
||||
import_bookmarks(self, db_path.as_str())?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn bookmarks_reset(&self) -> Result<()> {
|
||||
self.reset_bookmarks()?;
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
fn places_query_history_metadata(
|
||||
handle: i64,
|
||||
query: String,
|
||||
limit: i32,
|
||||
) -> Result<Vec<HistoryMetadata>, ErrorWrapper> {
|
||||
log::debug!("places_get_history_metadata_since");
|
||||
CONNECTIONS.get(
|
||||
Handle::from_u64(handle as u64)?,
|
||||
|conn| -> Result<_, ErrorWrapper> {
|
||||
let metadata = crate::storage::history_metadata::query(conn, query.as_str(), limit)?;
|
||||
Ok(metadata)
|
||||
},
|
||||
)
|
||||
pub struct PlacesConnection {
|
||||
db: Mutex<PlacesDb>,
|
||||
}
|
||||
|
||||
fn places_get_history_highlights(
|
||||
handle: i64,
|
||||
weights: HistoryHighlightWeights,
|
||||
limit: i32,
|
||||
) -> Result<Vec<HistoryHighlight>, ErrorWrapper> {
|
||||
log::debug!("places_get_history_highlights");
|
||||
CONNECTIONS.get(
|
||||
Handle::from_u64(handle as u64)?,
|
||||
|conn| -> Result<_, ErrorWrapper> {
|
||||
let highlights =
|
||||
crate::storage::history_metadata::get_highlights(conn, weights, limit)?;
|
||||
Ok(highlights)
|
||||
},
|
||||
)
|
||||
}
|
||||
impl PlacesConnection {
|
||||
// A helper that gets the connection from the mutex and converts errors.
|
||||
fn with_conn<F, T>(&self, f: F) -> Result<T>
|
||||
where
|
||||
F: FnOnce(&PlacesDb) -> crate::error::Result<T>,
|
||||
{
|
||||
let conn = self.db.lock();
|
||||
Ok(f(&conn)?)
|
||||
}
|
||||
|
||||
fn places_note_history_metadata_observation(
|
||||
handle: i64,
|
||||
data: HistoryMetadataObservation,
|
||||
) -> Result<(), ErrorWrapper> {
|
||||
log::debug!("places_note_history_metadata_observation");
|
||||
CONNECTIONS.get(
|
||||
Handle::from_u64(handle as u64)?,
|
||||
|conn| -> Result<_, ErrorWrapper> {
|
||||
crate::storage::history_metadata::apply_metadata_observation(conn, data)?;
|
||||
Ok(())
|
||||
},
|
||||
)
|
||||
}
|
||||
// This should be refactored/removed as part of https://github.com/mozilla/application-services/issues/1684
|
||||
// We have to use Arc in the return type to be able to properly
|
||||
// pass the SqlInterruptHandle as an object through Uniffi
|
||||
fn new_interrupt_handle(&self) -> Result<Arc<SqlInterruptHandle>> {
|
||||
Ok(Arc::new(
|
||||
self.with_conn(|conn| Ok(conn.new_interrupt_handle()))?,
|
||||
))
|
||||
}
|
||||
|
||||
fn places_metadata_delete_older_than(handle: i64, older_than: i64) -> Result<(), ErrorWrapper> {
|
||||
log::debug!("places_note_history_metadata_observation");
|
||||
CONNECTIONS.get(
|
||||
Handle::from_u64(handle as u64)?,
|
||||
|conn| -> Result<_, ErrorWrapper> {
|
||||
crate::storage::history_metadata::delete_older_than(conn, older_than)?;
|
||||
Ok(())
|
||||
},
|
||||
)
|
||||
}
|
||||
fn get_latest_history_metadata_for_url(&self, url: Url) -> Result<Option<HistoryMetadata>> {
|
||||
self.with_conn(|conn| history_metadata::get_latest_for_url(conn, &url))
|
||||
}
|
||||
|
||||
fn places_metadata_delete(
|
||||
handle: i64,
|
||||
url: String,
|
||||
referrer_url: Option<String>,
|
||||
search_term: Option<String>,
|
||||
) -> Result<(), ErrorWrapper> {
|
||||
log::debug!("places_metadata_delete_metadata");
|
||||
CONNECTIONS.get(
|
||||
Handle::from_u64(handle as u64)?,
|
||||
|conn| -> Result<_, ErrorWrapper> {
|
||||
crate::storage::history_metadata::delete_metadata(
|
||||
fn get_history_metadata_between(
|
||||
&self,
|
||||
start: Timestamp,
|
||||
end: Timestamp,
|
||||
) -> Result<Vec<HistoryMetadata>> {
|
||||
self.with_conn(|conn| {
|
||||
history_metadata::get_between(conn, start.as_millis_i64(), end.as_millis_i64())
|
||||
})
|
||||
}
|
||||
|
||||
fn get_history_metadata_since(&self, start: Timestamp) -> Result<Vec<HistoryMetadata>> {
|
||||
self.with_conn(|conn| history_metadata::get_since(conn, start.as_millis_i64()))
|
||||
}
|
||||
|
||||
fn query_history_metadata(&self, query: String, limit: i32) -> Result<Vec<HistoryMetadata>> {
|
||||
self.with_conn(|conn| history_metadata::query(conn, query.as_str(), limit))
|
||||
}
|
||||
|
||||
fn get_history_highlights(
|
||||
&self,
|
||||
weights: HistoryHighlightWeights,
|
||||
limit: i32,
|
||||
) -> Result<Vec<HistoryHighlight>> {
|
||||
self.with_conn(|conn| history_metadata::get_highlights(conn, weights, limit))
|
||||
}
|
||||
|
||||
fn note_history_metadata_observation(&self, data: HistoryMetadataObservation) -> Result<()> {
|
||||
// odd historical naming discrepency - public function is "note_*", impl is "apply_*"
|
||||
self.with_conn(|conn| history_metadata::apply_metadata_observation(conn, data))
|
||||
}
|
||||
|
||||
fn metadata_delete_older_than(&self, older_than: Timestamp) -> Result<()> {
|
||||
self.with_conn(|conn| history_metadata::delete_older_than(conn, older_than.as_millis_i64()))
|
||||
}
|
||||
|
||||
fn metadata_delete(
|
||||
&self,
|
||||
url: Url,
|
||||
referrer_url: Option<Url>,
|
||||
search_term: Option<String>,
|
||||
) -> Result<()> {
|
||||
self.with_conn(|conn| {
|
||||
history_metadata::delete_metadata(
|
||||
conn,
|
||||
url.as_str(),
|
||||
referrer_url.as_deref(),
|
||||
&url,
|
||||
referrer_url.as_ref(),
|
||||
search_term.as_deref(),
|
||||
)?;
|
||||
)
|
||||
})
|
||||
}
|
||||
|
||||
/// Add an observation to the database.
|
||||
fn apply_observation(&self, visit: VisitObservation) -> Result<()> {
|
||||
self.with_conn(|conn| history::apply_observation(conn, visit))?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn get_visited_urls_in_range(
|
||||
&self,
|
||||
start: Timestamp,
|
||||
end: Timestamp,
|
||||
include_remote: bool,
|
||||
) -> Result<Vec<Url>> {
|
||||
self.with_conn(|conn| {
|
||||
let urls = history::get_visited_urls(conn, start, end, include_remote)?
|
||||
.iter()
|
||||
// Turn the list of strings into valid Urls
|
||||
.filter_map(|s| Url::parse(s).ok())
|
||||
.collect::<Vec<_>>();
|
||||
Ok(urls)
|
||||
})
|
||||
}
|
||||
|
||||
fn get_visit_infos(
|
||||
&self,
|
||||
start_date: Timestamp,
|
||||
end_date: Timestamp,
|
||||
exclude_types: VisitTransitionSet,
|
||||
) -> Result<Vec<HistoryVisitInfo>> {
|
||||
self.with_conn(|conn| history::get_visit_infos(conn, start_date, end_date, exclude_types))
|
||||
}
|
||||
|
||||
fn get_visit_count(&self, exclude_types: VisitTransitionSet) -> Result<i64> {
|
||||
self.with_conn(|conn| history::get_visit_count(conn, exclude_types))
|
||||
}
|
||||
|
||||
fn get_visit_page(
|
||||
&self,
|
||||
offset: i64,
|
||||
count: i64,
|
||||
exclude_types: VisitTransitionSet,
|
||||
) -> Result<Vec<HistoryVisitInfo>> {
|
||||
self.with_conn(|conn| history::get_visit_page(conn, offset, count, exclude_types))
|
||||
}
|
||||
|
||||
fn get_visit_page_with_bound(
|
||||
&self,
|
||||
bound: i64,
|
||||
offset: i64,
|
||||
count: i64,
|
||||
exclude_types: VisitTransitionSet,
|
||||
) -> Result<HistoryVisitInfosWithBound> {
|
||||
self.with_conn(|conn| {
|
||||
history::get_visit_page_with_bound(conn, bound, offset, count, exclude_types)
|
||||
})
|
||||
}
|
||||
|
||||
// This is identical to get_visited in history.rs but takes a list of strings instead of urls
|
||||
// This is necessary b/c we still need to return 'false' for bad URLs which prevents us from
|
||||
// parsing/filtering them before reaching the history layer
|
||||
fn get_visited(&self, urls: Vec<String>) -> Result<Vec<bool>> {
|
||||
let iter = urls.into_iter();
|
||||
let mut result = vec![false; iter.len()];
|
||||
let url_idxs = iter
|
||||
.enumerate()
|
||||
.filter_map(|(idx, s)| Url::parse(&s).ok().map(|url| (idx, url)))
|
||||
.collect::<Vec<_>>();
|
||||
self.with_conn(|conn| history::get_visited_into(conn, &url_idxs, &mut result))?;
|
||||
Ok(result)
|
||||
}
|
||||
|
||||
fn delete_visits_for(&self, url: String) -> Result<()> {
|
||||
self.with_conn(|conn| {
|
||||
let guid = match Url::parse(&url) {
|
||||
Ok(url) => history::url_to_guid(conn, &url)?,
|
||||
Err(e) => {
|
||||
log::warn!("Invalid URL passed to places_delete_visits_for, {}", e);
|
||||
history::href_to_guid(conn, url.clone().as_str())?
|
||||
}
|
||||
};
|
||||
if let Some(guid) = guid {
|
||||
history::delete_visits_for(conn, &guid)?;
|
||||
}
|
||||
Ok(())
|
||||
},
|
||||
)
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
pub mod error_codes {
|
||||
// Note: 0 (success) and -1 (panic) are reserved by ffi_support
|
||||
fn delete_visits_between(&self, start: Timestamp, end: Timestamp) -> Result<()> {
|
||||
self.with_conn(|conn| history::delete_visits_between(conn, start, end))
|
||||
}
|
||||
|
||||
/// An unexpected error occurred which likely cannot be meaningfully handled
|
||||
/// by the application.
|
||||
pub const UNEXPECTED: i32 = 1;
|
||||
|
||||
/// A URL was provided that we failed to parse
|
||||
pub const URL_PARSE_ERROR: i32 = 2;
|
||||
|
||||
/// The requested operation failed because the database was busy
|
||||
/// performing operations on a separate connection to the same DB.
|
||||
pub const DATABASE_BUSY: i32 = 3;
|
||||
|
||||
/// The requested operation failed because it was interrupted
|
||||
pub const DATABASE_INTERRUPTED: i32 = 4;
|
||||
|
||||
/// The requested operation failed because the store is corrupt
|
||||
pub const DATABASE_CORRUPT: i32 = 5;
|
||||
|
||||
// Skip a bunch of spaces to make it clear these are part of a group,
|
||||
// even as more and more errors get added. We're only exposing the
|
||||
// InvalidPlaceInfo items that can actually be triggered, the others
|
||||
// (if they happen accidentally) will come through as unexpected.
|
||||
|
||||
/// `InvalidParent`: Attempt to add a child to a non-folder.
|
||||
pub const INVALID_PLACE_INFO_INVALID_PARENT: i32 = 64;
|
||||
|
||||
/// `NoItem`: The GUID provided does not exist.
|
||||
pub const INVALID_PLACE_INFO_NO_ITEM: i32 = 64 + 1;
|
||||
|
||||
/// `UrlTooLong`: The provided URL cannot be inserted, as it is over the
|
||||
/// maximum URL length.
|
||||
pub const INVALID_PLACE_INFO_URL_TOO_LONG: i32 = 64 + 2;
|
||||
|
||||
/// `IllegalChange`: Attempt to change a property on a bookmark node that
|
||||
/// cannot have that property. E.g. trying to edit the URL of a folder,
|
||||
/// title of a separator, etc.
|
||||
pub const INVALID_PLACE_INFO_ILLEGAL_CHANGE: i32 = 64 + 3;
|
||||
|
||||
/// `CannotUpdateRoot`: Attempt to modify a root in a way that is illegal, e.g. adding a child
|
||||
/// to root________, updating properties of a root, deleting a root, etc.
|
||||
pub const INVALID_PLACE_INFO_CANNOT_UPDATE_ROOT: i32 = 64 + 4;
|
||||
}
|
||||
|
||||
fn get_code(err: &Error) -> ErrorCode {
|
||||
ErrorCode::new(get_error_number(err))
|
||||
}
|
||||
|
||||
fn get_error_number(err: &Error) -> i32 {
|
||||
match err.kind() {
|
||||
ErrorKind::InvalidPlaceInfo(info) => {
|
||||
log::error!("Invalid place info: {}", info);
|
||||
match &info {
|
||||
InvalidPlaceInfo::InvalidParent(..) => {
|
||||
error_codes::INVALID_PLACE_INFO_INVALID_PARENT
|
||||
fn delete_visit(&self, url: String, timestamp: Timestamp) -> Result<()> {
|
||||
self.with_conn(|conn| {
|
||||
match Url::parse(&url) {
|
||||
Ok(url) => {
|
||||
history::delete_place_visit_at_time(conn, &url, timestamp)?;
|
||||
}
|
||||
InvalidPlaceInfo::NoSuchGuid(..) => error_codes::INVALID_PLACE_INFO_NO_ITEM,
|
||||
InvalidPlaceInfo::UrlTooLong => error_codes::INVALID_PLACE_INFO_INVALID_PARENT,
|
||||
InvalidPlaceInfo::IllegalChange(..) => {
|
||||
error_codes::INVALID_PLACE_INFO_ILLEGAL_CHANGE
|
||||
Err(e) => {
|
||||
log::warn!("Invalid URL passed to places_delete_visit, {}", e);
|
||||
history::delete_place_visit_at_time_by_href(conn, url.as_str(), timestamp)?;
|
||||
}
|
||||
InvalidPlaceInfo::CannotUpdateRoot(..) => {
|
||||
error_codes::INVALID_PLACE_INFO_CANNOT_UPDATE_ROOT
|
||||
}
|
||||
_ => error_codes::UNEXPECTED,
|
||||
}
|
||||
}
|
||||
ErrorKind::UrlParseError(e) => {
|
||||
log::error!("URL parse error: {}", e);
|
||||
error_codes::URL_PARSE_ERROR
|
||||
}
|
||||
// Can't pattern match on `err` without adding a dep on the sqlite3-sys crate,
|
||||
// so we just use a `if` guard.
|
||||
ErrorKind::SqlError(rusqlite::Error::SqliteFailure(err, msg))
|
||||
if err.code == rusqlite::ErrorCode::DatabaseBusy =>
|
||||
{
|
||||
log::error!("Database busy: {:?} {:?}", err, msg);
|
||||
error_codes::DATABASE_BUSY
|
||||
}
|
||||
ErrorKind::SqlError(rusqlite::Error::SqliteFailure(err, _))
|
||||
if err.code == rusqlite::ErrorCode::OperationInterrupted =>
|
||||
{
|
||||
log::info!("Operation interrupted");
|
||||
error_codes::DATABASE_INTERRUPTED
|
||||
}
|
||||
ErrorKind::InterruptedError(_) => {
|
||||
// Can't unify with the above ... :(
|
||||
log::info!("Operation interrupted");
|
||||
error_codes::DATABASE_INTERRUPTED
|
||||
}
|
||||
ErrorKind::Corruption(e) => {
|
||||
log::info!("The store is corrupt: {}", e);
|
||||
error_codes::DATABASE_CORRUPT
|
||||
}
|
||||
ErrorKind::SyncAdapterError(e) => {
|
||||
use sync15::ErrorKind;
|
||||
match e.kind() {
|
||||
ErrorKind::StoreError(store_error) => {
|
||||
// If it's a type-erased version of one of our errors, try
|
||||
// and resolve it.
|
||||
if let Some(places_err) = store_error.downcast_ref::<Error>() {
|
||||
log::info!("Recursing to resolve places error");
|
||||
get_error_number(places_err)
|
||||
} else {
|
||||
log::error!("Unexpected sync error: {:?}", err);
|
||||
error_codes::UNEXPECTED
|
||||
}
|
||||
}
|
||||
_ => {
|
||||
// TODO: expose network errors...
|
||||
log::error!("Unexpected sync error: {:?}", err);
|
||||
error_codes::UNEXPECTED
|
||||
}
|
||||
}
|
||||
}
|
||||
};
|
||||
Ok(())
|
||||
})
|
||||
}
|
||||
|
||||
err => {
|
||||
log::error!("Unexpected error: {:?}", err);
|
||||
error_codes::UNEXPECTED
|
||||
}
|
||||
fn get_top_frecent_site_infos(
|
||||
&self,
|
||||
num_items: i32,
|
||||
threshold_option: FrecencyThresholdOption,
|
||||
) -> Result<Vec<TopFrecentSiteInfo>> {
|
||||
self.with_conn(|conn| {
|
||||
crate::storage::history::get_top_frecent_site_infos(
|
||||
conn,
|
||||
num_items,
|
||||
threshold_option.value(),
|
||||
)
|
||||
})
|
||||
}
|
||||
|
||||
// XXX - We probably need to document/name this a little better as it's specifically for
|
||||
// history and NOT bookmarks...
|
||||
fn wipe_local_history(&self) -> Result<()> {
|
||||
self.with_conn(history::wipe_local)
|
||||
}
|
||||
|
||||
// Calls wipe_local_history but also updates the
|
||||
// sync metadata to only sync after most recent visit to prevent
|
||||
// further syncing of older data
|
||||
fn delete_everything_history(&self) -> Result<()> {
|
||||
self.with_conn(history::delete_everything)
|
||||
}
|
||||
|
||||
// XXX - This just calls wipe_local under the hood...
|
||||
// should probably have this go away?
|
||||
fn prune_destructively(&self) -> Result<()> {
|
||||
self.with_conn(history::prune_destructively)
|
||||
}
|
||||
|
||||
fn run_maintenance(&self) -> Result<()> {
|
||||
self.with_conn(storage::run_maintenance)
|
||||
}
|
||||
|
||||
fn query_autocomplete(&self, search: String, limit: i32) -> Result<Vec<SearchResult>> {
|
||||
self.with_conn(|conn| {
|
||||
search_frecent(
|
||||
conn,
|
||||
SearchParams {
|
||||
search_string: search,
|
||||
limit: limit as u32,
|
||||
},
|
||||
)
|
||||
.map(|search_results| search_results.into_iter().map(Into::into).collect())
|
||||
})
|
||||
}
|
||||
|
||||
fn accept_result(&self, search_string: String, url: Url) -> Result<()> {
|
||||
self.with_conn(|conn| matcher::accept_result(conn, &search_string, &url))
|
||||
}
|
||||
|
||||
fn match_url(&self, query: String) -> Result<Option<Url>> {
|
||||
self.with_conn(|conn| matcher::match_url(conn, query))
|
||||
}
|
||||
|
||||
fn bookmarks_get_tree(&self, item_guid: &Guid) -> Result<Option<BookmarkItem>> {
|
||||
self.with_conn(|conn| bookmarks::fetch::fetch_tree(conn, item_guid))
|
||||
}
|
||||
|
||||
fn bookmarks_get_by_guid(
|
||||
&self,
|
||||
guid: &Guid,
|
||||
get_direct_children: bool,
|
||||
) -> Result<Option<BookmarkItem>> {
|
||||
self.with_conn(|conn| {
|
||||
let bookmark = bookmarks::fetch::fetch_bookmark(conn, guid, get_direct_children)?;
|
||||
Ok(bookmark.map(BookmarkItem::from))
|
||||
})
|
||||
}
|
||||
|
||||
fn bookmarks_get_all_with_url(&self, url: Url) -> Result<Vec<BookmarkItem>> {
|
||||
self.with_conn(|conn| {
|
||||
// XXX - We should return the exact type - ie, BookmarkData rather than BookmarkItem.
|
||||
Ok(bookmarks::fetch::fetch_bookmarks_by_url(conn, &url)?
|
||||
.into_iter()
|
||||
.map(|b| BookmarkItem::Bookmark { b })
|
||||
.collect())
|
||||
})
|
||||
}
|
||||
|
||||
fn bookmarks_search(&self, query: String, limit: i32) -> Result<Vec<BookmarkItem>> {
|
||||
self.with_conn(|conn| {
|
||||
// XXX - We should return the exact type - ie, BookmarkData rather than BookmarkItem.
|
||||
Ok(
|
||||
bookmarks::fetch::search_bookmarks(conn, query.as_str(), limit as u32)?
|
||||
.into_iter()
|
||||
.map(|b| BookmarkItem::Bookmark { b })
|
||||
.collect(),
|
||||
)
|
||||
})
|
||||
}
|
||||
|
||||
fn bookmarks_get_recent(&self, limit: i32) -> Result<Vec<BookmarkItem>> {
|
||||
self.with_conn(|conn| {
|
||||
// XXX - We should return the exact type - ie, BookmarkData rather than BookmarkItem.
|
||||
Ok(bookmarks::fetch::recent_bookmarks(conn, limit as u32)?
|
||||
.into_iter()
|
||||
.map(|b| BookmarkItem::Bookmark { b })
|
||||
.collect())
|
||||
})
|
||||
}
|
||||
|
||||
fn bookmarks_delete(&self, id: Guid) -> Result<bool> {
|
||||
self.with_conn(|conn| bookmarks::delete_bookmark(conn, &id))
|
||||
}
|
||||
|
||||
fn bookmarks_delete_everything(&self) -> Result<()> {
|
||||
self.with_conn(bookmarks::delete_everything)
|
||||
}
|
||||
|
||||
fn bookmarks_get_url_for_keyword(&self, keyword: String) -> Result<Option<Url>> {
|
||||
self.with_conn(|conn| bookmarks::bookmarks_get_url_for_keyword(conn, keyword.as_str()))
|
||||
}
|
||||
|
||||
fn bookmarks_insert(&self, data: InsertableBookmarkItem) -> Result<Guid> {
|
||||
self.with_conn(|conn| bookmarks::insert_bookmark(conn, data))
|
||||
}
|
||||
|
||||
fn bookmarks_update(&self, item: BookmarkUpdateInfo) -> Result<()> {
|
||||
self.with_conn(|conn| bookmarks::update_bookmark_from_info(conn, item))
|
||||
}
|
||||
}
|
||||
|
||||
/// This is very very hacky - we somehow need to ensure the same error hierarchy
|
||||
/// exists for both hand-written FFI functions and those generated by uniffi,
|
||||
/// and there doesn't seem to be a clean way of doing that. So our .udl defines
|
||||
/// a single error type - ErrorWrapper::Wrapped(). The `String` message there
|
||||
/// is, roughly, `format!("{}|{}", extern_error.code, extern_error.message)`.
|
||||
/// There then exists code on the Swift and Kotlin side of the world which
|
||||
/// unpacks this and returns the exact same error objects as if it was an
|
||||
/// `ExternError` in the first place.
|
||||
#[derive(Debug)]
|
||||
pub enum ErrorWrapper {
|
||||
Wrapped(String),
|
||||
#[derive(Clone, PartialEq)]
|
||||
pub struct HistoryVisitInfo {
|
||||
pub url: Url,
|
||||
pub title: Option<String>,
|
||||
pub timestamp: Timestamp,
|
||||
pub visit_type: VisitTransition,
|
||||
pub is_hidden: bool,
|
||||
pub preview_image_url: Option<Url>,
|
||||
}
|
||||
#[derive(Clone, PartialEq)]
|
||||
pub struct HistoryVisitInfosWithBound {
|
||||
pub infos: Vec<HistoryVisitInfo>,
|
||||
pub bound: i64,
|
||||
pub offset: i64,
|
||||
}
|
||||
|
||||
impl ToString for ErrorWrapper {
|
||||
fn to_string(&self) -> String {
|
||||
pub struct TopFrecentSiteInfo {
|
||||
pub url: Url,
|
||||
pub title: Option<String>,
|
||||
}
|
||||
|
||||
pub enum FrecencyThresholdOption {
|
||||
None,
|
||||
SkipOneTimePages,
|
||||
}
|
||||
|
||||
impl FrecencyThresholdOption {
|
||||
fn value(&self) -> i64 {
|
||||
match self {
|
||||
ErrorWrapper::Wrapped(e) => e.to_string(),
|
||||
FrecencyThresholdOption::None => 0,
|
||||
FrecencyThresholdOption::SkipOneTimePages => SKIP_ONE_PAGE_FRECENCY_THRESHOLD,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl From<Error> for ErrorWrapper {
|
||||
fn from(e: Error) -> ErrorWrapper {
|
||||
ErrorWrapper::Wrapped(format!("{}|{}", get_error_number(&e), e.to_string()))
|
||||
}
|
||||
// We define those types to cross the FFI
|
||||
// a better approach would be to:
|
||||
// - Rename the `Url` in the internal MatchReason to have a different name
|
||||
// This is because `uniffi` fails to parse the UDL if an enum variant
|
||||
// shadows a type, in this case, the wrapped type `Url`.
|
||||
// look at: https://github.com/mozilla/uniffi-rs/issues/1137
|
||||
// - Fix the mismatch between the consumers and the rust layer with the Tags
|
||||
// variant in the internal MatchReason, the rust layer uses a
|
||||
// variant with associated data, the kotlin layers assumes a flat enum.
|
||||
pub struct SearchResult {
|
||||
pub url: Url,
|
||||
pub title: String,
|
||||
pub frecency: i64,
|
||||
pub reasons: Vec<MatchReason>,
|
||||
}
|
||||
|
||||
impl From<HandleError> for ErrorWrapper {
|
||||
fn from(e: HandleError) -> ErrorWrapper {
|
||||
ErrorWrapper::Wrapped(format!("{}|{}", error_codes::UNEXPECTED, e.to_string()))
|
||||
}
|
||||
pub enum MatchReason {
|
||||
Keyword,
|
||||
Origin,
|
||||
UrlMatch,
|
||||
PreviousUse,
|
||||
Bookmark,
|
||||
Tags,
|
||||
}
|
||||
|
||||
impl From<Error> for ExternError {
|
||||
fn from(e: Error) -> ExternError {
|
||||
ExternError::new_error(get_code(&e), e.to_string())
|
||||
}
|
||||
}
|
||||
|
||||
implement_into_ffi_by_protobuf!(msg_types::SearchResultList);
|
||||
implement_into_ffi_by_protobuf!(msg_types::TopFrecentSiteInfos);
|
||||
implement_into_ffi_by_protobuf!(msg_types::HistoryVisitInfos);
|
||||
implement_into_ffi_by_protobuf!(msg_types::HistoryVisitInfosWithBound);
|
||||
implement_into_ffi_by_protobuf!(msg_types::BookmarkNode);
|
||||
implement_into_ffi_by_protobuf!(msg_types::BookmarkNodeList);
|
||||
implement_into_ffi_by_delegation!(
|
||||
crate::storage::bookmarks::PublicNode,
|
||||
msg_types::BookmarkNode
|
||||
);
|
||||
|
||||
uniffi_macros::include_scaffolding!("places");
|
||||
// Exists just to convince uniffi to generate `liftSequence*` helpers!
|
||||
pub struct Dummy {
|
||||
|
|
|
@ -10,12 +10,13 @@ use crate::bookmark_sync::{
|
|||
use crate::db::db::PlacesDb;
|
||||
use crate::error::*;
|
||||
use crate::import::common::{attached_database, ExecuteOnDrop};
|
||||
use crate::storage::bookmarks::{bookmark_sync::create_synced_bookmark_roots, PublicNode};
|
||||
use crate::types::{BookmarkType, SyncStatus};
|
||||
use crate::storage::bookmarks::{bookmark_sync::create_synced_bookmark_roots, fetch::BookmarkData};
|
||||
use crate::types::SyncStatus;
|
||||
use rusqlite::NO_PARAMS;
|
||||
use serde_derive::*;
|
||||
use sql_support::ConnExt;
|
||||
use std::time::Instant;
|
||||
use sync_guid::Guid;
|
||||
use url::Url;
|
||||
|
||||
// Fennec's bookmarks schema didn't meaningfully change since 17, so this could go as low as that version.
|
||||
|
@ -48,7 +49,7 @@ pub fn import(
|
|||
pub fn import_pinned_sites(
|
||||
places_api: &PlacesApi,
|
||||
path: impl AsRef<std::path::Path>,
|
||||
) -> Result<Vec<PublicNode>> {
|
||||
) -> Result<Vec<BookmarkData>> {
|
||||
let url = crate::util::ensure_url_path(path)?;
|
||||
do_pinned_sites_import(places_api, url)
|
||||
}
|
||||
|
@ -159,7 +160,7 @@ fn do_import(places_api: &PlacesApi, fennec_db_file_url: Url) -> Result<Bookmark
|
|||
fn do_pinned_sites_import(
|
||||
places_api: &PlacesApi,
|
||||
fennec_db_file_url: Url,
|
||||
) -> Result<Vec<PublicNode>> {
|
||||
) -> Result<Vec<BookmarkData>> {
|
||||
let conn_mutex = places_api.get_sync_connection()?;
|
||||
let conn = conn_mutex.lock();
|
||||
let scope = conn.begin_interrupt_scope();
|
||||
|
@ -177,11 +178,13 @@ fn do_pinned_sites_import(
|
|||
log::debug!("Fetching pinned websites");
|
||||
// Grab the pinned websites (they are stored as bookmarks).
|
||||
let mut stmt = conn.prepare(&FETCH_PINNED)?;
|
||||
let pinned_rows = stmt.query_map(NO_PARAMS, public_node_from_fennec_pinned)?;
|
||||
let pinned_rows = stmt.query_map(NO_PARAMS, bookmark_data_from_fennec_pinned)?;
|
||||
scope.err_if_interrupted()?;
|
||||
let mut pinned = Vec::new();
|
||||
for row in pinned_rows {
|
||||
pinned.push(row?);
|
||||
if let Some(bm) = row? {
|
||||
pinned.push(bm);
|
||||
}
|
||||
}
|
||||
|
||||
log::info!("Successfully fetched pinned websites");
|
||||
|
@ -412,22 +415,26 @@ lazy_static::lazy_static! {
|
|||
;
|
||||
}
|
||||
|
||||
fn public_node_from_fennec_pinned(
|
||||
fn bookmark_data_from_fennec_pinned(
|
||||
row: &rusqlite::Row<'_>,
|
||||
) -> std::result::Result<PublicNode, rusqlite::Error> {
|
||||
Ok(PublicNode {
|
||||
node_type: BookmarkType::Bookmark,
|
||||
) -> std::result::Result<Option<BookmarkData>, rusqlite::Error> {
|
||||
let url = match row
|
||||
.get::<_, Option<String>>("url")?
|
||||
.and_then(|s| Url::parse(&s).ok())
|
||||
{
|
||||
None => return Ok(None),
|
||||
Some(url) => url,
|
||||
};
|
||||
|
||||
Ok(Some(BookmarkData {
|
||||
guid: row.get::<_, String>("guid")?.into(),
|
||||
parent_guid: None,
|
||||
parent_guid: Guid::empty(),
|
||||
position: row.get("position")?,
|
||||
date_added: row.get("created")?,
|
||||
last_modified: row.get("modified")?,
|
||||
title: row.get::<_, Option<String>>("title")?,
|
||||
url: row
|
||||
.get::<_, Option<String>>("url")?
|
||||
.and_then(|s| Url::parse(&s).ok()),
|
||||
..Default::default()
|
||||
})
|
||||
url,
|
||||
}))
|
||||
}
|
||||
|
||||
mod sql_fns {
|
||||
|
|
|
@ -24,10 +24,6 @@ pub mod storage;
|
|||
mod tests;
|
||||
mod util;
|
||||
|
||||
pub mod msg_types {
|
||||
include!("mozilla.appservices.places.protobuf.rs");
|
||||
}
|
||||
|
||||
pub use crate::api::apply_observation;
|
||||
#[cfg(test)]
|
||||
pub use crate::api::places_api::test;
|
||||
|
@ -35,8 +31,7 @@ pub use crate::api::places_api::{get_registered_sync_engine, ConnectionType, Pla
|
|||
|
||||
pub use crate::db::PlacesDb;
|
||||
pub use crate::error::*;
|
||||
pub use crate::observation::VisitObservation;
|
||||
pub use crate::observation::*;
|
||||
pub use crate::storage::PageInfo;
|
||||
pub use crate::storage::RowId;
|
||||
pub use crate::types::*;
|
||||
pub use ffi::{APIS, CONNECTIONS};
|
||||
|
|
|
@ -1,207 +0,0 @@
|
|||
#[derive(Clone, PartialEq, ::prost::Message)]
|
||||
pub struct HistoryVisitInfo {
|
||||
#[prost(string, required, tag="1")]
|
||||
pub url: ::prost::alloc::string::String,
|
||||
#[prost(string, optional, tag="2")]
|
||||
pub title: ::core::option::Option<::prost::alloc::string::String>,
|
||||
#[prost(int64, required, tag="3")]
|
||||
pub timestamp: i64,
|
||||
#[prost(int32, required, tag="4")]
|
||||
pub visit_type: i32,
|
||||
#[prost(bool, required, tag="5")]
|
||||
pub is_hidden: bool,
|
||||
#[prost(string, optional, tag="6")]
|
||||
pub preview_image_url: ::core::option::Option<::prost::alloc::string::String>,
|
||||
}
|
||||
#[derive(Clone, PartialEq, ::prost::Message)]
|
||||
pub struct HistoryVisitInfos {
|
||||
#[prost(message, repeated, tag="1")]
|
||||
pub infos: ::prost::alloc::vec::Vec<HistoryVisitInfo>,
|
||||
}
|
||||
#[derive(Clone, PartialEq, ::prost::Message)]
|
||||
pub struct HistoryVisitInfosWithBound {
|
||||
#[prost(message, repeated, tag="1")]
|
||||
pub infos: ::prost::alloc::vec::Vec<HistoryVisitInfo>,
|
||||
#[prost(int64, required, tag="2")]
|
||||
pub bound: i64,
|
||||
#[prost(int64, required, tag="3")]
|
||||
pub offset: i64,
|
||||
}
|
||||
///*
|
||||
/// A bookmark node.
|
||||
///
|
||||
/// We use a single message type for bookmarks. It covers insertion, deletion,
|
||||
/// and update, and represents all three bookmark types.
|
||||
///
|
||||
/// This simplifies the FFI by reducing the number of types that must go across
|
||||
/// it, and retuces boilderplate, but removes some static-ish guarantees we
|
||||
/// might have otherwise.
|
||||
///
|
||||
/// Note that these docs comments are internal, and don't necessarily impact the actual
|
||||
/// API we expose to Kotlin/Swift (this is particularly true around reads).
|
||||
#[derive(Clone, PartialEq, ::prost::Message)]
|
||||
pub struct BookmarkNode {
|
||||
///*
|
||||
/// The type of this bookmark, a `BookmarkType` (from `types.rs`).
|
||||
///
|
||||
/// This impacts which fields may be present.
|
||||
///
|
||||
/// It's illegal to attempt to change this when updating a bookmark.
|
||||
///
|
||||
/// Note: this probably should be an `enum`, but prost seems to get upset
|
||||
/// about it so we're just using int32 for now.
|
||||
///
|
||||
/// Note: this is `node_type` and not `type` because `type` is reserved
|
||||
/// in Rust.
|
||||
///
|
||||
/// - Always returned on reads.
|
||||
/// - Required for inserts.
|
||||
/// - Not provided for updates.
|
||||
#[prost(int32, optional, tag="1")]
|
||||
pub node_type: ::core::option::Option<i32>,
|
||||
///*
|
||||
/// The bookmarks guid.
|
||||
///
|
||||
/// - Always returned on reads.
|
||||
/// - Not allowed for inserts.
|
||||
/// - Required for updates (specifies which record is being changed)
|
||||
#[prost(string, optional, tag="2")]
|
||||
pub guid: ::core::option::Option<::prost::alloc::string::String>,
|
||||
///*
|
||||
/// Creation time, in milliseconds since the unix epoch.
|
||||
///
|
||||
/// May not be a local timestamp, and may shift if new devices are able to
|
||||
/// provide an earlier (but still valid) timestamp.
|
||||
///
|
||||
/// - Always returned on reads.
|
||||
/// - Ignored for insertion and update.
|
||||
#[prost(int64, optional, tag="3")]
|
||||
pub date_added: ::core::option::Option<i64>,
|
||||
///*
|
||||
/// Last modification time, in milliseconds since the unix epoch.
|
||||
///
|
||||
/// - Always returned on reads.
|
||||
/// - Ignored for insertion and update.
|
||||
#[prost(int64, optional, tag="4")]
|
||||
pub last_modified: ::core::option::Option<i64>,
|
||||
///*
|
||||
/// Guid of the parent record.
|
||||
///
|
||||
/// - Returned on reads, except for reads of the bookmark root.
|
||||
/// - Required for insertion.
|
||||
/// - On updates, if provided, we treat it as a move.
|
||||
/// - Interacts with `position`, see its documentation below
|
||||
/// for details on how.
|
||||
#[prost(string, optional, tag="5")]
|
||||
pub parent_guid: ::core::option::Option<::prost::alloc::string::String>,
|
||||
///*
|
||||
/// Zero based index within the parent.
|
||||
///
|
||||
/// - Not provided on reads (for now).
|
||||
///
|
||||
/// - Allowed for insertion.
|
||||
/// - Leaving it out means 'end of folder'.
|
||||
///
|
||||
/// - Allowed for updates.
|
||||
/// - If `parent_guid` is not provided and `position` is, we treat this
|
||||
/// a move within the same folder.
|
||||
///
|
||||
/// - If `parent_guid` and `position` are both provided, we treat this as
|
||||
/// a move to / within that folder, and we insert at the requested
|
||||
/// position.
|
||||
///
|
||||
/// - If `position` is not provided (and `parent_guid` is) then it's
|
||||
/// treated as a move to the end of that folder.
|
||||
#[prost(uint32, optional, tag="6")]
|
||||
pub position: ::core::option::Option<u32>,
|
||||
///*
|
||||
/// Bookmark title. Not present for type = `BookmarkType::Separator`.
|
||||
///
|
||||
/// - Returned on reads if it exists.
|
||||
/// - Required when inserting folders.
|
||||
#[prost(string, optional, tag="7")]
|
||||
pub title: ::core::option::Option<::prost::alloc::string::String>,
|
||||
///*
|
||||
/// Bookmark URL. Only allowed/present for type = `BookmarkType::Bookmark`.
|
||||
///
|
||||
/// - Always returned on reads (for `BookmarkType::Bookmark`).
|
||||
/// - Required when inserting a new bookmark.
|
||||
#[prost(string, optional, tag="8")]
|
||||
pub url: ::core::option::Option<::prost::alloc::string::String>,
|
||||
///*
|
||||
/// IDs of folder children, in order. Only present for type =
|
||||
/// `BookmarkType::Folder`.
|
||||
///
|
||||
/// - Returned on reads (for `BookmarkType::Folder`).
|
||||
/// - Forbidden for insertions and updates.
|
||||
/// - Not provided if `child_nodes` is provided, to avoid sending more data
|
||||
/// over the FFI than necessary.
|
||||
#[prost(string, repeated, tag="9")]
|
||||
pub child_guids: ::prost::alloc::vec::Vec<::prost::alloc::string::String>,
|
||||
///*
|
||||
/// Data about folder children, in order. Only present for type =
|
||||
/// `BookmarkType::Folder`.
|
||||
///
|
||||
/// For performance reasons, this only is provided if it's requested.
|
||||
#[prost(message, repeated, tag="10")]
|
||||
pub child_nodes: ::prost::alloc::vec::Vec<BookmarkNode>,
|
||||
///*
|
||||
/// Returned by reads, and used to distinguish between the cases of
|
||||
/// "empty child_nodes because the API doesn't return children" and
|
||||
/// "empty child_nodes because this folder has no children (but
|
||||
/// we'd populate them if it had them)".
|
||||
///
|
||||
/// Only required because you can't have `optional repeated`.
|
||||
///
|
||||
/// Leaving this out is equivalent to false.
|
||||
#[prost(bool, optional, tag="11")]
|
||||
pub have_child_nodes: ::core::option::Option<bool>,
|
||||
}
|
||||
///* An array of bookmark nodes, since we can't represent that directly
|
||||
#[derive(Clone, PartialEq, ::prost::Message)]
|
||||
pub struct BookmarkNodeList {
|
||||
#[prost(message, repeated, tag="1")]
|
||||
pub nodes: ::prost::alloc::vec::Vec<BookmarkNode>,
|
||||
}
|
||||
#[derive(Clone, PartialEq, ::prost::Message)]
|
||||
pub struct SearchResultMessage {
|
||||
#[prost(string, required, tag="1")]
|
||||
pub url: ::prost::alloc::string::String,
|
||||
#[prost(string, required, tag="2")]
|
||||
pub title: ::prost::alloc::string::String,
|
||||
#[prost(int64, required, tag="3")]
|
||||
pub frecency: i64,
|
||||
#[prost(enumeration="SearchResultReason", repeated, tag="4")]
|
||||
pub reasons: ::prost::alloc::vec::Vec<i32>,
|
||||
}
|
||||
#[derive(Clone, PartialEq, ::prost::Message)]
|
||||
pub struct SearchResultList {
|
||||
#[prost(message, repeated, tag="1")]
|
||||
pub results: ::prost::alloc::vec::Vec<SearchResultMessage>,
|
||||
}
|
||||
#[derive(Clone, PartialEq, ::prost::Message)]
|
||||
pub struct TopFrecentSiteInfo {
|
||||
#[prost(string, required, tag="1")]
|
||||
pub url: ::prost::alloc::string::String,
|
||||
#[prost(string, optional, tag="2")]
|
||||
pub title: ::core::option::Option<::prost::alloc::string::String>,
|
||||
}
|
||||
#[derive(Clone, PartialEq, ::prost::Message)]
|
||||
pub struct TopFrecentSiteInfos {
|
||||
#[prost(message, repeated, tag="1")]
|
||||
pub infos: ::prost::alloc::vec::Vec<TopFrecentSiteInfo>,
|
||||
}
|
||||
/// Protobuf allows nesting these, but prost behaves weirdly if we do.
|
||||
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord, ::prost::Enumeration)]
|
||||
#[repr(i32)]
|
||||
pub enum SearchResultReason {
|
||||
/// Never used in practice. Maybe remove this from here and from the rust enum?
|
||||
Keyword = 1,
|
||||
Origin = 2,
|
||||
Url = 3,
|
||||
PreviousUse = 4,
|
||||
Bookmark = 5,
|
||||
/// If we get real tag support, just add `optional string tags` to SearchResult below, but
|
||||
/// for now expose that it was because of tags.
|
||||
Tag = 6,
|
||||
}
|
|
@ -3,7 +3,6 @@
|
|||
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
|
||||
|
||||
use crate::types::*;
|
||||
use serde_derive::*;
|
||||
use types::Timestamp;
|
||||
use url::Url;
|
||||
|
||||
|
@ -20,56 +19,24 @@ use url::Url;
|
|||
/// It exposes a "builder api", but for convenience, that API allows Options too.
|
||||
/// So, eg, `.with_title(None)` or `with_is_error(None)` is allowed but records
|
||||
/// no observation.
|
||||
#[derive(Debug, Serialize, Deserialize)]
|
||||
#[derive(Debug)]
|
||||
pub struct VisitObservation {
|
||||
/// Ideally, we'd use url::Url here with `serde_url`, but we really would
|
||||
/// like to expose these errors over the FFI as UrlParseErrors and not json
|
||||
/// errors, and we also would like to do so without parsing strings.
|
||||
pub url: String,
|
||||
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
#[serde(default)]
|
||||
pub url: Url,
|
||||
pub title: Option<String>,
|
||||
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
#[serde(default)]
|
||||
pub visit_type: Option<VisitTransition>,
|
||||
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
#[serde(default)]
|
||||
pub is_error: Option<bool>,
|
||||
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
#[serde(default)]
|
||||
pub is_redirect_source: Option<bool>,
|
||||
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
#[serde(default)]
|
||||
pub is_permanent_redirect_source: Option<bool>,
|
||||
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
#[serde(default)]
|
||||
pub at: Option<Timestamp>,
|
||||
|
||||
/// Semantically also a url::Url, See the comment about the `url` property.
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
#[serde(default)]
|
||||
pub referrer: Option<String>,
|
||||
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
#[serde(default)]
|
||||
pub referrer: Option<Url>,
|
||||
pub is_remote: Option<bool>,
|
||||
|
||||
/// Semantically also a url::Url, See the comment about the `url` property.
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
#[serde(default)]
|
||||
pub preview_image_url: Option<String>,
|
||||
pub preview_image_url: Option<Url>,
|
||||
}
|
||||
|
||||
impl VisitObservation {
|
||||
pub fn new(url: Url) -> Self {
|
||||
VisitObservation {
|
||||
url: url.into(),
|
||||
url,
|
||||
title: None,
|
||||
visit_type: None,
|
||||
is_error: None,
|
||||
|
@ -125,8 +92,7 @@ impl VisitObservation {
|
|||
self
|
||||
}
|
||||
|
||||
// v is a String instead of a Url to allow testing invalid urls as input.
|
||||
pub fn with_preview_image_url(mut self, v: impl Into<Option<String>>) -> Self {
|
||||
pub fn with_preview_image_url(mut self, v: impl Into<Option<Url>>) -> Self {
|
||||
self.preview_image_url = v.into();
|
||||
self
|
||||
}
|
||||
|
|
|
@ -2,30 +2,212 @@
|
|||
* License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
|
||||
|
||||
[Wrapped]
|
||||
typedef string Url;
|
||||
[Wrapped]
|
||||
typedef i64 Timestamp;
|
||||
[Wrapped]
|
||||
typedef i32 VisitTransitionSet;
|
||||
[Wrapped]
|
||||
typedef string Guid;
|
||||
|
||||
namespace places {
|
||||
[Throws=ErrorWrapper]
|
||||
HistoryMetadata? places_get_latest_history_metadata_for_url(i64 handle, string url);
|
||||
[Throws=PlacesError]
|
||||
PlacesApi places_api_new(string db_path);
|
||||
};
|
||||
|
||||
[Throws=ErrorWrapper]
|
||||
sequence<HistoryMetadata> places_get_history_metadata_between(i64 handle, i64 start, i64 end);
|
||||
enum ConnectionType {
|
||||
"ReadOnly",
|
||||
"ReadWrite",
|
||||
"Sync", // ideally we wouldn't need to expose this and should guard against it being used (it's internal only)
|
||||
};
|
||||
|
||||
[Throws=ErrorWrapper]
|
||||
sequence<HistoryMetadata> places_get_history_metadata_since(i64 handle, i64 start);
|
||||
interface SqlInterruptHandle {
|
||||
void interrupt();
|
||||
};
|
||||
|
||||
[Throws=ErrorWrapper]
|
||||
sequence<HistoryMetadata> places_query_history_metadata(i64 handle, string query, i32 limit);
|
||||
interface PlacesApi {
|
||||
[Throws=PlacesError]
|
||||
PlacesConnection new_connection(ConnectionType conn_type);
|
||||
|
||||
[Throws=ErrorWrapper]
|
||||
sequence<HistoryHighlight> places_get_history_highlights(i64 handle, HistoryHighlightWeights weights, i32 limit);
|
||||
[Throws=PlacesError]
|
||||
SqlInterruptHandle new_sync_conn_interrupt_handle();
|
||||
|
||||
[Throws=ErrorWrapper]
|
||||
void places_note_history_metadata_observation(i64 handle, HistoryMetadataObservation data);
|
||||
[Self=ByArc]
|
||||
void register_with_sync_manager();
|
||||
|
||||
[Throws=ErrorWrapper]
|
||||
void places_metadata_delete(i64 handle, string url, string? referrer_url, string? search_term);
|
||||
[Throws=PlacesError]
|
||||
void reset_history();
|
||||
|
||||
[Throws=ErrorWrapper]
|
||||
void places_metadata_delete_older_than(i64 handle, i64 older_than);
|
||||
[Throws=PlacesError]
|
||||
string history_sync(string key_id, string access_token, string sync_key, Url tokenserver_url);
|
||||
|
||||
[Throws=PlacesError]
|
||||
string bookmarks_sync(string key_id, string access_token, string sync_key, Url tokenserver_url);
|
||||
|
||||
[Throws=PlacesError]
|
||||
sequence<BookmarkItem> places_pinned_sites_import_from_fennec(string db_path);
|
||||
|
||||
[Throws=PlacesError]
|
||||
string places_history_import_from_fennec(string db_path);
|
||||
|
||||
[Throws=PlacesError]
|
||||
string places_bookmarks_import_from_fennec(string db_path);
|
||||
|
||||
[Throws=PlacesError]
|
||||
void places_bookmarks_import_from_ios(string db_path);
|
||||
|
||||
[Throws=PlacesError]
|
||||
void bookmarks_reset();
|
||||
};
|
||||
|
||||
interface PlacesConnection {
|
||||
|
||||
[Throws=PlacesError]
|
||||
SqlInterruptHandle new_interrupt_handle();
|
||||
|
||||
[Throws=PlacesError]
|
||||
HistoryMetadata? get_latest_history_metadata_for_url(Url url);
|
||||
|
||||
[Throws=PlacesError]
|
||||
sequence<HistoryMetadata> get_history_metadata_between(Timestamp start, Timestamp end);
|
||||
|
||||
[Throws=PlacesError]
|
||||
sequence<HistoryMetadata> get_history_metadata_since(Timestamp since);
|
||||
|
||||
[Throws=PlacesError]
|
||||
sequence<SearchResult> query_autocomplete(string search, i32 limit);
|
||||
|
||||
[Throws=PlacesError]
|
||||
void accept_result(string search_string, Url url);
|
||||
|
||||
[Throws=PlacesError]
|
||||
Url? match_url(string query);
|
||||
|
||||
[Throws=PlacesError]
|
||||
sequence<HistoryMetadata> query_history_metadata(string query, i32 limit);
|
||||
|
||||
[Throws=PlacesError]
|
||||
sequence<HistoryHighlight> get_history_highlights(HistoryHighlightWeights weights, i32 limit);
|
||||
|
||||
[Throws=PlacesError]
|
||||
void note_history_metadata_observation(HistoryMetadataObservation data);
|
||||
|
||||
[Throws=PlacesError]
|
||||
void metadata_delete(Url url, Url? referrer_url, string? search_term);
|
||||
|
||||
[Throws=PlacesError]
|
||||
void metadata_delete_older_than(Timestamp older_than);
|
||||
|
||||
[Throws=PlacesError]
|
||||
void apply_observation(VisitObservation visit);
|
||||
|
||||
[Throws=PlacesError]
|
||||
sequence<Url> get_visited_urls_in_range(Timestamp start, Timestamp end, boolean include_remote);
|
||||
|
||||
[Throws=PlacesError]
|
||||
sequence<HistoryVisitInfo> get_visit_infos(Timestamp start_date, Timestamp end_date, VisitTransitionSet exclude_types);
|
||||
|
||||
[Throws=PlacesError]
|
||||
i64 get_visit_count(VisitTransitionSet exclude_types);
|
||||
|
||||
[Throws=PlacesError]
|
||||
sequence<HistoryVisitInfo> get_visit_page(i64 offset, i64 count, VisitTransitionSet exclude_types);
|
||||
|
||||
[Throws=PlacesError]
|
||||
HistoryVisitInfosWithBound get_visit_page_with_bound(i64 bound, i64 offset, i64 count, VisitTransitionSet exclude_types);
|
||||
|
||||
[Throws=PlacesError]
|
||||
sequence<boolean> get_visited(sequence<string> urls);
|
||||
|
||||
[Throws=PlacesError]
|
||||
void delete_visits_for(string url);
|
||||
|
||||
[Throws=PlacesError]
|
||||
void delete_visits_between(Timestamp start, Timestamp end);
|
||||
|
||||
[Throws=PlacesError]
|
||||
void delete_visit(string url, Timestamp timestamp);
|
||||
|
||||
[Throws=PlacesError]
|
||||
sequence<TopFrecentSiteInfo> get_top_frecent_site_infos(i32 num_items, FrecencyThresholdOption threshold_option);
|
||||
|
||||
// These three methods below are not actively being used by the consumers, we should investigate further
|
||||
// and remove if so https://github.com/mozilla/application-services/issues/4719
|
||||
[Throws=PlacesError]
|
||||
void wipe_local_history();
|
||||
|
||||
//From a-c: will not remove any history from remote devices, but it will prevent deleted
|
||||
// history from returning.
|
||||
[Throws=PlacesError]
|
||||
void delete_everything_history();
|
||||
|
||||
// Exactly the same as wipe_local_history
|
||||
[Throws=PlacesError]
|
||||
void prune_destructively();
|
||||
|
||||
[Throws=PlacesError]
|
||||
void run_maintenance();
|
||||
|
||||
[Throws=PlacesError]
|
||||
BookmarkItem? bookmarks_get_tree([ByRef] Guid item_guid);
|
||||
|
||||
[Throws=PlacesError]
|
||||
BookmarkItem? bookmarks_get_by_guid([ByRef] Guid guid, boolean get_direct_children);
|
||||
|
||||
// XXX - should return BookmarkData
|
||||
[Throws=PlacesError]
|
||||
sequence<BookmarkItem> bookmarks_get_all_with_url(Url url);
|
||||
|
||||
// XXX - should return BookmarkData
|
||||
[Throws=PlacesError]
|
||||
sequence<BookmarkItem> bookmarks_search(string query, i32 limit);
|
||||
|
||||
// XXX - should return BookmarkData
|
||||
[Throws=PlacesError]
|
||||
sequence<BookmarkItem> bookmarks_get_recent(i32 limit);
|
||||
|
||||
[Throws=PlacesError]
|
||||
boolean bookmarks_delete(Guid id);
|
||||
|
||||
[Throws=PlacesError]
|
||||
void bookmarks_delete_everything();
|
||||
|
||||
[Throws=PlacesError]
|
||||
Url? bookmarks_get_url_for_keyword(string keyword);
|
||||
|
||||
[Throws=PlacesError]
|
||||
void bookmarks_update(BookmarkUpdateInfo data);
|
||||
|
||||
[Throws=PlacesError]
|
||||
Guid bookmarks_insert(InsertableBookmarkItem bookmark);
|
||||
};
|
||||
|
||||
/**
|
||||
* Frecency threshold options for fetching top frecent sites. Requests a page that was visited
|
||||
* with a frecency score greater or equal to the value associated with the enums
|
||||
*/
|
||||
enum FrecencyThresholdOption {
|
||||
// Returns all visited pages. The frecency score is 0
|
||||
"None",
|
||||
// Skip visited pages that were only visited once. The frecency score is 101
|
||||
"SkipOneTimePages",
|
||||
};
|
||||
|
||||
dictionary SearchResult {
|
||||
Url url;
|
||||
string title;
|
||||
i64 frecency;
|
||||
sequence<MatchReason> reasons;
|
||||
};
|
||||
|
||||
enum MatchReason {
|
||||
"Keyword",
|
||||
"Origin",
|
||||
"UrlMatch",
|
||||
"PreviousUse",
|
||||
"Bookmark",
|
||||
"Tags"
|
||||
};
|
||||
|
||||
// Some kind of namespacing for uniffi would be ideal. Multiple udl/macro defns?
|
||||
|
@ -38,6 +220,21 @@ enum DocumentType {
|
|||
"Media",
|
||||
};
|
||||
|
||||
enum VisitTransition {
|
||||
// This transition type means the user followed a link.
|
||||
"Link",
|
||||
// This transition type means that the user typed the page's URL in the
|
||||
// URL bar or selected it from UI (URL bar autocomplete results, etc)
|
||||
"Typed",
|
||||
"Bookmark",
|
||||
"Embed",
|
||||
"RedirectPermanent",
|
||||
"RedirectTemporary",
|
||||
"Download",
|
||||
"FramedLink",
|
||||
"Reload",
|
||||
};
|
||||
|
||||
// This is used as an "input" to the api.
|
||||
dictionary HistoryMetadataObservation {
|
||||
string url;
|
||||
|
@ -74,12 +271,149 @@ dictionary HistoryHighlight {
|
|||
string? preview_image_url;
|
||||
};
|
||||
|
||||
dictionary HistoryVisitInfo {
|
||||
Url url;
|
||||
string? title;
|
||||
Timestamp timestamp;
|
||||
VisitTransition visit_type;
|
||||
boolean is_hidden;
|
||||
Url? preview_image_url;
|
||||
};
|
||||
|
||||
dictionary HistoryVisitInfosWithBound {
|
||||
sequence<HistoryVisitInfo> infos;
|
||||
i64 bound;
|
||||
i64 offset;
|
||||
};
|
||||
|
||||
/**
|
||||
* Encapsulates either information about a visit to a page, or meta information about the page,
|
||||
* or both. Use [VisitType.UPDATE_PLACE] to differentiate an update from a visit.
|
||||
*/
|
||||
dictionary VisitObservation {
|
||||
Url url;
|
||||
string? title = null;
|
||||
VisitTransition? visit_type;
|
||||
boolean? is_error = null;
|
||||
boolean? is_redirect_source = null;
|
||||
boolean? is_permanent_redirect_source = null;
|
||||
Timestamp? at = null;
|
||||
Url? referrer = null;
|
||||
boolean? is_remote = null;
|
||||
Url? preview_image_url = null;
|
||||
};
|
||||
|
||||
// Exists just to convince uniffi to generate `liftSequence*` helpers!
|
||||
dictionary Dummy {
|
||||
sequence<HistoryMetadata>? md;
|
||||
};
|
||||
|
||||
[Error]
|
||||
enum ErrorWrapper {
|
||||
"Wrapped"
|
||||
dictionary TopFrecentSiteInfo {
|
||||
Url url;
|
||||
string? title;
|
||||
};
|
||||
|
||||
|
||||
[Error]
|
||||
enum PlacesError {
|
||||
"UnexpectedPlacesException",
|
||||
"UrlParseFailed",
|
||||
"JsonParseFailed",
|
||||
"PlacesConnectionBusy",
|
||||
"OperationInterrupted",
|
||||
"BookmarksCorruption",
|
||||
"InvalidParent",
|
||||
"UnknownBookmarkItem",
|
||||
"UrlTooLong",
|
||||
"InvalidBookmarkUpdate", // XXX - can we kill this?
|
||||
"CannotUpdateRoot",
|
||||
"InternalPanic",
|
||||
};
|
||||
|
||||
dictionary BookmarkData {
|
||||
Guid guid;
|
||||
Guid parent_guid;
|
||||
u32 position;
|
||||
Timestamp date_added;
|
||||
Timestamp last_modified;
|
||||
Url url;
|
||||
string? title;
|
||||
};
|
||||
|
||||
dictionary BookmarkSeparator {
|
||||
Guid guid;
|
||||
Timestamp date_added;
|
||||
Timestamp last_modified;
|
||||
Guid parent_guid;
|
||||
u32 position;
|
||||
};
|
||||
|
||||
dictionary BookmarkFolder {
|
||||
Guid guid;
|
||||
Timestamp date_added;
|
||||
Timestamp last_modified;
|
||||
Guid? parent_guid;
|
||||
u32 position;
|
||||
string? title;
|
||||
sequence<Guid>? child_guids;
|
||||
sequence<BookmarkItem>? child_nodes;
|
||||
};
|
||||
|
||||
[Enum]
|
||||
interface BookmarkItem {
|
||||
Bookmark(BookmarkData b);
|
||||
Separator(BookmarkSeparator s);
|
||||
Folder(BookmarkFolder f);
|
||||
};
|
||||
|
||||
dictionary BookmarkUpdateInfo {
|
||||
Guid guid;
|
||||
string? title;
|
||||
string? url;
|
||||
Guid? parent_guid;
|
||||
u32? position;
|
||||
};
|
||||
|
||||
// Structs for inserting new bookmark items.
|
||||
|
||||
// Where the item should be placed.
|
||||
[Enum]
|
||||
interface BookmarkPosition {
|
||||
Specific(u32 pos);
|
||||
Append();
|
||||
};
|
||||
|
||||
dictionary InsertableBookmark {
|
||||
Guid? guid = null;
|
||||
Guid parent_guid;
|
||||
BookmarkPosition position;
|
||||
Timestamp? date_added = null;
|
||||
Timestamp? last_modified = null;
|
||||
Url url;
|
||||
string? title = null;
|
||||
};
|
||||
|
||||
dictionary InsertableBookmarkSeparator {
|
||||
Guid? guid = null;
|
||||
Guid parent_guid;
|
||||
BookmarkPosition position;
|
||||
Timestamp? date_added = null;
|
||||
Timestamp? last_modified = null;
|
||||
};
|
||||
|
||||
dictionary InsertableBookmarkFolder {
|
||||
Guid? guid = null;
|
||||
Guid parent_guid;
|
||||
BookmarkPosition position;
|
||||
Timestamp? date_added = null;
|
||||
Timestamp? last_modified = null;
|
||||
string? title = null;
|
||||
sequence<InsertableBookmarkItem> children;
|
||||
};
|
||||
|
||||
[Enum]
|
||||
interface InsertableBookmarkItem {
|
||||
Bookmark(InsertableBookmark b);
|
||||
Folder(InsertableBookmarkFolder f);
|
||||
Separator(InsertableBookmarkSeparator s);
|
||||
};
|
||||
|
|
|
@ -1,208 +0,0 @@
|
|||
syntax = "proto2";
|
||||
|
||||
// Note: this file name must be unique due to how the iOS megazord works :(
|
||||
|
||||
package mozilla.appservices.places.protobuf;
|
||||
|
||||
option java_package = "mozilla.appservices.places";
|
||||
option java_outer_classname = "MsgTypes";
|
||||
option swift_prefix = "MsgTypes_";
|
||||
option optimize_for = LITE_RUNTIME;
|
||||
|
||||
message HistoryVisitInfo {
|
||||
required string url = 1;
|
||||
optional string title = 2;
|
||||
required int64 timestamp = 3;
|
||||
required int32 visit_type = 4;
|
||||
required bool is_hidden = 5;
|
||||
optional string preview_image_url = 6;
|
||||
}
|
||||
|
||||
message HistoryVisitInfos {
|
||||
repeated HistoryVisitInfo infos = 1;
|
||||
}
|
||||
|
||||
message HistoryVisitInfosWithBound {
|
||||
repeated HistoryVisitInfo infos = 1;
|
||||
required int64 bound = 2;
|
||||
required int64 offset = 3;
|
||||
}
|
||||
|
||||
/**
|
||||
* A bookmark node.
|
||||
*
|
||||
* We use a single message type for bookmarks. It covers insertion, deletion,
|
||||
* and update, and represents all three bookmark types.
|
||||
*
|
||||
* This simplifies the FFI by reducing the number of types that must go across
|
||||
* it, and retuces boilderplate, but removes some static-ish guarantees we
|
||||
* might have otherwise.
|
||||
*
|
||||
* Note that these docs comments are internal, and don't necessarily impact the actual
|
||||
* API we expose to Kotlin/Swift (this is particularly true around reads).
|
||||
*/
|
||||
message BookmarkNode {
|
||||
/**
|
||||
* The type of this bookmark, a `BookmarkType` (from `types.rs`).
|
||||
*
|
||||
* This impacts which fields may be present.
|
||||
*
|
||||
* It's illegal to attempt to change this when updating a bookmark.
|
||||
*
|
||||
* Note: this probably should be an `enum`, but prost seems to get upset
|
||||
* about it so we're just using int32 for now.
|
||||
*
|
||||
* Note: this is `node_type` and not `type` because `type` is reserved
|
||||
* in Rust.
|
||||
*
|
||||
* - Always returned on reads.
|
||||
* - Required for inserts.
|
||||
* - Not provided for updates.
|
||||
*/
|
||||
optional int32 node_type = 1;
|
||||
|
||||
/**
|
||||
* The bookmarks guid.
|
||||
*
|
||||
* - Always returned on reads.
|
||||
* - Not allowed for inserts.
|
||||
* - Required for updates (specifies which record is being changed)
|
||||
*/
|
||||
optional string guid = 2;
|
||||
|
||||
/**
|
||||
* Creation time, in milliseconds since the unix epoch.
|
||||
*
|
||||
* May not be a local timestamp, and may shift if new devices are able to
|
||||
* provide an earlier (but still valid) timestamp.
|
||||
*
|
||||
* - Always returned on reads.
|
||||
* - Ignored for insertion and update.
|
||||
*/
|
||||
optional int64 date_added = 3;
|
||||
|
||||
/**
|
||||
* Last modification time, in milliseconds since the unix epoch.
|
||||
*
|
||||
* - Always returned on reads.
|
||||
* - Ignored for insertion and update.
|
||||
*/
|
||||
optional int64 last_modified = 4;
|
||||
|
||||
/**
|
||||
* Guid of the parent record.
|
||||
*
|
||||
* - Returned on reads, except for reads of the bookmark root.
|
||||
* - Required for insertion.
|
||||
* - On updates, if provided, we treat it as a move.
|
||||
* - Interacts with `position`, see its documentation below
|
||||
* for details on how.
|
||||
*/
|
||||
optional string parent_guid = 5;
|
||||
|
||||
/**
|
||||
* Zero based index within the parent.
|
||||
*
|
||||
* - Not provided on reads (for now).
|
||||
*
|
||||
* - Allowed for insertion.
|
||||
* - Leaving it out means 'end of folder'.
|
||||
*
|
||||
* - Allowed for updates.
|
||||
* - If `parent_guid` is not provided and `position` is, we treat this
|
||||
* a move within the same folder.
|
||||
*
|
||||
* - If `parent_guid` and `position` are both provided, we treat this as
|
||||
* a move to / within that folder, and we insert at the requested
|
||||
* position.
|
||||
*
|
||||
* - If `position` is not provided (and `parent_guid` is) then it's
|
||||
* treated as a move to the end of that folder.
|
||||
*/
|
||||
optional uint32 position = 6;
|
||||
|
||||
/**
|
||||
* Bookmark title. Not present for type = `BookmarkType::Separator`.
|
||||
*
|
||||
* - Returned on reads if it exists.
|
||||
* - Required when inserting folders.
|
||||
*/
|
||||
optional string title = 7;
|
||||
|
||||
/**
|
||||
* Bookmark URL. Only allowed/present for type = `BookmarkType::Bookmark`.
|
||||
*
|
||||
* - Always returned on reads (for `BookmarkType::Bookmark`).
|
||||
* - Required when inserting a new bookmark.
|
||||
*/
|
||||
optional string url = 8;
|
||||
|
||||
/**
|
||||
* IDs of folder children, in order. Only present for type =
|
||||
* `BookmarkType::Folder`.
|
||||
*
|
||||
* - Returned on reads (for `BookmarkType::Folder`).
|
||||
* - Forbidden for insertions and updates.
|
||||
* - Not provided if `child_nodes` is provided, to avoid sending more data
|
||||
* over the FFI than necessary.
|
||||
*/
|
||||
repeated string child_guids = 9;
|
||||
|
||||
/**
|
||||
* Data about folder children, in order. Only present for type =
|
||||
* `BookmarkType::Folder`.
|
||||
*
|
||||
* For performance reasons, this only is provided if it's requested.
|
||||
*/
|
||||
repeated BookmarkNode child_nodes = 10;
|
||||
|
||||
/**
|
||||
* Returned by reads, and used to distinguish between the cases of
|
||||
* "empty child_nodes because the API doesn't return children" and
|
||||
* "empty child_nodes because this folder has no children (but
|
||||
* we'd populate them if it had them)".
|
||||
*
|
||||
* Only required because you can't have `optional repeated`.
|
||||
*
|
||||
* Leaving this out is equivalent to false.
|
||||
*/
|
||||
optional bool have_child_nodes = 11;
|
||||
}
|
||||
|
||||
/** An array of bookmark nodes, since we can't represent that directly */
|
||||
message BookmarkNodeList {
|
||||
repeated BookmarkNode nodes = 1;
|
||||
}
|
||||
|
||||
// Protobuf allows nesting these, but prost behaves weirdly if we do.
|
||||
enum SearchResultReason {
|
||||
// Never used in practice. Maybe remove this from here and from the rust enum?
|
||||
KEYWORD = 1;
|
||||
ORIGIN = 2;
|
||||
URL = 3;
|
||||
PREVIOUS_USE = 4;
|
||||
BOOKMARK = 5;
|
||||
// If we get real tag support, just add `optional string tags` to SearchResult below, but
|
||||
// for now expose that it was because of tags.
|
||||
TAG = 6;
|
||||
}
|
||||
|
||||
message SearchResultMessage {
|
||||
required string url = 1;
|
||||
required string title = 2;
|
||||
required int64 frecency = 3;
|
||||
repeated SearchResultReason reasons = 4 [packed = true];
|
||||
}
|
||||
|
||||
message SearchResultList {
|
||||
repeated SearchResultMessage results = 1;
|
||||
}
|
||||
|
||||
message TopFrecentSiteInfo {
|
||||
required string url = 1;
|
||||
optional string title = 2;
|
||||
}
|
||||
|
||||
message TopFrecentSiteInfos {
|
||||
repeated TopFrecentSiteInfo infos = 1;
|
||||
}
|
Разница между файлами не показана из-за своего большого размера
Загрузить разницу
|
@ -3,205 +3,18 @@
|
|||
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
|
||||
|
||||
use super::{
|
||||
BookmarkPosition, BookmarkRootGuid, BookmarkTreeNode, InsertableBookmark, InsertableFolder,
|
||||
InsertableItem, InsertableSeparator, PublicNode, RawBookmark, UpdatableBookmark,
|
||||
UpdatableFolder, UpdatableItem, UpdatableSeparator, UpdateTreeLocation,
|
||||
BookmarkPosition, BookmarkUpdateInfo, InvalidPlaceInfo, UpdatableBookmark, UpdatableFolder,
|
||||
UpdatableItem, UpdatableSeparator, UpdateTreeLocation,
|
||||
};
|
||||
|
||||
use crate::error::{InvalidPlaceInfo, Result};
|
||||
use crate::msg_types;
|
||||
use crate::error::Result;
|
||||
use crate::types::BookmarkType;
|
||||
use sync_guid::Guid as SyncGuid;
|
||||
use url::Url;
|
||||
|
||||
impl From<BookmarkTreeNode> for PublicNode {
|
||||
// TODO: Eventually this should either be a function that takes an
|
||||
// SqlInterruptScope, or we should have another version that does.
|
||||
// For now it is likely fine.
|
||||
fn from(n: BookmarkTreeNode) -> Self {
|
||||
let (date_added, last_modified) = n.created_modified();
|
||||
let mut result = Self {
|
||||
node_type: n.node_type(),
|
||||
guid: n.guid().clone(),
|
||||
date_added,
|
||||
last_modified,
|
||||
..Default::default()
|
||||
};
|
||||
|
||||
// Not the most idiomatic, but avoids a lot of duplication.
|
||||
match n {
|
||||
BookmarkTreeNode::Bookmark(b) => {
|
||||
result.title = b.title;
|
||||
result.url = Some(b.url);
|
||||
}
|
||||
BookmarkTreeNode::Separator(_) => {
|
||||
// No separator-specific properties.
|
||||
}
|
||||
BookmarkTreeNode::Folder(f) => {
|
||||
result.title = f.title;
|
||||
let own_guid = &result.guid;
|
||||
result.child_nodes = Some(
|
||||
f.children
|
||||
.into_iter()
|
||||
.enumerate()
|
||||
.map(|(i, bn)| {
|
||||
let mut child = PublicNode::from(bn);
|
||||
child.parent_guid = Some(own_guid.clone());
|
||||
child.position = i as u32;
|
||||
child
|
||||
})
|
||||
.collect(),
|
||||
);
|
||||
}
|
||||
}
|
||||
result
|
||||
}
|
||||
}
|
||||
|
||||
impl From<PublicNode> for msg_types::BookmarkNode {
|
||||
fn from(n: PublicNode) -> Self {
|
||||
let have_child_nodes = if n.node_type == BookmarkType::Folder {
|
||||
Some(n.child_nodes.is_some())
|
||||
} else {
|
||||
None
|
||||
};
|
||||
Self {
|
||||
node_type: Some(n.node_type as i32),
|
||||
guid: Some(n.guid.into_string()),
|
||||
date_added: Some(n.date_added.0 as i64),
|
||||
last_modified: Some(n.last_modified.0 as i64),
|
||||
title: n.title,
|
||||
url: n.url.map(String::from),
|
||||
parent_guid: n.parent_guid.map(|g| g.into_string()),
|
||||
position: Some(n.position),
|
||||
child_guids: n.child_guids.map_or(vec![], |child_guids| {
|
||||
child_guids
|
||||
.into_iter()
|
||||
.map(|m| m.into_string())
|
||||
.collect::<Vec<String>>()
|
||||
}),
|
||||
child_nodes: n.child_nodes.map_or(vec![], |nodes| {
|
||||
nodes
|
||||
.into_iter()
|
||||
.map(msg_types::BookmarkNode::from)
|
||||
.collect()
|
||||
}),
|
||||
have_child_nodes,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Note: this conversion is incomplete if rb is a folder!
|
||||
impl From<RawBookmark> for PublicNode {
|
||||
fn from(rb: RawBookmark) -> Self {
|
||||
Self {
|
||||
node_type: rb.bookmark_type,
|
||||
guid: rb.guid,
|
||||
parent_guid: rb.parent_guid,
|
||||
position: rb.position,
|
||||
date_added: rb.date_added,
|
||||
last_modified: rb.date_modified,
|
||||
url: rb.url,
|
||||
title: rb.title,
|
||||
child_guids: None,
|
||||
child_nodes: None,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl From<Vec<PublicNode>> for msg_types::BookmarkNodeList {
|
||||
fn from(ns: Vec<PublicNode>) -> Self {
|
||||
Self {
|
||||
nodes: ns.into_iter().map(msg_types::BookmarkNode::from).collect(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl msg_types::BookmarkNode {
|
||||
/// Get the BookmarkType, panicking if it's invalid (because it really never
|
||||
/// should be unless we have a bug somewhere).
|
||||
pub(crate) fn get_node_type(&self) -> BookmarkType {
|
||||
let value = self.node_type.unwrap();
|
||||
// Check that the cast wouldn't truncate first.
|
||||
assert!(
|
||||
value >= 0 && value <= i32::from(std::u8::MAX),
|
||||
"wildly illegal node_type: {}",
|
||||
value
|
||||
);
|
||||
|
||||
BookmarkType::from_u8(value as u8).expect("Invalid node_type")
|
||||
}
|
||||
|
||||
/// Convert the protobuf bookmark into information for insertion.
|
||||
pub fn into_insertable(self) -> Result<InsertableItem> {
|
||||
let ty = self.get_node_type();
|
||||
|
||||
let parent_guid = self
|
||||
.parent_guid
|
||||
.map(SyncGuid::from)
|
||||
.unwrap_or_else(|| BookmarkRootGuid::Unfiled.into());
|
||||
|
||||
let position = self
|
||||
.position
|
||||
.map_or(BookmarkPosition::Append, BookmarkPosition::Specific);
|
||||
|
||||
Ok(match ty {
|
||||
BookmarkType::Bookmark => InsertableItem::Bookmark(InsertableBookmark {
|
||||
parent_guid,
|
||||
position,
|
||||
title: self.title,
|
||||
// This will fail if Url is empty, but with a url parse error,
|
||||
// which is what we want.
|
||||
url: Url::parse(&self.url.unwrap_or_default())?,
|
||||
guid: None,
|
||||
date_added: None,
|
||||
last_modified: None,
|
||||
}),
|
||||
BookmarkType::Separator => InsertableItem::Separator(InsertableSeparator {
|
||||
parent_guid,
|
||||
position,
|
||||
guid: None,
|
||||
date_added: None,
|
||||
last_modified: None,
|
||||
}),
|
||||
BookmarkType::Folder => InsertableItem::Folder(InsertableFolder {
|
||||
parent_guid,
|
||||
position,
|
||||
title: self.title,
|
||||
guid: None,
|
||||
date_added: None,
|
||||
last_modified: None,
|
||||
}),
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
/// We don't require bookmark type for updates on the other side of the FFI,
|
||||
/// since the type is immutable, and iOS wants to be able to move bookmarks by
|
||||
/// GUID. We also don't/can't enforce as much in the Kotlin/Swift type system
|
||||
/// as we can/do in Rust.
|
||||
///
|
||||
/// This is a type that represents the data we get from the FFI, which we then
|
||||
/// turn into a `UpdatableItem` that we can actually use (we do this by
|
||||
/// reading the type out of the DB, but we can do that transactionally, so it's
|
||||
/// not a problem).
|
||||
///
|
||||
/// It's basically an intermediate between the protobuf message format and
|
||||
/// `UpdatableItem`, used to avoid needing to pass in the `type` to update, and
|
||||
/// to give us a place to check things that we can't enforce in Swift/Kotlin's
|
||||
/// type system, but that we do in Rust's.
|
||||
#[derive(Debug, Clone, PartialEq)]
|
||||
pub(crate) struct BookmarkUpdateInfo {
|
||||
pub guid: SyncGuid,
|
||||
pub title: Option<String>,
|
||||
pub url: Option<String>,
|
||||
pub parent_guid: Option<SyncGuid>,
|
||||
pub position: Option<u32>,
|
||||
}
|
||||
|
||||
impl BookmarkUpdateInfo {
|
||||
/// Convert the `BookmarkUpdateInfo` into information for updating, (now that
|
||||
/// we know it's node type).
|
||||
/// The functions exposed over the FFI use the same type for all inserts.
|
||||
/// This function converts that into the type our update API uses.
|
||||
pub fn into_updatable(self, ty: BookmarkType) -> Result<(SyncGuid, UpdatableItem)> {
|
||||
// Check the things that otherwise would be enforced by the type system.
|
||||
|
||||
|
@ -215,46 +28,36 @@ impl BookmarkUpdateInfo {
|
|||
|
||||
let location = match (self.parent_guid, self.position) {
|
||||
(None, None) => UpdateTreeLocation::None,
|
||||
(None, Some(pos)) => UpdateTreeLocation::Position(BookmarkPosition::Specific(pos)),
|
||||
(Some(parent_guid), pos) => UpdateTreeLocation::Parent(
|
||||
parent_guid,
|
||||
pos.map_or(BookmarkPosition::Append, BookmarkPosition::Specific),
|
||||
),
|
||||
(None, Some(pos)) => UpdateTreeLocation::Position {
|
||||
pos: BookmarkPosition::Specific { pos },
|
||||
},
|
||||
(Some(parent_guid), pos) => UpdateTreeLocation::Parent {
|
||||
guid: parent_guid,
|
||||
pos: pos.map_or(BookmarkPosition::Append, |p| BookmarkPosition::Specific {
|
||||
pos: p,
|
||||
}),
|
||||
},
|
||||
};
|
||||
|
||||
let updatable = match ty {
|
||||
BookmarkType::Bookmark => UpdatableItem::Bookmark(UpdatableBookmark {
|
||||
location,
|
||||
title: self.title,
|
||||
url: self.url.map(|u| Url::parse(&u)).transpose()?,
|
||||
}),
|
||||
BookmarkType::Separator => UpdatableItem::Separator(UpdatableSeparator { location }),
|
||||
BookmarkType::Folder => UpdatableItem::Folder(UpdatableFolder {
|
||||
location,
|
||||
title: self.title,
|
||||
}),
|
||||
BookmarkType::Bookmark => UpdatableItem::Bookmark {
|
||||
b: UpdatableBookmark {
|
||||
location,
|
||||
title: self.title,
|
||||
url: self.url.map(|u| Url::parse(&u)).transpose()?,
|
||||
},
|
||||
},
|
||||
BookmarkType::Separator => UpdatableItem::Separator {
|
||||
s: UpdatableSeparator { location },
|
||||
},
|
||||
BookmarkType::Folder => UpdatableItem::Folder {
|
||||
f: UpdatableFolder {
|
||||
location,
|
||||
title: self.title,
|
||||
},
|
||||
},
|
||||
};
|
||||
|
||||
Ok((self.guid, updatable))
|
||||
}
|
||||
}
|
||||
|
||||
impl From<msg_types::BookmarkNode> for BookmarkUpdateInfo {
|
||||
fn from(n: msg_types::BookmarkNode) -> Self {
|
||||
Self {
|
||||
// This is a bug in our code on the other side of the FFI,
|
||||
// so expect should be fine.
|
||||
guid: SyncGuid::from(n.guid.expect("Missing guid")),
|
||||
title: n.title,
|
||||
url: n.url,
|
||||
parent_guid: n.parent_guid.map(SyncGuid::from),
|
||||
position: n.position,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl From<Vec<msg_types::BookmarkNode>> for msg_types::BookmarkNodeList {
|
||||
fn from(nodes: Vec<msg_types::BookmarkNode>) -> Self {
|
||||
Self { nodes }
|
||||
}
|
||||
}
|
||||
|
|
|
@ -2,78 +2,275 @@
|
|||
* License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
|
||||
|
||||
use super::super::bookmarks::FetchDepth;
|
||||
use super::super::bookmarks::json_tree::{self, FetchDepth};
|
||||
use super::*;
|
||||
use crate::msg_types::BookmarkNode as ProtoBookmark;
|
||||
use rusqlite::Row;
|
||||
|
||||
/// This type basically exists to become a msg_types::BookmarkNode, but is
|
||||
/// slightly less of a pain to deal with in rust.
|
||||
// A helper that will ensure tests fail, but in production will make log noise instead.
|
||||
fn noisy_debug_assert_eq<T: std::cmp::PartialEq + std::fmt::Debug>(a: &T, b: &T, msg: &str) {
|
||||
debug_assert_eq!(a, b);
|
||||
if a != b {
|
||||
log::error!("check failed: {}: {:?} != {:?}", msg, a, b)
|
||||
}
|
||||
}
|
||||
|
||||
fn noisy_debug_assert(v: bool, msg: &str) {
|
||||
debug_assert!(v);
|
||||
if !v {
|
||||
log::error!("check failed: {}: expected true, got false", msg)
|
||||
}
|
||||
}
|
||||
|
||||
/// Structs we return when reading bookmarks
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct PublicNode {
|
||||
pub node_type: BookmarkType,
|
||||
pub struct BookmarkData {
|
||||
pub guid: SyncGuid,
|
||||
pub parent_guid: Option<SyncGuid>,
|
||||
// Always 0 if parent_guid is None
|
||||
pub parent_guid: SyncGuid,
|
||||
pub position: u32,
|
||||
pub date_added: Timestamp,
|
||||
pub last_modified: Timestamp,
|
||||
pub url: Option<Url>,
|
||||
pub url: Url,
|
||||
pub title: Option<String>,
|
||||
pub child_guids: Option<Vec<SyncGuid>>,
|
||||
pub child_nodes: Option<Vec<PublicNode>>,
|
||||
}
|
||||
|
||||
impl Default for PublicNode {
|
||||
fn default() -> Self {
|
||||
Self {
|
||||
// Note: we mainly want `Default::default()` for filling in the
|
||||
// missing part of struct decls.
|
||||
node_type: BookmarkType::Separator,
|
||||
guid: SyncGuid::from(""),
|
||||
parent_guid: None,
|
||||
position: 0,
|
||||
date_added: Timestamp(0),
|
||||
last_modified: Timestamp(0),
|
||||
url: None,
|
||||
title: None,
|
||||
child_guids: None,
|
||||
child_nodes: None,
|
||||
impl From<BookmarkData> for Item {
|
||||
fn from(b: BookmarkData) -> Self {
|
||||
Item::Bookmark { b }
|
||||
}
|
||||
}
|
||||
|
||||
// Only for tests because we ignore timestamps
|
||||
#[cfg(test)]
|
||||
impl PartialEq for BookmarkData {
|
||||
fn eq(&self, other: &Self) -> bool {
|
||||
self.guid == other.guid
|
||||
&& self.parent_guid == other.parent_guid
|
||||
&& self.position == other.position
|
||||
&& self.url == other.url
|
||||
&& self.title == other.title
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct Separator {
|
||||
pub guid: SyncGuid,
|
||||
pub date_added: Timestamp,
|
||||
pub last_modified: Timestamp,
|
||||
pub parent_guid: SyncGuid,
|
||||
pub position: u32,
|
||||
}
|
||||
|
||||
impl From<Separator> for Item {
|
||||
fn from(s: Separator) -> Self {
|
||||
Item::Separator { s }
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Default)]
|
||||
pub struct Folder {
|
||||
pub guid: SyncGuid,
|
||||
pub date_added: Timestamp,
|
||||
pub last_modified: Timestamp,
|
||||
pub parent_guid: Option<SyncGuid>, // Option because the root is a folder but has no parent.
|
||||
// Always 0 if parent_guid is None
|
||||
pub position: u32,
|
||||
pub title: Option<String>,
|
||||
// Depending on the specific API request, either, both, or none of these `child_*` vecs
|
||||
// will be populated.
|
||||
pub child_guids: Option<Vec<SyncGuid>>,
|
||||
pub child_nodes: Option<Vec<Item>>,
|
||||
}
|
||||
|
||||
impl From<Folder> for Item {
|
||||
fn from(f: Folder) -> Self {
|
||||
Item::Folder { f }
|
||||
}
|
||||
}
|
||||
|
||||
// The type used to update the actual item.
|
||||
#[derive(Debug, Clone)]
|
||||
pub enum Item {
|
||||
Bookmark { b: BookmarkData },
|
||||
Separator { s: Separator },
|
||||
Folder { f: Folder },
|
||||
}
|
||||
|
||||
// We allow all "common" fields from the sub-types to be getters on the
|
||||
// InsertableItem type.
|
||||
macro_rules! impl_common_bookmark_getter {
|
||||
($getter_name:ident, $T:ty) => {
|
||||
pub fn $getter_name(&self) -> &$T {
|
||||
match self {
|
||||
Item::Bookmark { b } => &b.$getter_name,
|
||||
Item::Separator { s } => &s.$getter_name,
|
||||
Item::Folder { f } => &f.$getter_name,
|
||||
}
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
impl Item {
|
||||
impl_common_bookmark_getter!(guid, SyncGuid);
|
||||
impl_common_bookmark_getter!(position, u32);
|
||||
impl_common_bookmark_getter!(date_added, Timestamp);
|
||||
impl_common_bookmark_getter!(last_modified, Timestamp);
|
||||
pub fn parent_guid(&self) -> Option<&SyncGuid> {
|
||||
match self {
|
||||
Item::Bookmark { b } => Some(&b.parent_guid),
|
||||
Item::Folder { f } => f.parent_guid.as_ref(),
|
||||
Item::Separator { s } => Some(&s.parent_guid),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl PartialEq for PublicNode {
|
||||
fn eq(&self, other: &PublicNode) -> bool {
|
||||
// Compare everything except date_added and last_modified.
|
||||
self.node_type == other.node_type
|
||||
&& self.guid == other.guid
|
||||
&& self.parent_guid == other.parent_guid
|
||||
&& self.url == other.url
|
||||
&& self.child_guids == other.child_guids
|
||||
&& self.child_nodes == other.child_nodes
|
||||
/// No simple `From` here, because json_tree doesn't give us the parent or position - it
|
||||
/// expects us to walk a tree, so we do.
|
||||
///
|
||||
/// Extra complication for the fact the root has a None parent_guid :)
|
||||
fn folder_from_node_with_parent_info(
|
||||
f: json_tree::FolderNode,
|
||||
parent_guid: Option<SyncGuid>,
|
||||
position: u32,
|
||||
depth_left: usize,
|
||||
) -> Folder {
|
||||
let guid = f.guid.expect("all items have guids");
|
||||
// We always provide child_guids, and only provide child_nodes if we are
|
||||
// going to keep recursing.
|
||||
let child_guids = Some(
|
||||
f.children
|
||||
.iter()
|
||||
.map(|child| child.guid().clone())
|
||||
.collect(),
|
||||
);
|
||||
let child_nodes = if depth_left != 0 {
|
||||
Some(
|
||||
f.children
|
||||
.into_iter()
|
||||
.enumerate()
|
||||
.map(|(child_pos, child)| {
|
||||
item_from_node_with_parent_info(
|
||||
child,
|
||||
guid.clone(),
|
||||
child_pos as u32,
|
||||
depth_left - 1,
|
||||
)
|
||||
})
|
||||
.collect(),
|
||||
)
|
||||
} else {
|
||||
None
|
||||
};
|
||||
Folder {
|
||||
guid,
|
||||
parent_guid,
|
||||
position,
|
||||
child_nodes,
|
||||
child_guids,
|
||||
title: f.title,
|
||||
date_added: f.date_added.expect("always get dates"),
|
||||
last_modified: f.last_modified.expect("always get dates"),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn fetch_bookmarks_by_url(db: &PlacesDb, url: &Url) -> Result<Vec<PublicNode>> {
|
||||
let nodes = get_raw_bookmarks_for_url(db, url)?
|
||||
fn item_from_node_with_parent_info(
|
||||
n: json_tree::BookmarkTreeNode,
|
||||
parent_guid: SyncGuid,
|
||||
position: u32,
|
||||
depth_left: usize,
|
||||
) -> Item {
|
||||
match n {
|
||||
json_tree::BookmarkTreeNode::Bookmark { b } => BookmarkData {
|
||||
guid: b.guid.expect("all items have guids"),
|
||||
parent_guid,
|
||||
position,
|
||||
url: b.url,
|
||||
title: b.title,
|
||||
date_added: b.date_added.expect("always get dates"),
|
||||
last_modified: b.last_modified.expect("always get dates"),
|
||||
}
|
||||
.into(),
|
||||
json_tree::BookmarkTreeNode::Separator { s } => Separator {
|
||||
guid: s.guid.expect("all items have guids"),
|
||||
parent_guid,
|
||||
position,
|
||||
date_added: s.date_added.expect("always get dates"),
|
||||
last_modified: s.last_modified.expect("always get dates"),
|
||||
}
|
||||
.into(),
|
||||
json_tree::BookmarkTreeNode::Folder { f } => {
|
||||
folder_from_node_with_parent_info(f, Some(parent_guid), position, depth_left).into()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Call fetch_tree_with_depth with FetchDepth::Deepest.
|
||||
/// This is the function called by the FFI when requesting the tree.
|
||||
pub fn fetch_tree(db: &PlacesDb, item_guid: &SyncGuid) -> Result<Option<Item>> {
|
||||
fetch_tree_with_depth(db, item_guid, &FetchDepth::Deepest)
|
||||
}
|
||||
|
||||
/// Call fetch_tree with a depth parameter and convert the result
|
||||
/// to an Item.
|
||||
pub fn fetch_tree_with_depth(
|
||||
db: &PlacesDb,
|
||||
item_guid: &SyncGuid,
|
||||
target_depth: &FetchDepth,
|
||||
) -> Result<Option<Item>> {
|
||||
let _tx = db.begin_transaction()?;
|
||||
let (tree, parent_guid, position) = if let Some((tree, parent_guid, position)) =
|
||||
json_tree::fetch_tree(db, item_guid, target_depth)?
|
||||
{
|
||||
(tree, parent_guid, position)
|
||||
} else {
|
||||
return Ok(None);
|
||||
};
|
||||
// parent_guid being an Option<> is a bit if a pain :(
|
||||
Ok(Some(match tree {
|
||||
json_tree::BookmarkTreeNode::Folder { f } => {
|
||||
noisy_debug_assert(
|
||||
parent_guid.is_none() ^ (f.guid.as_ref() != Some(BookmarkRootGuid::Root.guid())),
|
||||
"only root has no parent",
|
||||
);
|
||||
let depth_left = match target_depth {
|
||||
FetchDepth::Specific(v) => *v,
|
||||
FetchDepth::Deepest => usize::MAX,
|
||||
};
|
||||
folder_from_node_with_parent_info(f, parent_guid, position, depth_left).into()
|
||||
}
|
||||
_ => item_from_node_with_parent_info(
|
||||
tree,
|
||||
parent_guid.expect("must have parent"),
|
||||
position,
|
||||
0,
|
||||
),
|
||||
}))
|
||||
}
|
||||
|
||||
pub fn fetch_bookmarks_by_url(db: &PlacesDb, url: &Url) -> Result<Vec<BookmarkData>> {
|
||||
let nodes = crate::storage::bookmarks::get_raw_bookmarks_for_url(db, url)?
|
||||
.into_iter()
|
||||
.map(|rb| {
|
||||
// Cause tests to fail, but we'd rather not panic here
|
||||
// for real.
|
||||
debug_assert_eq!(rb.child_count, 0);
|
||||
debug_assert_eq!(rb.bookmark_type, BookmarkType::Bookmark);
|
||||
debug_assert_eq!(rb.url.as_ref(), Some(url));
|
||||
PublicNode {
|
||||
node_type: rb.bookmark_type,
|
||||
noisy_debug_assert_eq(&rb.child_count, &0, "child count should be zero");
|
||||
noisy_debug_assert_eq(
|
||||
&rb.bookmark_type,
|
||||
&BookmarkType::Bookmark,
|
||||
"not a bookmark!",
|
||||
);
|
||||
// We don't log URLs so we do the comparison here.
|
||||
noisy_debug_assert(rb.url.as_ref() == Some(url), "urls don't match");
|
||||
noisy_debug_assert(rb.parent_guid.is_some(), "no parent guid");
|
||||
BookmarkData {
|
||||
guid: rb.guid,
|
||||
parent_guid: rb.parent_guid,
|
||||
parent_guid: rb
|
||||
.parent_guid
|
||||
.unwrap_or_else(|| BookmarkRootGuid::Unfiled.into()),
|
||||
position: rb.position,
|
||||
date_added: rb.date_added,
|
||||
last_modified: rb.date_modified,
|
||||
url: rb.url,
|
||||
url: url.clone(),
|
||||
title: rb.title,
|
||||
child_guids: None,
|
||||
child_nodes: None,
|
||||
}
|
||||
})
|
||||
.collect::<Vec<_>>();
|
||||
|
@ -85,118 +282,48 @@ pub fn fetch_bookmarks_by_url(db: &PlacesDb, url: &Url) -> Result<Vec<PublicNode
|
|||
///
|
||||
/// If `get_direct_children` is true, it will return 1 level of folder children,
|
||||
/// otherwise it returns just their guids.
|
||||
///
|
||||
/// It also produces the protobuf message type directly, rather than
|
||||
/// add a special variant of this bookmark type just for this function.
|
||||
pub fn fetch_bookmark(
|
||||
db: &PlacesDb,
|
||||
item_guid: &SyncGuid,
|
||||
get_direct_children: bool,
|
||||
) -> Result<Option<PublicNode>> {
|
||||
) -> Result<Option<Item>> {
|
||||
let depth = if get_direct_children {
|
||||
FetchDepth::Specific(1)
|
||||
} else {
|
||||
FetchDepth::Specific(0)
|
||||
};
|
||||
let mut bookmark = match fetch_public_tree_with_depth(db, item_guid, &depth)? {
|
||||
None => return Ok(None),
|
||||
Some(b) => b,
|
||||
};
|
||||
|
||||
if get_direct_children {
|
||||
if let Some(child_nodes) = bookmark.child_nodes.as_mut() {
|
||||
for node in child_nodes {
|
||||
node.child_guids = node
|
||||
.child_nodes
|
||||
.take()
|
||||
.map(|children| children.into_iter().map(|child| child.guid).collect());
|
||||
}
|
||||
}
|
||||
} else {
|
||||
bookmark.child_guids = bookmark
|
||||
.child_nodes
|
||||
.take()
|
||||
.map(|children| children.into_iter().map(|child| child.guid).collect());
|
||||
}
|
||||
|
||||
Ok(Some(bookmark))
|
||||
fetch_tree_with_depth(db, item_guid, &depth)
|
||||
}
|
||||
|
||||
pub fn update_bookmark_from_message(db: &PlacesDb, msg: ProtoBookmark) -> Result<()> {
|
||||
let info = conversions::BookmarkUpdateInfo::from(msg);
|
||||
|
||||
let tx = db.begin_transaction()?;
|
||||
let existing = get_raw_bookmark(db, &info.guid)?
|
||||
.ok_or_else(|| InvalidPlaceInfo::NoSuchGuid(info.guid.to_string()))?;
|
||||
let (guid, updatable) = info.into_updatable(existing.bookmark_type)?;
|
||||
|
||||
update_bookmark_in_tx(db, &guid, &updatable, existing)?;
|
||||
tx.commit()?;
|
||||
Ok(())
|
||||
fn bookmark_from_row(row: &Row<'_>) -> Result<Option<BookmarkData>> {
|
||||
Ok(
|
||||
match row
|
||||
.get::<_, Option<String>>("url")?
|
||||
.and_then(|href| url::Url::parse(&href).ok())
|
||||
{
|
||||
Some(url) => Some(BookmarkData {
|
||||
guid: row.get("guid")?,
|
||||
parent_guid: row.get("parentGuid")?,
|
||||
position: row.get("position")?,
|
||||
date_added: row.get("dateAdded")?,
|
||||
last_modified: row.get("lastModified")?,
|
||||
title: row.get("title")?,
|
||||
url,
|
||||
}),
|
||||
None => None,
|
||||
},
|
||||
)
|
||||
}
|
||||
|
||||
/// Call fetch_public_tree_with_depth with FetchDepth::Deepest.
|
||||
/// This is the function called by the FFI when requesting the tree.
|
||||
pub fn fetch_public_tree(db: &PlacesDb, item_guid: &SyncGuid) -> Result<Option<PublicNode>> {
|
||||
fetch_public_tree_with_depth(db, item_guid, &FetchDepth::Deepest)
|
||||
}
|
||||
|
||||
/// Call fetch_tree with a depth parameter and convert the result
|
||||
/// to a ProtoBookmark, and ensure the requested item's position
|
||||
/// and parent info are provided as well.
|
||||
pub fn fetch_public_tree_with_depth(
|
||||
db: &PlacesDb,
|
||||
item_guid: &SyncGuid,
|
||||
target_depth: &FetchDepth,
|
||||
) -> Result<Option<PublicNode>> {
|
||||
let _tx = db.begin_transaction()?;
|
||||
let (tree, parent_guid, position) =
|
||||
if let Some((tree, parent_guid, position)) = fetch_tree(db, item_guid, target_depth)? {
|
||||
(tree, parent_guid, position)
|
||||
} else {
|
||||
return Ok(None);
|
||||
};
|
||||
|
||||
// `position` and `parent_guid` will be handled for the children of
|
||||
// `item_guid` by `PublicNode::from` automatically, however we
|
||||
// still need to fill in it's own `parent_guid` and `position`.
|
||||
let mut proto = PublicNode::from(tree);
|
||||
|
||||
if item_guid != BookmarkRootGuid::Root {
|
||||
proto.parent_guid = parent_guid;
|
||||
proto.position = position;
|
||||
}
|
||||
Ok(Some(proto))
|
||||
}
|
||||
|
||||
pub fn search_bookmarks(db: &PlacesDb, search: &str, limit: u32) -> Result<Vec<PublicNode>> {
|
||||
pub fn search_bookmarks(db: &PlacesDb, search: &str, limit: u32) -> Result<Vec<BookmarkData>> {
|
||||
let scope = db.begin_interrupt_scope();
|
||||
Ok(db
|
||||
.query_rows_into_cached::<Vec<Option<PublicNode>>, _, _, _>(
|
||||
.query_rows_into_cached::<Vec<Option<BookmarkData>>, _, _, _>(
|
||||
&SEARCH_QUERY,
|
||||
&[(":search", &search), (":limit", &limit)],
|
||||
|row| -> Result<_> {
|
||||
scope.err_if_interrupted()?;
|
||||
Ok(
|
||||
match row
|
||||
.get::<_, Option<String>>("url")?
|
||||
.and_then(|href| url::Url::parse(&href).ok())
|
||||
{
|
||||
Some(url) => Some(PublicNode {
|
||||
node_type: BookmarkType::Bookmark,
|
||||
guid: row.get("guid")?,
|
||||
parent_guid: row.get("parentGuid")?,
|
||||
position: row.get("position")?,
|
||||
date_added: row.get("dateAdded")?,
|
||||
last_modified: row.get("lastModified")?,
|
||||
title: row.get("title")?,
|
||||
url: Some(url),
|
||||
child_guids: None,
|
||||
child_nodes: None,
|
||||
}),
|
||||
None => None,
|
||||
},
|
||||
)
|
||||
bookmark_from_row(row)
|
||||
},
|
||||
)?
|
||||
.into_iter()
|
||||
|
@ -204,51 +331,15 @@ pub fn search_bookmarks(db: &PlacesDb, search: &str, limit: u32) -> Result<Vec<P
|
|||
.collect())
|
||||
}
|
||||
|
||||
pub fn recent_bookmarks(db: &PlacesDb, limit: u32) -> Result<Vec<PublicNode>> {
|
||||
pub fn recent_bookmarks(db: &PlacesDb, limit: u32) -> Result<Vec<BookmarkData>> {
|
||||
let scope = db.begin_interrupt_scope();
|
||||
let sql = format!(
|
||||
"SELECT
|
||||
b.guid,
|
||||
p.guid AS parentGuid,
|
||||
b.position,
|
||||
b.dateAdded,
|
||||
b.lastModified,
|
||||
NULLIF(b.title, '') AS title,
|
||||
h.url AS url
|
||||
FROM moz_bookmarks b
|
||||
JOIN moz_bookmarks p ON p.id = b.parent
|
||||
JOIN moz_places h ON h.id = b.fk
|
||||
WHERE b.type = {bookmark_type}
|
||||
ORDER BY b.dateAdded DESC
|
||||
LIMIT :limit",
|
||||
bookmark_type = BookmarkType::Bookmark as u8,
|
||||
);
|
||||
Ok(db
|
||||
.query_rows_into_cached::<Vec<Option<PublicNode>>, _, _, _>(
|
||||
&sql,
|
||||
.query_rows_into_cached::<Vec<Option<BookmarkData>>, _, _, _>(
|
||||
&RECENT_BOOKMARKS_QUERY,
|
||||
&[(":limit", &limit)],
|
||||
|row| -> Result<_> {
|
||||
scope.err_if_interrupted()?;
|
||||
Ok(
|
||||
match row
|
||||
.get::<_, Option<String>>("url")?
|
||||
.and_then(|href| url::Url::parse(&href).ok())
|
||||
{
|
||||
Some(url) => Some(PublicNode {
|
||||
node_type: BookmarkType::Bookmark,
|
||||
guid: row.get("guid")?,
|
||||
parent_guid: row.get("parentGuid")?,
|
||||
position: row.get("position")?,
|
||||
date_added: row.get("dateAdded")?,
|
||||
last_modified: row.get("lastModified")?,
|
||||
title: row.get("title")?,
|
||||
url: Some(url),
|
||||
child_guids: None,
|
||||
child_nodes: None,
|
||||
}),
|
||||
None => None,
|
||||
},
|
||||
)
|
||||
bookmark_from_row(row)
|
||||
},
|
||||
)?
|
||||
.into_iter()
|
||||
|
@ -292,7 +383,26 @@ lazy_static::lazy_static! {
|
|||
match_bhvr = crate::match_impl::MatchBehavior::Anywhere as u32,
|
||||
search_bhvr = crate::match_impl::SearchBehavior::BOOKMARK.bits(),
|
||||
);
|
||||
|
||||
pub static ref RECENT_BOOKMARKS_QUERY: String = format!(
|
||||
"SELECT
|
||||
b.guid,
|
||||
p.guid AS parentGuid,
|
||||
b.position,
|
||||
b.dateAdded,
|
||||
b.lastModified,
|
||||
NULLIF(b.title, '') AS title,
|
||||
h.url AS url
|
||||
FROM moz_bookmarks b
|
||||
JOIN moz_bookmarks p ON p.id = b.parent
|
||||
JOIN moz_places h ON h.id = b.fk
|
||||
WHERE b.type = {bookmark_type}
|
||||
ORDER BY b.dateAdded DESC
|
||||
LIMIT :limit",
|
||||
bookmark_type = BookmarkType::Bookmark as u8
|
||||
);
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod test {
|
||||
use super::*;
|
||||
|
@ -336,15 +446,12 @@ mod test {
|
|||
assert_eq!(bmks.len(), 2);
|
||||
assert_eq!(
|
||||
bmks[0],
|
||||
PublicNode {
|
||||
node_type: BookmarkType::Bookmark,
|
||||
BookmarkData {
|
||||
guid: "bookmark2___".into(),
|
||||
title: Some("yes 1".into()),
|
||||
url: Some(url.clone()),
|
||||
parent_guid: Some(BookmarkRootGuid::Unfiled.into()),
|
||||
url: url.clone(),
|
||||
parent_guid: BookmarkRootGuid::Unfiled.into(),
|
||||
position: 1,
|
||||
child_guids: None,
|
||||
child_nodes: None,
|
||||
// Ignored by our PartialEq
|
||||
date_added: Timestamp(0),
|
||||
last_modified: Timestamp(0),
|
||||
|
@ -352,15 +459,12 @@ mod test {
|
|||
);
|
||||
assert_eq!(
|
||||
bmks[1],
|
||||
PublicNode {
|
||||
node_type: BookmarkType::Bookmark,
|
||||
BookmarkData {
|
||||
guid: "bookmark4___".into(),
|
||||
title: Some("yes 2".into()),
|
||||
url: Some(url),
|
||||
parent_guid: Some(BookmarkRootGuid::Unfiled.into()),
|
||||
url,
|
||||
parent_guid: BookmarkRootGuid::Unfiled.into(),
|
||||
position: 3,
|
||||
child_guids: None,
|
||||
child_nodes: None,
|
||||
// Ignored by our PartialEq
|
||||
date_added: Timestamp(0),
|
||||
last_modified: Timestamp(0),
|
||||
|
@ -457,13 +561,10 @@ mod test {
|
|||
];
|
||||
for (got, want) in bmks.iter().zip(expect.iter()) {
|
||||
assert_eq!(got.guid.as_str(), want.0);
|
||||
assert_eq!(got.url.as_ref().unwrap(), &url::Url::parse(want.1).unwrap());
|
||||
assert_eq!(got.url, url::Url::parse(want.1).unwrap());
|
||||
assert_eq!(got.title.as_ref().unwrap_or(&String::new()), want.2);
|
||||
assert_eq!(got.position, want.3);
|
||||
assert_eq!(got.parent_guid.as_ref().unwrap(), BookmarkRootGuid::Unfiled);
|
||||
assert_eq!(got.node_type, BookmarkType::Bookmark);
|
||||
assert!(got.child_guids.is_none());
|
||||
assert!(got.child_nodes.is_none());
|
||||
assert_eq!(got.parent_guid, BookmarkRootGuid::Unfiled);
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
@ -499,52 +600,78 @@ mod test {
|
|||
"badurl",
|
||||
)
|
||||
.guid;
|
||||
assert_eq!(fetch_bookmark(&conns.read, &guid_bad, false)?, None);
|
||||
assert!(fetch_bookmark(&conns.read, &guid_bad, false)?.is_none());
|
||||
|
||||
// Now fetch the entire tree.
|
||||
let root = fetch_bookmark(&conns.read, BookmarkRootGuid::Root.guid(), false)?.unwrap();
|
||||
|
||||
let root = match fetch_bookmark(&conns.read, BookmarkRootGuid::Root.guid(), false)?.unwrap()
|
||||
{
|
||||
Item::Folder { f } => f,
|
||||
_ => panic!("root not a folder?"),
|
||||
};
|
||||
assert!(root.child_guids.is_some());
|
||||
assert!(root.child_nodes.is_none());
|
||||
assert_eq!(root.child_guids.unwrap().len(), 4);
|
||||
|
||||
let root = fetch_bookmark(&conns.read, BookmarkRootGuid::Root.guid(), true)?.unwrap();
|
||||
let root = match fetch_bookmark(&conns.read, BookmarkRootGuid::Root.guid(), true)?.unwrap()
|
||||
{
|
||||
Item::Folder { f } => f,
|
||||
_ => panic!("not a folder?"),
|
||||
};
|
||||
|
||||
assert!(root.child_guids.is_none());
|
||||
assert!(root.child_nodes.is_some());
|
||||
assert!(root.child_guids.is_some());
|
||||
assert_eq!(
|
||||
root.child_guids.unwrap(),
|
||||
root.child_nodes
|
||||
.as_ref()
|
||||
.unwrap()
|
||||
.iter()
|
||||
.map(|c| c.guid().clone())
|
||||
.collect::<Vec<SyncGuid>>()
|
||||
);
|
||||
let root_children = root.child_nodes.unwrap();
|
||||
assert_eq!(root_children.len(), 4);
|
||||
for child in root_children {
|
||||
assert!(child.child_guids.is_some());
|
||||
assert!(child.child_nodes.is_none());
|
||||
if child.guid == BookmarkRootGuid::Mobile {
|
||||
assert_eq!(
|
||||
child.child_guids.unwrap(),
|
||||
&[
|
||||
SyncGuid::from("bookmark1___"),
|
||||
SyncGuid::from("bookmark2___")
|
||||
]
|
||||
);
|
||||
} else {
|
||||
assert_eq!(child.child_guids.unwrap().len(), 0);
|
||||
match child {
|
||||
Item::Folder { f: child } => {
|
||||
assert!(child.child_guids.is_some());
|
||||
assert!(child.child_nodes.is_none());
|
||||
if child.guid == BookmarkRootGuid::Mobile {
|
||||
assert_eq!(
|
||||
child.child_guids.unwrap(),
|
||||
&[
|
||||
SyncGuid::from("bookmark1___"),
|
||||
SyncGuid::from("bookmark2___")
|
||||
]
|
||||
);
|
||||
}
|
||||
}
|
||||
_ => panic!("all root children should be folders"),
|
||||
}
|
||||
}
|
||||
|
||||
let unfiled =
|
||||
fetch_bookmark(&conns.read, BookmarkRootGuid::Unfiled.guid(), false)?.unwrap();
|
||||
match fetch_bookmark(&conns.read, BookmarkRootGuid::Unfiled.guid(), false)?.unwrap() {
|
||||
Item::Folder { f } => f,
|
||||
_ => panic!("not a folder?"),
|
||||
};
|
||||
|
||||
assert!(unfiled.child_guids.is_some());
|
||||
assert!(unfiled.child_nodes.is_none());
|
||||
assert_eq!(unfiled.child_guids.unwrap().len(), 0);
|
||||
|
||||
let unfiled = fetch_bookmark(&conns.read, BookmarkRootGuid::Unfiled.guid(), true)?.unwrap();
|
||||
assert!(unfiled.child_guids.is_none());
|
||||
let unfiled =
|
||||
match fetch_bookmark(&conns.read, BookmarkRootGuid::Unfiled.guid(), true)?.unwrap() {
|
||||
Item::Folder { f } => f,
|
||||
_ => panic!("not a folder?"),
|
||||
};
|
||||
assert!(unfiled.child_guids.is_some());
|
||||
assert!(unfiled.child_nodes.is_some());
|
||||
assert_eq!(unfiled.child_nodes.unwrap().len(), 0);
|
||||
|
||||
assert_eq!(
|
||||
fetch_bookmark(&conns.read, &"not_exist___".into(), true)?,
|
||||
None
|
||||
);
|
||||
assert_eq!(unfiled.child_nodes.unwrap().len(), 0);
|
||||
assert_eq!(unfiled.child_guids.unwrap().len(), 0);
|
||||
|
||||
assert!(fetch_bookmark(&conns.read, &"not_exist___".into(), true)?.is_none());
|
||||
Ok(())
|
||||
}
|
||||
#[test]
|
||||
|
@ -575,33 +702,75 @@ mod test {
|
|||
"badurl",
|
||||
);
|
||||
|
||||
let root = fetch_public_tree(&conns.read, BookmarkRootGuid::Root.guid())?.unwrap();
|
||||
let root = match fetch_tree(&conns.read, BookmarkRootGuid::Root.guid())?.unwrap() {
|
||||
Item::Folder { f } => f,
|
||||
_ => panic!("root not a folder?"),
|
||||
};
|
||||
assert!(root.parent_guid.is_none());
|
||||
assert_eq!(root.position, 0);
|
||||
|
||||
assert!(root.child_guids.is_none());
|
||||
assert!(root.child_guids.is_some());
|
||||
let children = root.child_nodes.as_ref().unwrap();
|
||||
assert_eq!(
|
||||
root.child_guids.unwrap(),
|
||||
children
|
||||
.iter()
|
||||
.map(|c| c.guid().clone())
|
||||
.collect::<Vec<SyncGuid>>()
|
||||
);
|
||||
let mut mobile_pos = None;
|
||||
for (i, c) in children.iter().enumerate() {
|
||||
assert_eq!(i as u32, c.position);
|
||||
assert_eq!(c.parent_guid.as_ref().unwrap(), &root.guid);
|
||||
assert!(c.child_guids.is_none());
|
||||
if c.guid == BookmarkRootGuid::Mobile {
|
||||
mobile_pos = Some(c.position);
|
||||
}
|
||||
for (i2, c2) in c.child_nodes.as_ref().unwrap().iter().enumerate() {
|
||||
assert_eq!(i2 as u32, c2.position);
|
||||
assert_eq!(c2.parent_guid.as_ref().unwrap(), &c.guid);
|
||||
assert_eq!(i as u32, *c.position());
|
||||
assert_eq!(c.parent_guid().unwrap(), &root.guid);
|
||||
match c {
|
||||
Item::Folder { f } => {
|
||||
// all out roots are here, so check it is mobile.
|
||||
if f.guid == BookmarkRootGuid::Mobile {
|
||||
assert!(f.child_guids.is_some());
|
||||
assert!(f.child_nodes.is_some());
|
||||
let child_nodes = f.child_nodes.as_ref().unwrap();
|
||||
assert_eq!(
|
||||
f.child_guids.as_ref().unwrap(),
|
||||
&child_nodes
|
||||
.iter()
|
||||
.map(|c| c.guid().clone())
|
||||
.collect::<Vec<SyncGuid>>()
|
||||
);
|
||||
mobile_pos = Some(i as u32);
|
||||
let b = match &child_nodes[0] {
|
||||
Item::Bookmark { b } => b,
|
||||
_ => panic!("expect a bookmark"),
|
||||
};
|
||||
assert_eq!(b.position, 0);
|
||||
assert_eq!(b.guid, SyncGuid::from("bookmark1___"));
|
||||
assert_eq!(b.url, Url::parse("https://www.example1.com/").unwrap());
|
||||
|
||||
let b = match &child_nodes[1] {
|
||||
Item::Bookmark { b } => b,
|
||||
_ => panic!("expect a bookmark"),
|
||||
};
|
||||
assert_eq!(b.position, 1);
|
||||
assert_eq!(b.guid, SyncGuid::from("bookmark2___"));
|
||||
assert_eq!(b.url, Url::parse("https://www.example2.com/").unwrap());
|
||||
}
|
||||
}
|
||||
_ => panic!("unexpected type"),
|
||||
}
|
||||
}
|
||||
// parent_guid/position for the directly returned node is filled in separately,
|
||||
// so make sure it works for non-root nodes too.
|
||||
let mobile = fetch_public_tree(&conns.read, BookmarkRootGuid::Mobile.guid())?.unwrap();
|
||||
let mobile = match fetch_tree(&conns.read, BookmarkRootGuid::Mobile.guid())?.unwrap() {
|
||||
Item::Folder { f } => f,
|
||||
_ => panic!("not a folder?"),
|
||||
};
|
||||
assert_eq!(mobile.parent_guid.unwrap(), BookmarkRootGuid::Root);
|
||||
assert_eq!(mobile.position, mobile_pos.unwrap());
|
||||
|
||||
let bm1 = fetch_public_tree(&conns.read, &SyncGuid::from("bookmark1___"))?.unwrap();
|
||||
assert_eq!(bm1.parent_guid.unwrap(), BookmarkRootGuid::Mobile);
|
||||
let bm1 = match fetch_tree(&conns.read, &SyncGuid::from("bookmark1___"))?.unwrap() {
|
||||
Item::Bookmark { b } => b,
|
||||
_ => panic!("not a bookmark?"),
|
||||
};
|
||||
assert_eq!(bm1.parent_guid, BookmarkRootGuid::Mobile);
|
||||
assert_eq!(bm1.position, 0);
|
||||
|
||||
Ok(())
|
||||
|
@ -666,15 +835,12 @@ mod test {
|
|||
|
||||
assert_eq!(
|
||||
bmks[0],
|
||||
PublicNode {
|
||||
node_type: BookmarkType::Bookmark,
|
||||
BookmarkData {
|
||||
guid: "bookmark5___".into(),
|
||||
title: Some("b5".into()),
|
||||
url: Some(Url::parse("https://www.example5.com/").unwrap()),
|
||||
parent_guid: Some(BookmarkRootGuid::Unfiled.into()),
|
||||
url: Url::parse("https://www.example5.com/").unwrap(),
|
||||
parent_guid: BookmarkRootGuid::Unfiled.into(),
|
||||
position: 5,
|
||||
child_guids: None,
|
||||
child_nodes: None,
|
||||
// Ignored by our PartialEq
|
||||
date_added: Timestamp(0),
|
||||
last_modified: Timestamp(0),
|
||||
|
@ -682,15 +848,12 @@ mod test {
|
|||
);
|
||||
assert_eq!(
|
||||
bmks[1],
|
||||
PublicNode {
|
||||
node_type: BookmarkType::Bookmark,
|
||||
BookmarkData {
|
||||
guid: "bookmark4___".into(),
|
||||
title: Some("b4".into()),
|
||||
url: Some(Url::parse("https://www.example4.com/").unwrap()),
|
||||
parent_guid: Some(BookmarkRootGuid::Unfiled.into()),
|
||||
url: Url::parse("https://www.example4.com/").unwrap(),
|
||||
parent_guid: BookmarkRootGuid::Unfiled.into(),
|
||||
position: 3,
|
||||
child_guids: None,
|
||||
child_nodes: None,
|
||||
// Ignored by our PartialEq
|
||||
date_added: Timestamp(0),
|
||||
last_modified: Timestamp(0),
|
||||
|
@ -698,15 +861,12 @@ mod test {
|
|||
);
|
||||
assert_eq!(
|
||||
bmks[2],
|
||||
PublicNode {
|
||||
node_type: BookmarkType::Bookmark,
|
||||
BookmarkData {
|
||||
guid: "bookmark3___".into(),
|
||||
title: Some("b3".into()),
|
||||
url: Some(Url::parse("https://www.example3.com/").unwrap()),
|
||||
parent_guid: Some(BookmarkRootGuid::Unfiled.into()),
|
||||
url: Url::parse("https://www.example3.com/").unwrap(),
|
||||
parent_guid: BookmarkRootGuid::Unfiled.into(),
|
||||
position: 2,
|
||||
child_guids: None,
|
||||
child_nodes: None,
|
||||
// Ignored by our PartialEq
|
||||
date_added: Timestamp(0),
|
||||
last_modified: Timestamp(0),
|
|
@ -0,0 +1,808 @@
|
|||
/* This Source Code Form is subject to the terms of the Mozilla Public
|
||||
* License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
|
||||
|
||||
// This supports inserting and fetching an entire bookmark tree via JSON
|
||||
// compatible data structures.
|
||||
// It's currently used only by tests, examples and our utilities for importing
|
||||
// from a desktop JSON exports.
|
||||
//
|
||||
// None of our "real" consumers currently require JSON compatibility, so try
|
||||
// and avoid using this if you can!
|
||||
// (We could possibly put this behind a feature flag?)
|
||||
|
||||
use crate::error::Result;
|
||||
use crate::types::BookmarkType;
|
||||
//#[cfg(test)]
|
||||
use crate::db::PlacesDb;
|
||||
use rusqlite::Row;
|
||||
use sql_support::ConnExt;
|
||||
use std::collections::HashMap;
|
||||
use sync_guid::Guid as SyncGuid;
|
||||
use types::Timestamp;
|
||||
use url::Url;
|
||||
|
||||
use super::{
|
||||
BookmarkPosition, InsertableBookmark, InsertableFolder, InsertableItem, InsertableSeparator,
|
||||
RowId,
|
||||
};
|
||||
|
||||
use serde::{
|
||||
de::{Deserialize, Deserializer},
|
||||
ser::{Serialize, SerializeStruct, Serializer},
|
||||
};
|
||||
use serde_derive::*;
|
||||
|
||||
/// Support for inserting and fetching a tree. Same limitations as desktop.
|
||||
/// Note that the guids are optional when inserting a tree. They will always
|
||||
/// have values when fetching it.
|
||||
|
||||
// For testing purposes we implement PartialEq, such that optional fields are
|
||||
// ignored in the comparison. This allows tests to construct a tree with
|
||||
// missing fields and be able to compare against a tree with all fields (such
|
||||
// as one exported from the DB)
|
||||
#[cfg(test)]
|
||||
fn cmp_options<T: PartialEq>(s: &Option<T>, o: &Option<T>) -> bool {
|
||||
match (s, o) {
|
||||
(None, None) => true,
|
||||
(None, Some(_)) => true,
|
||||
(Some(_), None) => true,
|
||||
(s, o) => s == o,
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct BookmarkNode {
|
||||
pub guid: Option<SyncGuid>,
|
||||
pub date_added: Option<Timestamp>,
|
||||
pub last_modified: Option<Timestamp>,
|
||||
pub title: Option<String>,
|
||||
pub url: Url,
|
||||
}
|
||||
|
||||
impl From<BookmarkNode> for BookmarkTreeNode {
|
||||
fn from(b: BookmarkNode) -> Self {
|
||||
BookmarkTreeNode::Bookmark { b }
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
impl PartialEq for BookmarkNode {
|
||||
fn eq(&self, other: &BookmarkNode) -> bool {
|
||||
cmp_options(&self.guid, &other.guid)
|
||||
&& cmp_options(&self.date_added, &other.date_added)
|
||||
&& cmp_options(&self.last_modified, &other.last_modified)
|
||||
&& cmp_options(&self.title, &other.title)
|
||||
&& self.url == other.url
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Default)]
|
||||
pub struct SeparatorNode {
|
||||
pub guid: Option<SyncGuid>,
|
||||
pub date_added: Option<Timestamp>,
|
||||
pub last_modified: Option<Timestamp>,
|
||||
}
|
||||
|
||||
impl From<SeparatorNode> for BookmarkTreeNode {
|
||||
fn from(s: SeparatorNode) -> Self {
|
||||
BookmarkTreeNode::Separator { s }
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
impl PartialEq for SeparatorNode {
|
||||
fn eq(&self, other: &SeparatorNode) -> bool {
|
||||
cmp_options(&self.guid, &other.guid)
|
||||
&& cmp_options(&self.date_added, &other.date_added)
|
||||
&& cmp_options(&self.last_modified, &other.last_modified)
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Default)]
|
||||
pub struct FolderNode {
|
||||
pub guid: Option<SyncGuid>,
|
||||
pub date_added: Option<Timestamp>,
|
||||
pub last_modified: Option<Timestamp>,
|
||||
pub title: Option<String>,
|
||||
pub children: Vec<BookmarkTreeNode>,
|
||||
}
|
||||
|
||||
impl From<FolderNode> for BookmarkTreeNode {
|
||||
fn from(f: FolderNode) -> Self {
|
||||
BookmarkTreeNode::Folder { f }
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
impl PartialEq for FolderNode {
|
||||
fn eq(&self, other: &FolderNode) -> bool {
|
||||
cmp_options(&self.guid, &other.guid)
|
||||
&& cmp_options(&self.date_added, &other.date_added)
|
||||
&& cmp_options(&self.last_modified, &other.last_modified)
|
||||
&& cmp_options(&self.title, &other.title)
|
||||
&& self.children == other.children
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
#[cfg_attr(test, derive(PartialEq))]
|
||||
pub enum BookmarkTreeNode {
|
||||
Bookmark { b: BookmarkNode },
|
||||
Separator { s: SeparatorNode },
|
||||
Folder { f: FolderNode },
|
||||
}
|
||||
|
||||
impl BookmarkTreeNode {
|
||||
pub fn node_type(&self) -> BookmarkType {
|
||||
match self {
|
||||
BookmarkTreeNode::Bookmark { .. } => BookmarkType::Bookmark,
|
||||
BookmarkTreeNode::Folder { .. } => BookmarkType::Folder,
|
||||
BookmarkTreeNode::Separator { .. } => BookmarkType::Separator,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn guid(&self) -> &SyncGuid {
|
||||
let guid = match self {
|
||||
BookmarkTreeNode::Bookmark { b } => b.guid.as_ref(),
|
||||
BookmarkTreeNode::Folder { f } => f.guid.as_ref(),
|
||||
BookmarkTreeNode::Separator { s } => s.guid.as_ref(),
|
||||
};
|
||||
// Can this happen? Why is this an Option?
|
||||
guid.expect("Missing guid?")
|
||||
}
|
||||
|
||||
pub fn created_modified(&self) -> (Timestamp, Timestamp) {
|
||||
let (created, modified) = match self {
|
||||
BookmarkTreeNode::Bookmark { b } => (b.date_added, b.last_modified),
|
||||
BookmarkTreeNode::Folder { f } => (f.date_added, f.last_modified),
|
||||
BookmarkTreeNode::Separator { s } => (s.date_added, s.last_modified),
|
||||
};
|
||||
(
|
||||
created.unwrap_or_else(Timestamp::now),
|
||||
modified.unwrap_or_else(Timestamp::now),
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
// Serde makes it tricky to serialize what we need here - a 'type' from the
|
||||
// enum and then a flattened variant struct. So we gotta do it manually.
|
||||
impl Serialize for BookmarkTreeNode {
|
||||
fn serialize<S>(&self, serializer: S) -> std::result::Result<S::Ok, S::Error>
|
||||
where
|
||||
S: Serializer,
|
||||
{
|
||||
let mut state = serializer.serialize_struct("BookmarkTreeNode", 2)?;
|
||||
match self {
|
||||
BookmarkTreeNode::Bookmark { b } => {
|
||||
state.serialize_field("type", &BookmarkType::Bookmark)?;
|
||||
state.serialize_field("guid", &b.guid)?;
|
||||
state.serialize_field("date_added", &b.date_added)?;
|
||||
state.serialize_field("last_modified", &b.last_modified)?;
|
||||
state.serialize_field("title", &b.title)?;
|
||||
state.serialize_field("url", &b.url.to_string())?;
|
||||
}
|
||||
BookmarkTreeNode::Separator { s } => {
|
||||
state.serialize_field("type", &BookmarkType::Separator)?;
|
||||
state.serialize_field("guid", &s.guid)?;
|
||||
state.serialize_field("date_added", &s.date_added)?;
|
||||
state.serialize_field("last_modified", &s.last_modified)?;
|
||||
}
|
||||
BookmarkTreeNode::Folder { f } => {
|
||||
state.serialize_field("type", &BookmarkType::Folder)?;
|
||||
state.serialize_field("guid", &f.guid)?;
|
||||
state.serialize_field("date_added", &f.date_added)?;
|
||||
state.serialize_field("last_modified", &f.last_modified)?;
|
||||
state.serialize_field("title", &f.title)?;
|
||||
state.serialize_field("children", &f.children)?;
|
||||
}
|
||||
};
|
||||
state.end()
|
||||
}
|
||||
}
|
||||
|
||||
impl<'de> Deserialize<'de> for BookmarkTreeNode {
|
||||
fn deserialize<D>(deserializer: D) -> std::result::Result<Self, D::Error>
|
||||
where
|
||||
D: Deserializer<'de>,
|
||||
{
|
||||
// *sob* - a union of fields we post-process.
|
||||
#[derive(Debug, Default, Deserialize)]
|
||||
#[serde(default)]
|
||||
struct Mapping {
|
||||
#[serde(rename = "type")]
|
||||
bookmark_type: u8,
|
||||
guid: Option<SyncGuid>,
|
||||
date_added: Option<Timestamp>,
|
||||
last_modified: Option<Timestamp>,
|
||||
title: Option<String>,
|
||||
url: Option<String>,
|
||||
children: Vec<BookmarkTreeNode>,
|
||||
}
|
||||
let m = Mapping::deserialize(deserializer)?;
|
||||
|
||||
let url = m.url.as_ref().and_then(|u| match Url::parse(u) {
|
||||
Err(e) => {
|
||||
log::warn!(
|
||||
"ignoring invalid url for {}: {:?}",
|
||||
m.guid.as_ref().map(AsRef::as_ref).unwrap_or("<no guid>"),
|
||||
e
|
||||
);
|
||||
None
|
||||
}
|
||||
Ok(parsed) => Some(parsed),
|
||||
});
|
||||
|
||||
let bookmark_type = BookmarkType::from_u8_with_valid_url(m.bookmark_type, || url.is_some());
|
||||
Ok(match bookmark_type {
|
||||
BookmarkType::Bookmark => BookmarkNode {
|
||||
guid: m.guid,
|
||||
date_added: m.date_added,
|
||||
last_modified: m.last_modified,
|
||||
title: m.title,
|
||||
url: url.unwrap(),
|
||||
}
|
||||
.into(),
|
||||
BookmarkType::Separator => SeparatorNode {
|
||||
guid: m.guid,
|
||||
date_added: m.date_added,
|
||||
last_modified: m.last_modified,
|
||||
}
|
||||
.into(),
|
||||
BookmarkType::Folder => FolderNode {
|
||||
guid: m.guid,
|
||||
date_added: m.date_added,
|
||||
last_modified: m.last_modified,
|
||||
title: m.title,
|
||||
children: m.children,
|
||||
}
|
||||
.into(),
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
impl From<BookmarkTreeNode> for InsertableItem {
|
||||
fn from(node: BookmarkTreeNode) -> Self {
|
||||
match node {
|
||||
BookmarkTreeNode::Bookmark { b } => InsertableBookmark {
|
||||
parent_guid: SyncGuid::empty(),
|
||||
position: BookmarkPosition::Append,
|
||||
date_added: b.date_added,
|
||||
last_modified: b.last_modified,
|
||||
guid: b.guid,
|
||||
url: b.url,
|
||||
title: b.title,
|
||||
}
|
||||
.into(),
|
||||
BookmarkTreeNode::Separator { s } => InsertableSeparator {
|
||||
parent_guid: SyncGuid::empty(),
|
||||
position: BookmarkPosition::Append,
|
||||
date_added: s.date_added,
|
||||
last_modified: s.last_modified,
|
||||
guid: s.guid,
|
||||
}
|
||||
.into(),
|
||||
BookmarkTreeNode::Folder { f } => InsertableFolder {
|
||||
parent_guid: SyncGuid::empty(),
|
||||
position: BookmarkPosition::Append,
|
||||
date_added: f.date_added,
|
||||
last_modified: f.last_modified,
|
||||
guid: f.guid,
|
||||
title: f.title,
|
||||
children: f.children.into_iter().map(Into::into).collect(),
|
||||
}
|
||||
.into(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod test_serialize {
|
||||
use super::*;
|
||||
use serde_json::json;
|
||||
|
||||
#[test]
|
||||
fn test_tree_serialize() -> Result<()> {
|
||||
let guid = SyncGuid::random();
|
||||
let tree = BookmarkTreeNode::Folder {
|
||||
f: FolderNode {
|
||||
guid: Some(guid.clone()),
|
||||
date_added: None,
|
||||
last_modified: None,
|
||||
title: None,
|
||||
children: vec![BookmarkTreeNode::Bookmark {
|
||||
b: BookmarkNode {
|
||||
guid: None,
|
||||
date_added: None,
|
||||
last_modified: None,
|
||||
title: Some("the bookmark".into()),
|
||||
url: Url::parse("https://www.example.com")?,
|
||||
},
|
||||
}],
|
||||
},
|
||||
};
|
||||
// round-trip the tree via serde.
|
||||
let json = serde_json::to_string_pretty(&tree)?;
|
||||
let deser: BookmarkTreeNode = serde_json::from_str(&json)?;
|
||||
assert_eq!(tree, deser);
|
||||
// and check against the simplest json repr of the tree, which checks
|
||||
// our PartialEq implementation.
|
||||
let jtree = json!({
|
||||
"type": 2,
|
||||
"guid": &guid,
|
||||
"children" : [
|
||||
{
|
||||
"type": 1,
|
||||
"title": "the bookmark",
|
||||
"url": "https://www.example.com/"
|
||||
}
|
||||
]
|
||||
});
|
||||
let deser_tree: BookmarkTreeNode = serde_json::from_value(jtree).expect("should deser");
|
||||
assert_eq!(tree, deser_tree);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_tree_invalid() {
|
||||
let jtree = json!({
|
||||
"type": 2,
|
||||
"children" : [
|
||||
{
|
||||
"type": 1,
|
||||
"title": "bookmark with invalid URL",
|
||||
"url": "invalid_url"
|
||||
},
|
||||
{
|
||||
"type": 1,
|
||||
"title": "bookmark with missing URL",
|
||||
},
|
||||
{
|
||||
"title": "bookmark with missing type, no URL",
|
||||
},
|
||||
{
|
||||
"title": "bookmark with missing type, valid URL",
|
||||
"url": "http://example.com"
|
||||
},
|
||||
|
||||
]
|
||||
});
|
||||
let deser_tree: BookmarkTreeNode = serde_json::from_value(jtree).expect("should deser");
|
||||
let folder = match deser_tree {
|
||||
BookmarkTreeNode::Folder { f } => f,
|
||||
_ => panic!("must be a folder"),
|
||||
};
|
||||
|
||||
let children = folder.children;
|
||||
assert_eq!(children.len(), 4);
|
||||
|
||||
assert!(match &children[0] {
|
||||
BookmarkTreeNode::Folder { f } =>
|
||||
f.title == Some("bookmark with invalid URL".to_string()),
|
||||
_ => false,
|
||||
});
|
||||
assert!(match &children[1] {
|
||||
BookmarkTreeNode::Folder { f } =>
|
||||
f.title == Some("bookmark with missing URL".to_string()),
|
||||
_ => false,
|
||||
});
|
||||
assert!(match &children[2] {
|
||||
BookmarkTreeNode::Folder { f } => {
|
||||
f.title == Some("bookmark with missing type, no URL".to_string())
|
||||
}
|
||||
_ => false,
|
||||
});
|
||||
assert!(match &children[3] {
|
||||
BookmarkTreeNode::Bookmark { b } => {
|
||||
b.title == Some("bookmark with missing type, valid URL".to_string())
|
||||
}
|
||||
_ => false,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
pub fn insert_tree(db: &PlacesDb, tree: FolderNode) -> Result<()> {
|
||||
// This API is strange - we don't add `tree`, but just use it for the parent.
|
||||
// It's only used for json importing, so we can live with a strange API :)
|
||||
let parent = tree.guid.expect("inserting a tree without the root guid");
|
||||
let tx = db.begin_transaction()?;
|
||||
for child in tree.children {
|
||||
let mut insertable: InsertableItem = child.into();
|
||||
assert!(
|
||||
insertable.parent_guid().is_empty(),
|
||||
"can't specify a parent inserting a tree"
|
||||
);
|
||||
insertable.set_parent_guid(parent.clone());
|
||||
crate::storage::bookmarks::insert_bookmark_in_tx(db, insertable)?;
|
||||
}
|
||||
crate::storage::delete_pending_temp_tables(db)?;
|
||||
tx.commit()?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn inflate(
|
||||
parent: &mut BookmarkTreeNode,
|
||||
pseudo_tree: &mut HashMap<SyncGuid, Vec<BookmarkTreeNode>>,
|
||||
) {
|
||||
if let BookmarkTreeNode::Folder { f: parent } = parent {
|
||||
if let Some(children) = parent
|
||||
.guid
|
||||
.as_ref()
|
||||
.and_then(|guid| pseudo_tree.remove(guid))
|
||||
{
|
||||
parent.children = children;
|
||||
for child in &mut parent.children {
|
||||
inflate(child, pseudo_tree);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
struct FetchedTreeRow {
|
||||
level: u32,
|
||||
_id: RowId,
|
||||
guid: SyncGuid,
|
||||
// parent and parent_guid are Option<> only to handle the root - we would
|
||||
// assert but they aren't currently used.
|
||||
_parent: Option<RowId>,
|
||||
parent_guid: Option<SyncGuid>,
|
||||
node_type: BookmarkType,
|
||||
position: u32,
|
||||
title: Option<String>,
|
||||
date_added: Timestamp,
|
||||
last_modified: Timestamp,
|
||||
url: Option<String>,
|
||||
}
|
||||
|
||||
impl FetchedTreeRow {
|
||||
pub fn from_row(row: &Row<'_>) -> Result<Self> {
|
||||
let url = row.get::<_, Option<String>>("url")?;
|
||||
Ok(Self {
|
||||
level: row.get("level")?,
|
||||
_id: row.get::<_, RowId>("id")?,
|
||||
guid: row.get::<_, String>("guid")?.into(),
|
||||
_parent: row.get::<_, Option<RowId>>("parent")?,
|
||||
parent_guid: row
|
||||
.get::<_, Option<String>>("parentGuid")?
|
||||
.map(SyncGuid::from),
|
||||
node_type: BookmarkType::from_u8_with_valid_url(row.get::<_, u8>("type")?, || {
|
||||
url.is_some()
|
||||
}),
|
||||
position: row.get("position")?,
|
||||
title: row.get::<_, Option<String>>("title")?,
|
||||
date_added: row.get("dateAdded")?,
|
||||
last_modified: row.get("lastModified")?,
|
||||
url,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
/// Fetch the tree starting at the specified guid.
|
||||
/// Returns a `BookmarkTreeNode`, its parent's guid (if any), and
|
||||
/// position inside its parent.
|
||||
pub enum FetchDepth {
|
||||
Specific(usize),
|
||||
Deepest,
|
||||
}
|
||||
|
||||
pub fn fetch_tree(
|
||||
db: &PlacesDb,
|
||||
item_guid: &SyncGuid,
|
||||
target_depth: &FetchDepth,
|
||||
) -> Result<Option<(BookmarkTreeNode, Option<SyncGuid>, u32)>> {
|
||||
// XXX - this needs additional work for tags - unlike desktop, there's no
|
||||
// "tags" folder, but instead a couple of tables to join on.
|
||||
let sql = r#"
|
||||
WITH RECURSIVE
|
||||
descendants(fk, level, type, id, guid, parent, parentGuid, position,
|
||||
title, dateAdded, lastModified) AS (
|
||||
SELECT b1.fk, 0, b1.type, b1.id, b1.guid, b1.parent,
|
||||
(SELECT guid FROM moz_bookmarks WHERE id = b1.parent),
|
||||
b1.position, b1.title, b1.dateAdded, b1.lastModified
|
||||
FROM moz_bookmarks b1 WHERE b1.guid=:item_guid
|
||||
UNION ALL
|
||||
SELECT b2.fk, level + 1, b2.type, b2.id, b2.guid, b2.parent,
|
||||
descendants.guid, b2.position, b2.title, b2.dateAdded,
|
||||
b2.lastModified
|
||||
FROM moz_bookmarks b2
|
||||
JOIN descendants ON b2.parent = descendants.id) -- AND b2.id <> :tags_folder)
|
||||
SELECT d.level, d.id, d.guid, d.parent, d.parentGuid, d.type,
|
||||
d.position, NULLIF(d.title, '') AS title, d.dateAdded,
|
||||
d.lastModified, h.url
|
||||
-- (SELECT icon_url FROM moz_icons i
|
||||
-- JOIN moz_icons_to_pages ON icon_id = i.id
|
||||
-- JOIN moz_pages_w_icons pi ON page_id = pi.id
|
||||
-- WHERE pi.page_url_hash = hash(h.url) AND pi.page_url = h.url
|
||||
-- ORDER BY width DESC LIMIT 1) AS iconuri,
|
||||
-- (SELECT GROUP_CONCAT(t.title, ',')
|
||||
-- FROM moz_bookmarks b2
|
||||
-- JOIN moz_bookmarks t ON t.id = +b2.parent AND t.parent = :tags_folder
|
||||
-- WHERE b2.fk = h.id
|
||||
-- ) AS tags,
|
||||
-- EXISTS (SELECT 1 FROM moz_items_annos
|
||||
-- WHERE item_id = d.id LIMIT 1) AS has_annos,
|
||||
-- (SELECT a.content FROM moz_annos a
|
||||
-- JOIN moz_anno_attributes n ON a.anno_attribute_id = n.id
|
||||
-- WHERE place_id = h.id AND n.name = :charset_anno
|
||||
-- ) AS charset
|
||||
FROM descendants d
|
||||
LEFT JOIN moz_bookmarks b3 ON b3.id = d.parent
|
||||
LEFT JOIN moz_places h ON h.id = d.fk
|
||||
ORDER BY d.level, d.parent, d.position"#;
|
||||
|
||||
let scope = db.begin_interrupt_scope();
|
||||
|
||||
let mut stmt = db.conn().prepare(sql)?;
|
||||
|
||||
let mut results =
|
||||
stmt.query_and_then_named(&[(":item_guid", item_guid)], FetchedTreeRow::from_row)?;
|
||||
|
||||
let parent_guid: Option<SyncGuid>;
|
||||
let position: u32;
|
||||
|
||||
// The first row in the result set is always the root of our tree.
|
||||
let mut root = match results.next() {
|
||||
Some(result) => {
|
||||
let row = result?;
|
||||
parent_guid = row.parent_guid.clone();
|
||||
position = row.position;
|
||||
match row.node_type {
|
||||
BookmarkType::Folder => FolderNode {
|
||||
guid: Some(row.guid.clone()),
|
||||
date_added: Some(row.date_added),
|
||||
last_modified: Some(row.last_modified),
|
||||
title: row.title,
|
||||
children: Vec::new(),
|
||||
}
|
||||
.into(),
|
||||
BookmarkType::Bookmark => {
|
||||
// pretend invalid or missing URLs don't exist.
|
||||
match row.url {
|
||||
Some(str_val) => match Url::parse(str_val.as_str()) {
|
||||
// an invalid URL presumably means a logic error
|
||||
// somewhere far away from here...
|
||||
Err(_) => return Ok(None),
|
||||
Ok(url) => BookmarkNode {
|
||||
guid: Some(row.guid.clone()),
|
||||
date_added: Some(row.date_added),
|
||||
last_modified: Some(row.last_modified),
|
||||
title: row.title,
|
||||
url,
|
||||
}
|
||||
.into(),
|
||||
},
|
||||
// This is double-extra-invalid because various
|
||||
// constaints in the schema should prevent it (but we
|
||||
// know from desktop's experience that on-disk
|
||||
// corruption can cause it, so it's possible) - but
|
||||
// we treat it as an `error` rather than just a `warn`
|
||||
None => {
|
||||
log::error!("bookmark {:#} has missing url", row.guid);
|
||||
return Ok(None);
|
||||
}
|
||||
}
|
||||
}
|
||||
BookmarkType::Separator => SeparatorNode {
|
||||
guid: Some(row.guid.clone()),
|
||||
date_added: Some(row.date_added),
|
||||
last_modified: Some(row.last_modified),
|
||||
}
|
||||
.into(),
|
||||
}
|
||||
}
|
||||
None => return Ok(None),
|
||||
};
|
||||
|
||||
// Skip the rest and return if root is not a folder
|
||||
if let BookmarkTreeNode::Bookmark { .. } | BookmarkTreeNode::Separator { .. } = root {
|
||||
return Ok(Some((root, parent_guid, position)));
|
||||
}
|
||||
|
||||
scope.err_if_interrupted()?;
|
||||
// For all remaining rows, build a pseudo-tree that maps parent GUIDs to
|
||||
// ordered children. We need this intermediate step because SQLite returns
|
||||
// results in level order, so we'll see a node's siblings and cousins (same
|
||||
// level, but different parents) before any of their descendants.
|
||||
let mut pseudo_tree: HashMap<SyncGuid, Vec<BookmarkTreeNode>> = HashMap::new();
|
||||
for result in results {
|
||||
let row = result?;
|
||||
scope.err_if_interrupted()?;
|
||||
// Check if we have done fetching the asked depth
|
||||
if let FetchDepth::Specific(d) = *target_depth {
|
||||
if row.level as usize > d + 1 {
|
||||
break;
|
||||
}
|
||||
}
|
||||
let node = match row.node_type {
|
||||
BookmarkType::Bookmark => match &row.url {
|
||||
Some(url_str) => match Url::parse(url_str) {
|
||||
Ok(url) => BookmarkNode {
|
||||
guid: Some(row.guid.clone()),
|
||||
date_added: Some(row.date_added),
|
||||
last_modified: Some(row.last_modified),
|
||||
title: row.title.clone(),
|
||||
url,
|
||||
}
|
||||
.into(),
|
||||
Err(e) => {
|
||||
log::warn!(
|
||||
"ignoring malformed bookmark {} - invalid URL: {:?}",
|
||||
row.guid,
|
||||
e
|
||||
);
|
||||
continue;
|
||||
}
|
||||
},
|
||||
None => {
|
||||
log::warn!("ignoring malformed bookmark {} - no URL", row.guid);
|
||||
continue;
|
||||
}
|
||||
},
|
||||
BookmarkType::Separator => SeparatorNode {
|
||||
guid: Some(row.guid.clone()),
|
||||
date_added: Some(row.date_added),
|
||||
last_modified: Some(row.last_modified),
|
||||
}
|
||||
.into(),
|
||||
BookmarkType::Folder => FolderNode {
|
||||
guid: Some(row.guid.clone()),
|
||||
date_added: Some(row.date_added),
|
||||
last_modified: Some(row.last_modified),
|
||||
title: row.title.clone(),
|
||||
children: Vec::new(),
|
||||
}
|
||||
.into(),
|
||||
};
|
||||
if let Some(parent_guid) = row.parent_guid.as_ref().cloned() {
|
||||
let children = pseudo_tree.entry(parent_guid).or_default();
|
||||
children.push(node);
|
||||
}
|
||||
}
|
||||
|
||||
// Finally, inflate our tree.
|
||||
inflate(&mut root, &mut pseudo_tree);
|
||||
Ok(Some((root, parent_guid, position)))
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
use crate::api::places_api::test::new_mem_connection;
|
||||
use crate::storage::bookmarks::BookmarkRootGuid;
|
||||
use crate::tests::{assert_json_tree, assert_json_tree_with_depth};
|
||||
use serde_json::json;
|
||||
|
||||
// These tests check the SQL that this JSON module does "behind the back" of the
|
||||
// main storage API.
|
||||
#[test]
|
||||
fn test_fetch_root() -> Result<()> {
|
||||
let conn = new_mem_connection();
|
||||
|
||||
// Fetch the root
|
||||
let (t, _, _) =
|
||||
fetch_tree(&conn, &BookmarkRootGuid::Root.into(), &FetchDepth::Deepest)?.unwrap();
|
||||
let f = match t {
|
||||
BookmarkTreeNode::Folder { ref f } => f,
|
||||
_ => panic!("tree root must be a folder"),
|
||||
};
|
||||
assert_eq!(f.guid, Some(BookmarkRootGuid::Root.into()));
|
||||
assert_eq!(f.children.len(), 4);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_insert_tree_and_fetch_level() -> Result<()> {
|
||||
let conn = new_mem_connection();
|
||||
|
||||
let tree = FolderNode {
|
||||
guid: Some(BookmarkRootGuid::Unfiled.into()),
|
||||
children: vec![
|
||||
BookmarkNode {
|
||||
guid: None,
|
||||
date_added: None,
|
||||
last_modified: None,
|
||||
title: Some("the bookmark".into()),
|
||||
url: Url::parse("https://www.example.com")?,
|
||||
}
|
||||
.into(),
|
||||
FolderNode {
|
||||
title: Some("A folder".into()),
|
||||
children: vec![
|
||||
BookmarkNode {
|
||||
guid: None,
|
||||
date_added: None,
|
||||
last_modified: None,
|
||||
title: Some("bookmark 1 in A folder".into()),
|
||||
url: Url::parse("https://www.example2.com")?,
|
||||
}
|
||||
.into(),
|
||||
BookmarkNode {
|
||||
guid: None,
|
||||
date_added: None,
|
||||
last_modified: None,
|
||||
title: Some("bookmark 2 in A folder".into()),
|
||||
url: Url::parse("https://www.example3.com")?,
|
||||
}
|
||||
.into(),
|
||||
],
|
||||
..Default::default()
|
||||
}
|
||||
.into(),
|
||||
BookmarkNode {
|
||||
guid: None,
|
||||
date_added: None,
|
||||
last_modified: None,
|
||||
title: Some("another bookmark".into()),
|
||||
url: Url::parse("https://www.example4.com")?,
|
||||
}
|
||||
.into(),
|
||||
],
|
||||
..Default::default()
|
||||
};
|
||||
insert_tree(&conn, tree)?;
|
||||
|
||||
let expected = json!({
|
||||
"guid": &BookmarkRootGuid::Unfiled.as_guid(),
|
||||
"children": [
|
||||
{
|
||||
"title": "the bookmark",
|
||||
"url": "https://www.example.com/"
|
||||
},
|
||||
{
|
||||
"title": "A folder",
|
||||
"children": [
|
||||
{
|
||||
"title": "bookmark 1 in A folder",
|
||||
"url": "https://www.example2.com/"
|
||||
},
|
||||
{
|
||||
"title": "bookmark 2 in A folder",
|
||||
"url": "https://www.example3.com/"
|
||||
}
|
||||
],
|
||||
},
|
||||
{
|
||||
"title": "another bookmark",
|
||||
"url": "https://www.example4.com/",
|
||||
}
|
||||
]
|
||||
});
|
||||
// check it with deepest fetching level.
|
||||
assert_json_tree(&conn, &BookmarkRootGuid::Unfiled.into(), expected.clone());
|
||||
|
||||
// check it with one level deep, which should be the same as the previous
|
||||
assert_json_tree_with_depth(
|
||||
&conn,
|
||||
&BookmarkRootGuid::Unfiled.into(),
|
||||
expected,
|
||||
&FetchDepth::Specific(1),
|
||||
);
|
||||
|
||||
// check it with zero level deep, which should return root and its children only
|
||||
assert_json_tree_with_depth(
|
||||
&conn,
|
||||
&BookmarkRootGuid::Unfiled.into(),
|
||||
json!({
|
||||
"guid": &BookmarkRootGuid::Unfiled.as_guid(),
|
||||
"children": [
|
||||
{
|
||||
"title": "the bookmark",
|
||||
"url": "https://www.example.com/"
|
||||
},
|
||||
{
|
||||
"title": "A folder",
|
||||
"children": [],
|
||||
},
|
||||
{
|
||||
"title": "another bookmark",
|
||||
"url": "https://www.example4.com/",
|
||||
}
|
||||
]
|
||||
}),
|
||||
&FetchDepth::Specific(0),
|
||||
);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
}
|
|
@ -5,15 +5,12 @@
|
|||
use super::{fetch_page_info, new_page_info, PageInfo, RowId};
|
||||
use crate::db::PlacesDb;
|
||||
use crate::error::Result;
|
||||
use crate::ffi::{HistoryVisitInfo, HistoryVisitInfosWithBound, TopFrecentSiteInfo};
|
||||
use crate::frecency;
|
||||
use crate::hash;
|
||||
use crate::history_sync::engine::{
|
||||
COLLECTION_SYNCID_META_KEY, GLOBAL_SYNCID_META_KEY, LAST_SYNC_META_KEY,
|
||||
};
|
||||
use crate::msg_types::{
|
||||
HistoryVisitInfo, HistoryVisitInfos, HistoryVisitInfosWithBound, TopFrecentSiteInfo,
|
||||
TopFrecentSiteInfos,
|
||||
};
|
||||
use crate::observation::VisitObservation;
|
||||
use crate::storage::{delete_meta, delete_pending_temp_tables, get_meta, put_meta};
|
||||
use crate::types::{SyncStatus, VisitTransition, VisitTransitionSet};
|
||||
|
@ -48,27 +45,25 @@ pub fn apply_observation_direct(
|
|||
db: &PlacesDb,
|
||||
visit_ob: VisitObservation,
|
||||
) -> Result<Option<RowId>> {
|
||||
let url = Url::parse(&visit_ob.url)?;
|
||||
// Don't insert urls larger than our length max.
|
||||
if url.as_str().len() > super::URL_LENGTH_MAX {
|
||||
if visit_ob.url.as_str().len() > super::URL_LENGTH_MAX {
|
||||
return Ok(None);
|
||||
}
|
||||
// Make sure we have a valid preview URL - it should parse, and not exceed max size.
|
||||
// In case the URL is too long, ignore it and proceed with the rest of the observation.
|
||||
// In case the URL is entirely invalid, let the caller know by failing.
|
||||
let preview_image_url = if let Some(ref piu) = visit_ob.preview_image_url {
|
||||
let url = Url::parse(piu)?;
|
||||
if url.as_str().len() > super::URL_LENGTH_MAX {
|
||||
if piu.as_str().len() > super::URL_LENGTH_MAX {
|
||||
None
|
||||
} else {
|
||||
Some(url)
|
||||
Some(piu.clone())
|
||||
}
|
||||
} else {
|
||||
None
|
||||
};
|
||||
let mut page_info = match fetch_page_info(db, &url)? {
|
||||
let mut page_info = match fetch_page_info(db, &visit_ob.url)? {
|
||||
Some(info) => info.page,
|
||||
None => new_page_info(db, &url, None)?,
|
||||
None => new_page_info(db, &visit_ob.url, None)?,
|
||||
};
|
||||
let mut update_change_counter = false;
|
||||
let mut update_frec = false;
|
||||
|
@ -1223,7 +1218,7 @@ pub fn get_top_frecent_site_infos(
|
|||
db: &PlacesDb,
|
||||
num_items: i32,
|
||||
frecency_threshold: i64,
|
||||
) -> Result<TopFrecentSiteInfos> {
|
||||
) -> Result<Vec<TopFrecentSiteInfo>> {
|
||||
// Get the complement of the visit types that should be excluded.
|
||||
let allowed_types = VisitTransitionSet::for_specific(&[
|
||||
VisitTransition::Download,
|
||||
|
@ -1257,7 +1252,7 @@ pub fn get_top_frecent_site_infos(
|
|||
},
|
||||
TopFrecentSiteInfo::from_row,
|
||||
)?;
|
||||
Ok(TopFrecentSiteInfos { infos })
|
||||
Ok(infos)
|
||||
}
|
||||
|
||||
pub fn get_visit_infos(
|
||||
|
@ -1265,7 +1260,7 @@ pub fn get_visit_infos(
|
|||
start: Timestamp,
|
||||
end: Timestamp,
|
||||
exclude_types: VisitTransitionSet,
|
||||
) -> Result<HistoryVisitInfos> {
|
||||
) -> Result<Vec<HistoryVisitInfo>> {
|
||||
let allowed_types = exclude_types.complement();
|
||||
let infos = db.query_rows_and_then_named_cached(
|
||||
"SELECT h.url, h.title, v.visit_date, v.visit_type, h.hidden, h.preview_image_url
|
||||
|
@ -1283,7 +1278,7 @@ pub fn get_visit_infos(
|
|||
},
|
||||
HistoryVisitInfo::from_row,
|
||||
)?;
|
||||
Ok(HistoryVisitInfos { infos })
|
||||
Ok(infos)
|
||||
}
|
||||
|
||||
pub fn get_visit_count(db: &PlacesDb, exclude_types: VisitTransitionSet) -> Result<i64> {
|
||||
|
@ -1310,7 +1305,7 @@ pub fn get_visit_page(
|
|||
offset: i64,
|
||||
count: i64,
|
||||
exclude_types: VisitTransitionSet,
|
||||
) -> Result<HistoryVisitInfos> {
|
||||
) -> Result<Vec<HistoryVisitInfo>> {
|
||||
let allowed_types = exclude_types.complement();
|
||||
let infos = db.query_rows_and_then_named_cached(
|
||||
"SELECT h.url, h.title, v.visit_date, v.visit_type, h.hidden, h.preview_image_url
|
||||
|
@ -1329,7 +1324,7 @@ pub fn get_visit_page(
|
|||
},
|
||||
HistoryVisitInfo::from_row,
|
||||
)?;
|
||||
Ok(HistoryVisitInfos { infos })
|
||||
Ok(infos)
|
||||
}
|
||||
|
||||
pub fn get_visit_page_with_bound(
|
||||
|
@ -1361,7 +1356,7 @@ pub fn get_visit_page_with_bound(
|
|||
)?;
|
||||
|
||||
if let Some(l) = infos.last() {
|
||||
if l.timestamp == bound {
|
||||
if l.timestamp.as_millis_i64() == bound {
|
||||
// all items' timestamp are equal to the previous bound
|
||||
let offset = offset + infos.len() as i64;
|
||||
Ok(HistoryVisitInfosWithBound {
|
||||
|
@ -1378,7 +1373,7 @@ pub fn get_visit_page_with_bound(
|
|||
.count() as i64;
|
||||
Ok(HistoryVisitInfosWithBound {
|
||||
infos,
|
||||
bound,
|
||||
bound: bound.as_millis_i64(),
|
||||
offset,
|
||||
})
|
||||
}
|
||||
|
@ -1854,8 +1849,9 @@ mod tests {
|
|||
// An observation with just a preview_image_url should not update it.
|
||||
apply_observation(
|
||||
&conn,
|
||||
VisitObservation::new(pi.url.clone())
|
||||
.with_preview_image_url(Some("https://www.example.com/preview.png".to_string())),
|
||||
VisitObservation::new(pi.url.clone()).with_preview_image_url(Some(
|
||||
Url::parse("https://www.example.com/preview.png").unwrap(),
|
||||
)),
|
||||
)?;
|
||||
pi = fetch_page_info(&conn, &pi.url)?
|
||||
.expect("page should exist")
|
||||
|
@ -2037,7 +2033,7 @@ mod tests {
|
|||
if let Some(title) = page.bookmark_title {
|
||||
bookmarks::insert_bookmark(
|
||||
&db,
|
||||
&InsertableBookmark {
|
||||
InsertableBookmark {
|
||||
parent_guid: BookmarkRootGuid::Unfiled.into(),
|
||||
position: BookmarkPosition::Append,
|
||||
date_added: None,
|
||||
|
@ -2473,15 +2469,17 @@ mod tests {
|
|||
for (guid, url) in &[&b0, &b1, &b2] {
|
||||
bookmarks::insert_bookmark(
|
||||
&conn,
|
||||
&InsertableItem::Bookmark(InsertableBookmark {
|
||||
parent_guid: BookmarkRootGuid::Unfiled.into(),
|
||||
position: BookmarkPosition::Append,
|
||||
date_added: None,
|
||||
last_modified: None,
|
||||
guid: Some(guid.clone()),
|
||||
url: url.clone(),
|
||||
title: None,
|
||||
}),
|
||||
InsertableItem::Bookmark {
|
||||
b: InsertableBookmark {
|
||||
parent_guid: BookmarkRootGuid::Unfiled.into(),
|
||||
position: BookmarkPosition::Append,
|
||||
date_added: None,
|
||||
last_modified: None,
|
||||
guid: Some(guid.clone()),
|
||||
url: url.clone(),
|
||||
title: None,
|
||||
},
|
||||
},
|
||||
)
|
||||
.unwrap();
|
||||
}
|
||||
|
@ -2588,7 +2586,7 @@ mod tests {
|
|||
|
||||
bookmarks::insert_bookmark(
|
||||
&conn,
|
||||
&InsertableBookmark {
|
||||
InsertableBookmark {
|
||||
parent_guid: BookmarkRootGuid::Unfiled.into(),
|
||||
position: BookmarkPosition::Append,
|
||||
date_added: None,
|
||||
|
@ -2767,8 +2765,9 @@ mod tests {
|
|||
// Can observe preview url without an associated visit.
|
||||
assert!(apply_observation(
|
||||
&conn,
|
||||
VisitObservation::new(url1.clone())
|
||||
.with_preview_image_url(Some("https://www.example.com/image.png".to_string()))
|
||||
VisitObservation::new(url1.clone()).with_preview_image_url(Some(
|
||||
Url::parse("https://www.example.com/image.png").unwrap()
|
||||
))
|
||||
)
|
||||
.unwrap()
|
||||
.is_none());
|
||||
|
@ -2812,7 +2811,9 @@ mod tests {
|
|||
let another_visit_id = apply_observation(
|
||||
&conn,
|
||||
VisitObservation::new(Url::parse("https://www.example.com/another/").unwrap())
|
||||
.with_preview_image_url(Some("https://www.example.com/funky/image.png".to_string()))
|
||||
.with_preview_image_url(Some(
|
||||
Url::parse("https://www.example.com/funky/image.png").unwrap(),
|
||||
))
|
||||
.with_visit_type(VisitTransition::Link),
|
||||
)
|
||||
.unwrap();
|
||||
|
@ -2832,34 +2833,6 @@ mod tests {
|
|||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_bad_preview_url() {
|
||||
let conn = PlacesDb::open_in_memory(ConnectionType::ReadWrite).unwrap();
|
||||
|
||||
// Observing a bad preview url as part of a visit observation fails.
|
||||
assert!(
|
||||
apply_observation(
|
||||
&conn,
|
||||
VisitObservation::new(Url::parse("https://www.example.com/").unwrap())
|
||||
.with_visit_type(VisitTransition::Link)
|
||||
.with_preview_image_url(Some("not at all a url".to_string())),
|
||||
)
|
||||
.is_err(),
|
||||
"expected bad preview url to fail an observation"
|
||||
);
|
||||
|
||||
// Observing a bad preview url by itself also fails.
|
||||
assert!(
|
||||
apply_observation(
|
||||
&conn,
|
||||
VisitObservation::new(Url::parse("https://www.example.com/").unwrap())
|
||||
.with_preview_image_url(Some("not at all a url".to_string())),
|
||||
)
|
||||
.is_err(),
|
||||
"expected bad preview url to fail an observation"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_long_strings() {
|
||||
let _ = env_logger::try_init();
|
||||
|
@ -2881,7 +2854,7 @@ mod tests {
|
|||
&conn,
|
||||
VisitObservation::new(Url::parse("https://www.example.com/").unwrap())
|
||||
.with_visit_type(VisitTransition::Link)
|
||||
.with_preview_image_url(url),
|
||||
.with_preview_image_url(Url::parse(&url).unwrap()),
|
||||
)
|
||||
.unwrap();
|
||||
assert!(
|
||||
|
@ -3038,7 +3011,10 @@ mod tests {
|
|||
let infos_with_bound =
|
||||
get_visit_page_with_bound(&conn, now_i64 - 200_000, 7, 1, VisitTransitionSet::empty())
|
||||
.unwrap();
|
||||
assert_eq!(infos_with_bound.infos[0].url, "https://www.example.com/9",);
|
||||
assert_eq!(
|
||||
infos_with_bound.infos[0].url,
|
||||
Url::parse("https://www.example.com/9").unwrap(),
|
||||
);
|
||||
|
||||
// test when offset fall on one item after visited_date changes
|
||||
let infos_with_bound =
|
||||
|
|
|
@ -489,8 +489,8 @@ pub fn delete_older_than(db: &PlacesDb, older_than: i64) -> Result<()> {
|
|||
|
||||
pub fn delete_metadata(
|
||||
db: &PlacesDb,
|
||||
url: &str,
|
||||
referrer_url: Option<&str>,
|
||||
url: &Url,
|
||||
referrer_url: Option<&Url>,
|
||||
search_term: Option<&str>,
|
||||
) -> Result<()> {
|
||||
let tx = db.begin_transaction()?;
|
||||
|
@ -500,7 +500,7 @@ pub fn delete_metadata(
|
|||
// e.g. referrer_url.is_some(), but a correspodning moz_places entry doesn't exist.
|
||||
// In practice this shouldn't happen, or it may imply API misuse, but in either case we shouldn't
|
||||
// delete things we were not asked to delete.
|
||||
let place_entry = PlaceEntry::fetch(url, &tx, None)?;
|
||||
let place_entry = PlaceEntry::fetch(url.as_str(), &tx, None)?;
|
||||
let place_entry = match place_entry {
|
||||
PlaceEntry::Existing(_) => place_entry,
|
||||
PlaceEntry::CreateFor(_, _) => {
|
||||
|
@ -509,8 +509,8 @@ pub fn delete_metadata(
|
|||
}
|
||||
};
|
||||
let referrer_entry = match referrer_url {
|
||||
Some(referrer_url) if !referrer_url.is_empty() => {
|
||||
Some(PlaceEntry::fetch(referrer_url, &tx, None)?)
|
||||
Some(referrer_url) if !referrer_url.as_str().is_empty() => {
|
||||
Some(PlaceEntry::fetch(referrer_url.as_str(), &tx, None)?)
|
||||
}
|
||||
_ => None,
|
||||
};
|
||||
|
@ -1323,7 +1323,7 @@ mod tests {
|
|||
let observation1 = VisitObservation::new(Url::parse("https://www.cbc.ca/news/politics/federal-budget-2021-freeland-zimonjic-1.5991021").unwrap())
|
||||
.with_at(now)
|
||||
.with_title(Some(String::from("Budget vows to build 'for the long term' as it promises child care cash, projects massive deficits | CBC News")))
|
||||
.with_preview_image_url(Some(String::from("https://i.cbc.ca/1.5993583.1618861792!/cpImage/httpImage/image.jpg_gen/derivatives/16x9_620/fedbudget-20210419.jpg")))
|
||||
.with_preview_image_url(Some(Url::parse("https://i.cbc.ca/1.5993583.1618861792!/cpImage/httpImage/image.jpg_gen/derivatives/16x9_620/fedbudget-20210419.jpg").unwrap()))
|
||||
.with_is_remote(false)
|
||||
.with_visit_type(VisitTransition::Link);
|
||||
apply_observation(&conn, observation1).unwrap();
|
||||
|
@ -1495,13 +1495,19 @@ mod tests {
|
|||
);
|
||||
|
||||
assert_eq!(6, get_since(&conn, 0).expect("get worked").len());
|
||||
delete_metadata(&conn, "http://mozilla.com/1", None, None).expect("delete metadata");
|
||||
delete_metadata(
|
||||
&conn,
|
||||
&Url::parse("http://mozilla.com/1").unwrap(),
|
||||
None,
|
||||
None,
|
||||
)
|
||||
.expect("delete metadata");
|
||||
assert_eq!(5, get_since(&conn, 0).expect("get worked").len());
|
||||
|
||||
delete_metadata(
|
||||
&conn,
|
||||
"http://mozilla.com/1",
|
||||
Some("http://mozilla.com/"),
|
||||
&Url::parse("http://mozilla.com/1").unwrap(),
|
||||
Some(&Url::parse("http://mozilla.com/").unwrap()),
|
||||
None,
|
||||
)
|
||||
.expect("delete metadata");
|
||||
|
@ -1509,22 +1515,27 @@ mod tests {
|
|||
|
||||
delete_metadata(
|
||||
&conn,
|
||||
"http://mozilla.com/1",
|
||||
Some("http://mozilla.com/"),
|
||||
&Url::parse("http://mozilla.com/1").unwrap(),
|
||||
Some(&Url::parse("http://mozilla.com/").unwrap()),
|
||||
Some("1 with search"),
|
||||
)
|
||||
.expect("delete metadata");
|
||||
assert_eq!(3, get_since(&conn, 0).expect("get worked").len());
|
||||
|
||||
delete_metadata(&conn, "http://mozilla.com/1", None, Some("1 with search"))
|
||||
.expect("delete metadata");
|
||||
delete_metadata(
|
||||
&conn,
|
||||
&Url::parse("http://mozilla.com/1").unwrap(),
|
||||
None,
|
||||
Some("1 with search"),
|
||||
)
|
||||
.expect("delete metadata");
|
||||
assert_eq!(2, get_since(&conn, 0).expect("get worked").len());
|
||||
|
||||
// key doesn't match, do nothing
|
||||
delete_metadata(
|
||||
&conn,
|
||||
"http://mozilla.com/2",
|
||||
Some("http://wrong-referrer.com"),
|
||||
&Url::parse("http://mozilla.com/2").unwrap(),
|
||||
Some(&Url::parse("http://wrong-referrer.com").unwrap()),
|
||||
Some("2 with search"),
|
||||
)
|
||||
.expect("delete metadata");
|
||||
|
@ -1660,30 +1671,34 @@ mod tests {
|
|||
// add bookmark for the page we have a metadata entry
|
||||
bookmarks::insert_bookmark(
|
||||
&conn,
|
||||
&InsertableItem::Bookmark(InsertableBookmark {
|
||||
parent_guid: BookmarkRootGuid::Unfiled.into(),
|
||||
position: BookmarkPosition::Append,
|
||||
date_added: None,
|
||||
last_modified: None,
|
||||
guid: Some(SyncGuid::from("cccccccccccc")),
|
||||
url,
|
||||
title: None,
|
||||
}),
|
||||
InsertableItem::Bookmark {
|
||||
b: InsertableBookmark {
|
||||
parent_guid: BookmarkRootGuid::Unfiled.into(),
|
||||
position: BookmarkPosition::Append,
|
||||
date_added: None,
|
||||
last_modified: None,
|
||||
guid: Some(SyncGuid::from("cccccccccccc")),
|
||||
url,
|
||||
title: None,
|
||||
},
|
||||
},
|
||||
)
|
||||
.expect("bookmark insert worked");
|
||||
|
||||
// add another bookmark to the "parent" of our metadata entry
|
||||
bookmarks::insert_bookmark(
|
||||
&conn,
|
||||
&InsertableItem::Bookmark(InsertableBookmark {
|
||||
parent_guid: BookmarkRootGuid::Unfiled.into(),
|
||||
position: BookmarkPosition::Append,
|
||||
date_added: None,
|
||||
last_modified: None,
|
||||
guid: Some(SyncGuid::from("ccccccccccca")),
|
||||
url: parent_url,
|
||||
title: None,
|
||||
}),
|
||||
InsertableItem::Bookmark {
|
||||
b: InsertableBookmark {
|
||||
parent_guid: BookmarkRootGuid::Unfiled.into(),
|
||||
position: BookmarkPosition::Append,
|
||||
date_added: None,
|
||||
last_modified: None,
|
||||
guid: Some(SyncGuid::from("ccccccccccca")),
|
||||
url: parent_url,
|
||||
title: None,
|
||||
},
|
||||
},
|
||||
)
|
||||
.expect("bookmark insert worked");
|
||||
|
||||
|
|
|
@ -12,7 +12,8 @@ pub mod tags;
|
|||
|
||||
use crate::db::PlacesDb;
|
||||
use crate::error::{ErrorKind, InvalidPlaceInfo, Result};
|
||||
use crate::msg_types::{HistoryVisitInfo, TopFrecentSiteInfo};
|
||||
use crate::ffi::HistoryVisitInfo;
|
||||
use crate::ffi::TopFrecentSiteInfo;
|
||||
use crate::types::{SyncStatus, VisitTransition};
|
||||
use rusqlite::types::{FromSql, FromSqlResult, ToSql, ToSqlOutput, ValueRef};
|
||||
use rusqlite::Result as RusqliteResult;
|
||||
|
@ -190,21 +191,27 @@ impl HistoryVisitInfo {
|
|||
// is fine.
|
||||
.unwrap_or(VisitTransition::Link);
|
||||
let visit_date: Timestamp = row.get("visit_date")?;
|
||||
let url: String = row.get("url")?;
|
||||
let preview_image_url: Option<String> = row.get("preview_image_url")?;
|
||||
Ok(Self {
|
||||
url: row.get("url")?,
|
||||
url: Url::parse(&url)?,
|
||||
title: row.get("title")?,
|
||||
timestamp: visit_date.0 as i64,
|
||||
visit_type: visit_type as i32,
|
||||
timestamp: visit_date,
|
||||
visit_type,
|
||||
is_hidden: row.get("hidden")?,
|
||||
preview_image_url: row.get("preview_image_url")?,
|
||||
preview_image_url: match preview_image_url {
|
||||
Some(s) => Some(Url::parse(&s)?),
|
||||
None => None,
|
||||
},
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
impl TopFrecentSiteInfo {
|
||||
pub(crate) fn from_row(row: &rusqlite::Row<'_>) -> Result<Self> {
|
||||
let url: String = row.get("url")?;
|
||||
Ok(Self {
|
||||
url: row.get("url")?,
|
||||
url: Url::parse(&url)?,
|
||||
title: row.get("title")?,
|
||||
})
|
||||
}
|
||||
|
|
|
@ -5,11 +5,14 @@
|
|||
use rusqlite::NO_PARAMS;
|
||||
use serde_json::Value;
|
||||
|
||||
use crate::error::*;
|
||||
use crate::{
|
||||
db::PlacesDb,
|
||||
storage::bookmarks::{fetch_tree, get_raw_bookmark, insert_tree, BookmarkTreeNode, FetchDepth},
|
||||
storage::bookmarks::get_raw_bookmark,
|
||||
storage::bookmarks::json_tree::{fetch_tree, insert_tree, BookmarkTreeNode, FetchDepth},
|
||||
types::BookmarkType,
|
||||
};
|
||||
|
||||
use sql_support::ConnExt;
|
||||
use sync_guid::Guid as SyncGuid;
|
||||
use types::Timestamp;
|
||||
|
@ -19,10 +22,10 @@ use pretty_assertions::assert_eq;
|
|||
pub fn insert_json_tree(conn: &PlacesDb, jtree: Value) {
|
||||
let tree: BookmarkTreeNode = serde_json::from_value(jtree).expect("should be valid");
|
||||
let folder_node = match tree {
|
||||
BookmarkTreeNode::Folder(folder_node) => folder_node,
|
||||
BookmarkTreeNode::Folder { f: folder_node } => folder_node,
|
||||
_ => panic!("must be a folder"),
|
||||
};
|
||||
insert_tree(conn, &folder_node).expect("should insert");
|
||||
insert_tree(conn, folder_node).expect("should insert");
|
||||
}
|
||||
|
||||
pub struct InvalidBookmarkIds {
|
||||
|
|
|
@ -17,8 +17,7 @@ pub struct InvalidVisitType;
|
|||
|
||||
// NOTE: These discriminator values are the same as those used by Desktop
|
||||
// Firefox and are what is written to the database. We also duplicate them
|
||||
// in the android lib as constants on PlacesConnection, and in a couple
|
||||
// constants in visit_transition_set.rs
|
||||
// as constants in visit_transition_set.rs
|
||||
#[repr(u8)]
|
||||
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
|
||||
pub enum VisitTransition {
|
||||
|
|
|
@ -8,7 +8,7 @@ use std::convert::{TryFrom, TryInto};
|
|||
|
||||
#[derive(Copy, Clone, Debug, Default, PartialEq, Eq, Hash)]
|
||||
pub struct VisitTransitionSet {
|
||||
bits: u16,
|
||||
pub(crate) bits: u16,
|
||||
}
|
||||
|
||||
const ALL_BITS_SET: u16 = (1u16 << (VisitTransition::Link as u8))
|
||||
|
|
|
@ -20,11 +20,11 @@ viaduct = { path = "../viaduct" }
|
|||
sql-support = { path = "../support/sql" }
|
||||
rc_crypto = { path = "../support/rc_crypto", features = ["ece"] }
|
||||
thiserror = "1.0"
|
||||
uniffi = "^0.15"
|
||||
uniffi_macros = "^0.15"
|
||||
uniffi = "^0.16"
|
||||
uniffi_macros = "^0.16"
|
||||
|
||||
[build-dependencies]
|
||||
uniffi_build = { version = "^0.15", features=["builtin-bindgen"] }
|
||||
uniffi_build = { version = "^0.16", features=["builtin-bindgen"] }
|
||||
|
||||
|
||||
[dev-dependencies]
|
||||
|
|
|
@ -416,6 +416,12 @@ mod test {
|
|||
assert!(!Guid::from("aaaabbbbccccd").is_valid_for_places()); // too long
|
||||
assert!(!Guid::from("aaaabbbbccc").is_valid_for_places()); // too short
|
||||
assert!(!Guid::from("aaaabbbbccc=").is_valid_for_places()); // right length, bad character
|
||||
assert!(!Guid::empty().is_valid_for_places()); // empty isn't valid to insert.
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_valid_for_sync_server() {
|
||||
assert!(!Guid::empty().is_valid_for_sync_server()); // empty isn't valid remotely.
|
||||
}
|
||||
|
||||
#[allow(clippy::cmp_owned)] // See clippy note below.
|
||||
|
|
|
@ -36,6 +36,10 @@ impl Timestamp {
|
|||
pub fn as_millis(self) -> u64 {
|
||||
self.0
|
||||
}
|
||||
|
||||
pub fn as_millis_i64(self) -> i64 {
|
||||
self.0 as i64
|
||||
}
|
||||
/// In desktop sync, bookmarks are clamped to Jan 23, 1993 (which is 727747200000)
|
||||
/// There's no good reason history records could be older than that, so we do
|
||||
/// the same here (even though desktop's history currently doesn't)
|
||||
|
|
|
@ -23,8 +23,8 @@ serde_derive = "1"
|
|||
serde_json = "1"
|
||||
parking_lot = "0.5"
|
||||
interrupt-support = { path = "../support/interrupt" }
|
||||
uniffi = "^0.15"
|
||||
uniffi_macros = "^0.15"
|
||||
uniffi = "^0.16"
|
||||
uniffi_macros = "^0.16"
|
||||
|
||||
[build-dependencies]
|
||||
uniffi_build = { version = "^0.15", features=["builtin-bindgen"] }
|
||||
uniffi_build = { version = "^0.16", features=["builtin-bindgen"] }
|
||||
|
|
|
@ -19,8 +19,8 @@ interrupt-support = { path = "../support/interrupt" }
|
|||
sync-guid = { path = "../support/guid", features = ["random"] }
|
||||
thiserror = "1.0"
|
||||
anyhow = "1.0"
|
||||
uniffi = "^0.15"
|
||||
uniffi_macros = "^0.15"
|
||||
uniffi = "^0.16"
|
||||
uniffi_macros = "^0.16"
|
||||
|
||||
[build-dependencies]
|
||||
uniffi_build = { version = "^0.15", features = [ "builtin-bindgen" ]}
|
||||
uniffi_build = { version = "^0.16", features = [ "builtin-bindgen" ]}
|
||||
|
|
|
@ -6,8 +6,11 @@
|
|||
|
||||
use cli_support::fxa_creds::{get_cli_fxa, get_default_fxa_config};
|
||||
use places::storage::bookmarks::{
|
||||
fetch_tree, insert_tree, BookmarkNode, BookmarkRootGuid, BookmarkTreeNode, FetchDepth,
|
||||
FolderNode, SeparatorNode,
|
||||
json_tree::{
|
||||
fetch_tree, insert_tree, BookmarkNode, BookmarkTreeNode, FetchDepth, FolderNode,
|
||||
SeparatorNode,
|
||||
},
|
||||
BookmarkRootGuid,
|
||||
};
|
||||
use places::types::BookmarkType;
|
||||
use places::{ConnectionType, PlacesApi, PlacesDb};
|
||||
|
@ -83,7 +86,7 @@ fn do_import(db: &PlacesDb, root: BookmarkTreeNode) -> Result<()> {
|
|||
// Later we will want to get smarter around guids - currently we will
|
||||
// fail to do this twice due to guid dupes - but that's OK for now.
|
||||
let folder = match root {
|
||||
BookmarkTreeNode::Folder(folder_node) => folder_node,
|
||||
BookmarkTreeNode::Folder { f } => f,
|
||||
_ => {
|
||||
println!("Imported node isn't a folder structure");
|
||||
return Ok(());
|
||||
|
@ -101,14 +104,14 @@ fn do_import(db: &PlacesDb, root: BookmarkTreeNode) -> Result<()> {
|
|||
|
||||
for sub_root_node in folder.children {
|
||||
let sub_root_folder = match sub_root_node {
|
||||
BookmarkTreeNode::Folder(folder_node) => folder_node,
|
||||
BookmarkTreeNode::Folder { f } => f,
|
||||
_ => {
|
||||
println!("Child of the root isn't a folder - skipping...");
|
||||
continue;
|
||||
}
|
||||
};
|
||||
println!("importing {:?}", sub_root_folder.guid);
|
||||
insert_tree(db, &sub_root_folder)?
|
||||
insert_tree(db, sub_root_folder)?
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
|
|
@ -13,7 +13,7 @@ fxa-client = { path = "../../components/fxa-client" }
|
|||
logins = { path = "../../components/logins" }
|
||||
tabs = { path = "../../components/tabs/" }
|
||||
sync_manager = { path = "../../components/sync_manager/" }
|
||||
places-ffi = { path = "../../components/places/ffi" }
|
||||
places = { path = "../../components/places" }
|
||||
push = { path = "../../components/push" }
|
||||
rc_log_ffi = { path = "../../components/rc_log" }
|
||||
viaduct = { path = "../../components/viaduct" }
|
||||
|
|
|
@ -13,7 +13,7 @@ pub use crashtest;
|
|||
pub use fxa_client;
|
||||
pub use logins;
|
||||
pub use nimbus;
|
||||
pub use places_ffi;
|
||||
pub use places;
|
||||
pub use push;
|
||||
pub use rc_log_ffi;
|
||||
pub use sync_manager;
|
||||
|
|
|
@ -20,4 +20,3 @@ autofill = { path = "../../components/autofill" }
|
|||
push = { path = "../../components/push" }
|
||||
tabs = { path = "../../components/tabs" }
|
||||
places = {path = "../../components/places" }
|
||||
places-ffi = { path = "../../components/places/ffi" }
|
|
@ -16,5 +16,3 @@
|
|||
#import "RustLogFFI.h"
|
||||
// This is the uniffi-ed header
|
||||
#import "placesFFI.h"
|
||||
// This is the ffi header and will be deleted after uniffi
|
||||
#import "RustPlacesAPI.h"
|
||||
|
|
|
@ -130,8 +130,6 @@ mkdir -p "$COMMON/Headers"
|
|||
cp "$THIS_DIR/MozillaRustComponents.h" "$COMMON/Headers"
|
||||
cp "$REPO_ROOT/components/rc_log/ios/RustLogFFI.h" "$COMMON/Headers"
|
||||
cp "$REPO_ROOT/components/viaduct/ios/RustViaductFFI.h" "$COMMON/Headers"
|
||||
# This will go away after places becomes uniffi-ed
|
||||
cp "$REPO_ROOT/components/places/ios/Places/RustPlacesAPI.h" "$COMMON/Headers"
|
||||
# TODO: https://github.com/mozilla/uniffi-rs/issues/1060
|
||||
# it would be neat if there was a single UniFFI command that would spit out
|
||||
# all of the generated headers for all UniFFIed dependencies of a given crate.
|
||||
|
|
|
@ -11,7 +11,6 @@ pub use fxa_client;
|
|||
pub use logins;
|
||||
pub use nimbus;
|
||||
pub use places;
|
||||
pub use places_ffi;
|
||||
pub use push;
|
||||
pub use rc_log_ffi;
|
||||
pub use tabs;
|
||||
|
|
|
@ -13,6 +13,5 @@ FOUNDATION_EXPORT const unsigned char MegazordClientVersionString[];
|
|||
#import "loginsFFI.h"
|
||||
#import "placesFFI.h"
|
||||
#import "RustLogFFI.h"
|
||||
#import "RustPlacesAPI.h"
|
||||
#import "RustViaductFFI.h"
|
||||
#import "GleanFfi.h"
|
||||
|
|
|
@ -70,12 +70,7 @@
|
|||
CD5ECD192716FB88009D10CC /* QuantityMetric.swift in Sources */ = {isa = PBXBuildFile; fileRef = CD5ECD182716FB88009D10CC /* QuantityMetric.swift */; };
|
||||
CD5ECD1B2716FB99009D10CC /* UrlMetric.swift in Sources */ = {isa = PBXBuildFile; fileRef = CD5ECD1A2716FB99009D10CC /* UrlMetric.swift */; };
|
||||
CD85A45422361E890099BFA9 /* Places.swift in Sources */ = {isa = PBXBuildFile; fileRef = CD85A44922361E880099BFA9 /* Places.swift */; };
|
||||
CD85A45522361E890099BFA9 /* RustPlacesAPI.h in Headers */ = {isa = PBXBuildFile; fileRef = CD85A44A22361E880099BFA9 /* RustPlacesAPI.h */; settings = {ATTRIBUTES = (Public, ); }; };
|
||||
CD85A45722361E890099BFA9 /* String+Free_Places.swift in Sources */ = {isa = PBXBuildFile; fileRef = CD85A44D22361E880099BFA9 /* String+Free_Places.swift */; };
|
||||
CD85A45822361E890099BFA9 /* Data+PlacesRustBuffer.swift in Sources */ = {isa = PBXBuildFile; fileRef = CD85A44E22361E880099BFA9 /* Data+PlacesRustBuffer.swift */; };
|
||||
CD85A45922361E890099BFA9 /* Bookmark.swift in Sources */ = {isa = PBXBuildFile; fileRef = CD85A44F22361E880099BFA9 /* Bookmark.swift */; };
|
||||
CD85A45A22361E890099BFA9 /* PlacesError.swift in Sources */ = {isa = PBXBuildFile; fileRef = CD85A45122361E880099BFA9 /* PlacesError.swift */; };
|
||||
CDC0089F2236CAD100893800 /* places_msg_types.proto in Sources */ = {isa = PBXBuildFile; fileRef = CDC0089E2236CAD100893800 /* places_msg_types.proto */; };
|
||||
CDC21B14221DCE3700AA71E5 /* RustLog.swift in Sources */ = {isa = PBXBuildFile; fileRef = CDC21B12221DCE3700AA71E5 /* RustLog.swift */; };
|
||||
CDC21B15221DCE3700AA71E5 /* RustLogFFI.h in Headers */ = {isa = PBXBuildFile; fileRef = CDC21B13221DCE3700AA71E5 /* RustLogFFI.h */; settings = {ATTRIBUTES = (Public, ); }; };
|
||||
CE0A9AB424E4A2CC00914A16 /* fxa_client.udl in Sources */ = {isa = PBXBuildFile; fileRef = CE0A9AB224E4A25C00914A16 /* fxa_client.udl */; };
|
||||
|
@ -223,12 +218,7 @@
|
|||
CD5ECD182716FB88009D10CC /* QuantityMetric.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = QuantityMetric.swift; sourceTree = "<group>"; };
|
||||
CD5ECD1A2716FB99009D10CC /* UrlMetric.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = UrlMetric.swift; sourceTree = "<group>"; };
|
||||
CD85A44922361E880099BFA9 /* Places.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = Places.swift; sourceTree = "<group>"; };
|
||||
CD85A44A22361E880099BFA9 /* RustPlacesAPI.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = RustPlacesAPI.h; sourceTree = "<group>"; };
|
||||
CD85A44D22361E880099BFA9 /* String+Free_Places.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = "String+Free_Places.swift"; sourceTree = "<group>"; };
|
||||
CD85A44E22361E880099BFA9 /* Data+PlacesRustBuffer.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = "Data+PlacesRustBuffer.swift"; sourceTree = "<group>"; };
|
||||
CD85A44F22361E880099BFA9 /* Bookmark.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = Bookmark.swift; sourceTree = "<group>"; };
|
||||
CD85A45122361E880099BFA9 /* PlacesError.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = PlacesError.swift; sourceTree = "<group>"; };
|
||||
CDC0089E2236CAD100893800 /* places_msg_types.proto */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.protobuf; name = places_msg_types.proto; path = ../../src/places_msg_types.proto; sourceTree = "<group>"; };
|
||||
CDC21B12221DCE3700AA71E5 /* RustLog.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = RustLog.swift; sourceTree = "<group>"; };
|
||||
CDC21B13221DCE3700AA71E5 /* RustLogFFI.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = RustLogFFI.h; sourceTree = "<group>"; };
|
||||
CE0A9AB224E4A25C00914A16 /* fxa_client.udl */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = text; name = fxa_client.udl; path = ../../src/fxa_client.udl; sourceTree = "<group>"; };
|
||||
|
@ -520,36 +510,15 @@
|
|||
isa = PBXGroup;
|
||||
children = (
|
||||
1B9B8F0626743B7500C8A918 /* Generated */,
|
||||
CDC0089E2236CAD100893800 /* places_msg_types.proto */,
|
||||
1B9B8EFE267439CC00C8A918 /* places.udl */,
|
||||
CD85A44922361E880099BFA9 /* Places.swift */,
|
||||
CD85A44A22361E880099BFA9 /* RustPlacesAPI.h */,
|
||||
CD85A44C22361E880099BFA9 /* Extensions */,
|
||||
CD85A44F22361E880099BFA9 /* Bookmark.swift */,
|
||||
CD85A45022361E880099BFA9 /* Errors */,
|
||||
D9FA8C532644A170008344BF /* HistoryMetadata.swift */,
|
||||
);
|
||||
name = Places;
|
||||
path = ../../components/places/ios/Places;
|
||||
sourceTree = "<group>";
|
||||
};
|
||||
CD85A44C22361E880099BFA9 /* Extensions */ = {
|
||||
isa = PBXGroup;
|
||||
children = (
|
||||
CD85A44D22361E880099BFA9 /* String+Free_Places.swift */,
|
||||
CD85A44E22361E880099BFA9 /* Data+PlacesRustBuffer.swift */,
|
||||
);
|
||||
path = Extensions;
|
||||
sourceTree = "<group>";
|
||||
};
|
||||
CD85A45022361E880099BFA9 /* Errors */ = {
|
||||
isa = PBXGroup;
|
||||
children = (
|
||||
CD85A45122361E880099BFA9 /* PlacesError.swift */,
|
||||
);
|
||||
path = Errors;
|
||||
sourceTree = "<group>";
|
||||
};
|
||||
CDC21B11221DCE3700AA71E5 /* RustLog */ = {
|
||||
isa = PBXGroup;
|
||||
children = (
|
||||
|
@ -636,7 +605,6 @@
|
|||
D05434A1225680D900FDE4EF /* MozillaAppServices.h in Headers */,
|
||||
CDC21B15221DCE3700AA71E5 /* RustLogFFI.h in Headers */,
|
||||
CE58B2F8242D54340089F091 /* RustViaductFFI.h in Headers */,
|
||||
CD85A45522361E890099BFA9 /* RustPlacesAPI.h in Headers */,
|
||||
BF1A879225064A4C00FED88E /* GleanFfi.h in Headers */,
|
||||
990863A125F9D25A00032083 /* crashtestFFI.h in Headers */,
|
||||
99FAA19B25E61D5D001E2231 /* fxa_clientFFI.h in Headers */,
|
||||
|
@ -804,7 +772,6 @@
|
|||
CE13F19B23F330DF005187A7 /* FxAccountMigration.swift in Sources */,
|
||||
CE0A9AB424E4A2CC00914A16 /* fxa_client.udl in Sources */,
|
||||
BF1A879525064A6600FED88E /* Configuration.swift in Sources */,
|
||||
CDC0089F2236CAD100893800 /* places_msg_types.proto in Sources */,
|
||||
BF1A87B025064A8100FED88E /* LabeledMetric.swift in Sources */,
|
||||
BF1A87AD25064A8100FED88E /* Lifetime.swift in Sources */,
|
||||
BF1A87AF25064A8100FED88E /* MemoryDistributionMetric.swift in Sources */,
|
||||
|
@ -823,7 +790,6 @@
|
|||
99FAA19F25E65CA5001E2231 /* FxAccountOAuth.swift in Sources */,
|
||||
398A4149264986E200AA22F1 /* FeatureVariables.swift in Sources */,
|
||||
BF1A87D025064AC000FED88E /* Sysctl.swift in Sources */,
|
||||
CD85A45A22361E890099BFA9 /* PlacesError.swift in Sources */,
|
||||
993D6D4F26B11E410075A820 /* SyncUnlockInfo.swift in Sources */,
|
||||
BF1A87AB25064A8100FED88E /* DistributionData.swift in Sources */,
|
||||
CD5ECD192716FB88009D10CC /* QuantityMetric.swift in Sources */,
|
||||
|
@ -837,9 +803,7 @@
|
|||
BF1A87D125064AC000FED88E /* Unreachable.swift in Sources */,
|
||||
CE2D04C5231822AC00AF5722 /* FxAccountManager.swift in Sources */,
|
||||
CE1B09A3231863D7006226E1 /* KeychainWrapper+.swift in Sources */,
|
||||
CD85A45722361E890099BFA9 /* String+Free_Places.swift in Sources */,
|
||||
CE9FFA13242D4E7B0011029E /* Viaduct.swift in Sources */,
|
||||
CD85A45822361E890099BFA9 /* Data+PlacesRustBuffer.swift in Sources */,
|
||||
395992B825FBE40300E3185F /* NimbusApi.swift in Sources */,
|
||||
BF1A87B925064A8100FED88E /* TimingDistributionMetric.swift in Sources */,
|
||||
CD5ECD1B2716FB99009D10CC /* UrlMetric.swift in Sources */,
|
||||
|
|
|
@ -57,7 +57,7 @@ enum CheckChildren {
|
|||
}
|
||||
|
||||
// similar assert_json_tree from our rust code.
|
||||
func checkTree(_ n: BookmarkNode, _ want: [String: Any], checkChildren: CheckChildren = .full) {
|
||||
func checkTree(_ n: BookmarkNodeData, _ want: [String: Any], checkChildren: CheckChildren = .full) {
|
||||
XCTAssert(n.parentGUID != nil || n.guid == BookmarkRoots.RootGUID)
|
||||
|
||||
XCTAssert(dynCmp(n.guid, want["guid"]))
|
||||
|
@ -65,21 +65,21 @@ func checkTree(_ n: BookmarkNode, _ want: [String: Any], checkChildren: CheckChi
|
|||
|
||||
switch n.type {
|
||||
case .separator:
|
||||
XCTAssert(n is BookmarkSeparator)
|
||||
XCTAssert(n is BookmarkSeparatorData)
|
||||
case .bookmark:
|
||||
XCTAssert(n is BookmarkItem)
|
||||
XCTAssert(n is BookmarkItemData)
|
||||
case .folder:
|
||||
XCTAssert(n is BookmarkFolder)
|
||||
XCTAssert(n is BookmarkFolderData)
|
||||
}
|
||||
|
||||
if let bn = n as? BookmarkItem {
|
||||
if let bn = n as? BookmarkItemData {
|
||||
XCTAssert(dynCmp(bn.url, want["url"]))
|
||||
XCTAssert(dynCmp(bn.title, want["title"]))
|
||||
} else {
|
||||
XCTAssertNil(want["url"])
|
||||
}
|
||||
|
||||
if let fn = n as? BookmarkFolder {
|
||||
if let fn = n as? BookmarkFolderData {
|
||||
if checkChildren == .onlyGUIDs {
|
||||
XCTAssertNil(fn.children)
|
||||
// Make sure it's not getting provided accidentally
|
||||
|
@ -115,7 +115,7 @@ func counter() -> Int {
|
|||
}
|
||||
|
||||
@discardableResult
|
||||
func insertTree(_ db: PlacesWriteConnection, parent: String, tree: [String: Any]) -> String {
|
||||
func insertTree(_ db: PlacesWriteConnection, parent: Guid, tree: [String: Any]) -> String {
|
||||
let root = try! db.createFolder(parentGUID: parent, title: (tree["title"] as? String) ?? "folder \(counter())")
|
||||
for child in tree["children"] as! [[String: Any]] {
|
||||
switch typeFromAny(child["type"])! {
|
||||
|
@ -223,9 +223,9 @@ class PlacesTests: XCTestCase {
|
|||
])
|
||||
|
||||
// Check recursive: false
|
||||
let noGrandkids = try! db.getBookmarksTree(rootGUID: BookmarkRoots.MenuFolderGUID, recursive: false)! as! BookmarkFolder
|
||||
let noGrandkids = try! db.getBookmarksTree(rootGUID: BookmarkRoots.MenuFolderGUID, recursive: false)! as! BookmarkFolderData
|
||||
|
||||
let expectedChildGuids = ((got as! BookmarkFolder).children![0] as! BookmarkFolder).childGUIDs
|
||||
let expectedChildGuids = ((got as! BookmarkFolderData).children![0] as! BookmarkFolderData).childGUIDs
|
||||
|
||||
checkTree(noGrandkids, [
|
||||
"guid": BookmarkRoots.MenuFolderGUID,
|
||||
|
@ -371,7 +371,7 @@ class PlacesTests: XCTestCase {
|
|||
_ = try db.updateBookmarkNode(guid: "123", parentGUID: "456")
|
||||
XCTFail("Call did not throw")
|
||||
} catch let caughtError as PlacesError {
|
||||
if case PlacesError.noSuchItem = caughtError {
|
||||
if case PlacesError.UnknownBookmarkItem = caughtError {
|
||||
} else {
|
||||
XCTFail("Not the correct error ")
|
||||
}
|
||||
|
@ -381,10 +381,10 @@ class PlacesTests: XCTestCase {
|
|||
|
||||
// Testing a Uniffi-ed error
|
||||
do {
|
||||
_ = try db.getLatestHistoryMetadataForUrl(url: "somerandomurl")
|
||||
_ = try db.noteHistoryMetadataObservation(observation: HistoryMetadataObservation(url: "http://www.[].com"))
|
||||
XCTFail("Call did not throw")
|
||||
} catch let caughtError as PlacesError {
|
||||
if case PlacesError.urlParseError = caughtError {
|
||||
if case PlacesError.UrlParseFailed = caughtError {
|
||||
} else {
|
||||
XCTAssertEqual(caughtError.localizedDescription, "Error")
|
||||
XCTFail("Not the correct PlacesError")
|
||||
|
|
|
@ -11,7 +11,7 @@ crate-type = ["staticlib"]
|
|||
[dependencies]
|
||||
fxa-client = { path = "../../../components/fxa-client" }
|
||||
logins = { path = "../../../components/logins" }
|
||||
places-ffi = { path = "../../../components/places/ffi" }
|
||||
places = { path = "../../../components/places" }
|
||||
rc_log_ffi = { path = "../../../components/rc_log" }
|
||||
viaduct = { path = "../../../components/viaduct" }
|
||||
viaduct-reqwest = { path = "../../../components/support/viaduct-reqwest" }
|
||||
|
|
|
@ -10,6 +10,6 @@ pub use fxa_client;
|
|||
pub use glean_ffi;
|
||||
pub use logins;
|
||||
pub use nimbus;
|
||||
pub use places_ffi;
|
||||
pub use places;
|
||||
pub use rc_log_ffi;
|
||||
pub use viaduct_reqwest;
|
||||
|
|
|
@ -3,6 +3,7 @@
|
|||
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
|
||||
|
||||
use places::import::fennec::bookmarks::BookmarksMigrationResult;
|
||||
use places::storage::bookmarks::fetch::Item;
|
||||
use places::{api::places_api::PlacesApi, ErrorKind, Result};
|
||||
use rusqlite::types::{ToSql, ToSqlOutput};
|
||||
use rusqlite::{Connection, NO_PARAMS};
|
||||
|
@ -378,7 +379,7 @@ fn test_import() -> Result<()> {
|
|||
fn test_timestamp_sanitization() -> Result<()> {
|
||||
use places::api::places_api::ConnectionType;
|
||||
use places::import::common::NOW;
|
||||
use places::storage::bookmarks::public_node::fetch_bookmark;
|
||||
use places::storage::bookmarks::fetch::fetch_bookmark;
|
||||
use std::time::{Duration, SystemTime};
|
||||
|
||||
fn get_actual_timestamps(
|
||||
|
@ -407,8 +408,8 @@ fn test_timestamp_sanitization() -> Result<()> {
|
|||
let reader = places_api.open_connection(ConnectionType::ReadOnly)?;
|
||||
let b = fetch_bookmark(&reader, &Guid::from("bookmarkAAAA"), true)?.unwrap();
|
||||
// regardless of what our caller asserts, modified must never be earlier than created.
|
||||
assert!(b.last_modified >= b.date_added);
|
||||
Ok((b.date_added, b.last_modified))
|
||||
assert!(b.last_modified() >= b.date_added());
|
||||
Ok((*b.date_added(), *b.last_modified()))
|
||||
}
|
||||
|
||||
let now = *NOW;
|
||||
|
@ -466,7 +467,7 @@ fn test_timestamp_sanitization() -> Result<()> {
|
|||
fn test_timestamp_sanitization_tags() -> Result<()> {
|
||||
use places::api::places_api::ConnectionType;
|
||||
use places::import::common::NOW;
|
||||
use places::storage::bookmarks::public_node::fetch_bookmark;
|
||||
use places::storage::bookmarks::fetch::fetch_bookmark;
|
||||
use std::time::{Duration, SystemTime};
|
||||
|
||||
fn get_actual_timestamp(created: Timestamp, modified: Timestamp) -> Result<Timestamp> {
|
||||
|
@ -493,8 +494,8 @@ fn test_timestamp_sanitization_tags() -> Result<()> {
|
|||
let reader = places_api.open_connection(ConnectionType::ReadOnly)?;
|
||||
let b = fetch_bookmark(&reader, &Guid::from("bookmarkAAAA"), true)?.unwrap();
|
||||
// for items with tags, created and modified are always identical.
|
||||
assert_eq!(b.date_added, b.last_modified);
|
||||
Ok(b.date_added)
|
||||
assert_eq!(b.date_added(), b.last_modified());
|
||||
Ok(*b.date_added())
|
||||
}
|
||||
|
||||
let now = *NOW;
|
||||
|
@ -534,7 +535,7 @@ fn test_timestamp_sanitization_tags() -> Result<()> {
|
|||
#[test]
|
||||
fn test_positions() -> Result<()> {
|
||||
use places::api::places_api::ConnectionType;
|
||||
use places::storage::bookmarks::public_node::fetch_bookmark;
|
||||
use places::storage::bookmarks::fetch::fetch_bookmark;
|
||||
|
||||
let tmpdir = tempdir().unwrap();
|
||||
let fennec_path = tmpdir.path().join("browser.db");
|
||||
|
@ -577,28 +578,32 @@ fn test_positions() -> Result<()> {
|
|||
let places_api = PlacesApi::new(tmpdir.path().join("places.sqlite"))?;
|
||||
places::import::import_fennec_bookmarks(&places_api, fennec_path)?;
|
||||
|
||||
let unfiled = fetch_bookmark(
|
||||
let unfiled = match fetch_bookmark(
|
||||
&places_api.open_connection(ConnectionType::ReadOnly)?,
|
||||
&Guid::from("unfiled_____"),
|
||||
true,
|
||||
)?
|
||||
.expect("it exists");
|
||||
.expect("it exists")
|
||||
{
|
||||
Item::Folder { f } => f,
|
||||
_ => panic!("unfiled is a folder!"),
|
||||
};
|
||||
let children = unfiled.child_nodes.expect("have children");
|
||||
assert_eq!(children.len(), 3);
|
||||
// They should be ordered by the position and the actual positions updated.
|
||||
assert_eq!(children[0].guid, bm2);
|
||||
assert_eq!(children[0].position, 0);
|
||||
assert_eq!(children[1].guid, bm3);
|
||||
assert_eq!(children[1].position, 1);
|
||||
assert_eq!(children[2].guid, bm1);
|
||||
assert_eq!(children[2].position, 2);
|
||||
assert_eq!(*children[0].guid(), bm2);
|
||||
assert_eq!(*children[0].position(), 0);
|
||||
assert_eq!(*children[1].guid(), bm3);
|
||||
assert_eq!(*children[1].position(), 1);
|
||||
assert_eq!(*children[2].guid(), bm1);
|
||||
assert_eq!(*children[2].position(), 2);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_null_parent() -> Result<()> {
|
||||
use places::api::places_api::ConnectionType;
|
||||
use places::storage::bookmarks::public_node::fetch_bookmark;
|
||||
use places::storage::bookmarks::fetch::fetch_bookmark;
|
||||
|
||||
let tmpdir = tempdir().unwrap();
|
||||
let fennec_path = tmpdir.path().join("browser.db");
|
||||
|
@ -629,22 +634,26 @@ fn test_null_parent() -> Result<()> {
|
|||
places::import::import_fennec_bookmarks(&places_api, fennec_path)?;
|
||||
|
||||
// should have ended up in unfiled.
|
||||
let unfiled = fetch_bookmark(
|
||||
let unfiled = match fetch_bookmark(
|
||||
&places_api.open_connection(ConnectionType::ReadOnly)?,
|
||||
&Guid::from("unfiled_____"),
|
||||
true,
|
||||
)?
|
||||
.expect("it exists");
|
||||
.expect("it exists")
|
||||
{
|
||||
Item::Folder { f } => f,
|
||||
_ => panic!("unfiled is a folder!"),
|
||||
};
|
||||
let children = unfiled.child_nodes.expect("have children");
|
||||
assert_eq!(children.len(), 1);
|
||||
assert_eq!(children[0].guid, "folderAAAAAA");
|
||||
assert_eq!(*children[0].guid(), "folderAAAAAA");
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_invalid_utf8() -> Result<()> {
|
||||
use places::api::places_api::ConnectionType;
|
||||
use places::storage::bookmarks::public_node::fetch_bookmark;
|
||||
use places::storage::bookmarks::fetch::fetch_bookmark;
|
||||
use url::Url;
|
||||
|
||||
let tmpdir = tempdir().unwrap();
|
||||
|
@ -691,16 +700,24 @@ fn test_invalid_utf8() -> Result<()> {
|
|||
let conn = places_api.open_connection(ConnectionType::ReadOnly)?;
|
||||
|
||||
// should have ended up in unfiled.
|
||||
let unfiled = fetch_bookmark(&conn, &Guid::from("unfiled_____"), true)?.expect("it exists");
|
||||
let unfiled =
|
||||
match fetch_bookmark(&conn, &Guid::from("unfiled_____"), true)?.expect("it exists") {
|
||||
Item::Folder { f } => f,
|
||||
_ => panic!("unfiled is a folder!"),
|
||||
};
|
||||
|
||||
let url = Url::parse(&format!("http://example.com/{}", fixed))?;
|
||||
assert!(bookmark_exists(&places_api, url.as_str())?);
|
||||
|
||||
let children = unfiled.child_nodes.expect("have children");
|
||||
assert_eq!(children.len(), 1);
|
||||
assert_eq!(children[0].title, Some(fixed));
|
||||
let child = match &children[0] {
|
||||
Item::Bookmark { b } => b,
|
||||
_ => panic!("expecting a bookmark"),
|
||||
};
|
||||
assert_eq!(child.title, Some(fixed));
|
||||
// We can't know exactly what the fixed guid is, but it must be valid.
|
||||
assert!(children[0].guid.is_valid_for_places());
|
||||
assert!(child.guid.is_valid_for_places());
|
||||
// Can't check keyword or tags because we drop them except for sync users
|
||||
// (and for them, we've dropped them until their first sync)
|
||||
Ok(())
|
||||
|
|
|
@ -6,7 +6,7 @@ use dogear::Guid;
|
|||
use places::{
|
||||
api::places_api::{ConnectionType, PlacesApi},
|
||||
import::ios_bookmarks::IosBookmarkType,
|
||||
storage::bookmarks,
|
||||
storage::bookmarks::{self, fetch::Item},
|
||||
Result,
|
||||
};
|
||||
use rusqlite::Connection;
|
||||
|
@ -378,22 +378,28 @@ fn test_import_basic() -> Result<()> {
|
|||
|
||||
let places_db = places_api.open_connection(ConnectionType::ReadOnly)?;
|
||||
|
||||
let sep =
|
||||
bookmarks::public_node::fetch_bookmark(&places_db, &sync_guid(&sep_id), false)?.unwrap();
|
||||
assert_eq!(sep.node_type, places::BookmarkType::Separator);
|
||||
assert!(matches!(
|
||||
bookmarks::fetch::fetch_bookmark(&places_db, &sync_guid(&sep_id), false)?.unwrap(),
|
||||
Item::Separator { .. }
|
||||
));
|
||||
|
||||
let bmk =
|
||||
bookmarks::public_node::fetch_bookmark(&places_db, &sync_guid(&bmk_id), false)?.unwrap();
|
||||
assert_eq!(bmk.node_type, places::BookmarkType::Bookmark);
|
||||
match bookmarks::fetch::fetch_bookmark(&places_db, &sync_guid(&bmk_id), false)?.unwrap() {
|
||||
Item::Bookmark { b } => b,
|
||||
_ => panic!("expecting a bookmark"),
|
||||
};
|
||||
assert_eq!(
|
||||
bmk.url,
|
||||
Some(url::Url::parse("https://www.example.com/123").unwrap())
|
||||
url::Url::parse("https://www.example.com/123").unwrap()
|
||||
);
|
||||
assert_eq!(bmk.parent_guid, Some(sync_guid(&folder_id)));
|
||||
assert_eq!(bmk.parent_guid, sync_guid(&folder_id));
|
||||
|
||||
let fld =
|
||||
bookmarks::public_node::fetch_bookmark(&places_db, &sync_guid(&folder_id), false)?.unwrap();
|
||||
assert_eq!(fld.node_type, places::BookmarkType::Folder);
|
||||
let fld = match bookmarks::fetch::fetch_bookmark(&places_db, &sync_guid(&folder_id), false)?
|
||||
.unwrap()
|
||||
{
|
||||
Item::Folder { f } => f,
|
||||
_ => panic!("expecting a folder"),
|
||||
};
|
||||
assert_eq!(fld.child_guids, Some(vec![sync_guid(&bmk_id)]));
|
||||
|
||||
Ok(())
|
||||
|
@ -437,37 +443,51 @@ fn test_import_with_local() -> Result<()> {
|
|||
let places_db = places_api.open_connection(ConnectionType::ReadOnly)?;
|
||||
|
||||
let bmk0 =
|
||||
bookmarks::public_node::fetch_bookmark(&places_db, &sync_guid(&b0id), false)?.unwrap();
|
||||
assert_eq!(bmk0.node_type, places::BookmarkType::Bookmark);
|
||||
assert_eq!(bmk0.parent_guid, Some(sync_guid(&dogear::MOBILE_GUID)));
|
||||
match bookmarks::fetch::fetch_bookmark(&places_db, &sync_guid(&b0id), false)?.unwrap() {
|
||||
Item::Bookmark { b } => b,
|
||||
_ => panic!("expecting a bookmark"),
|
||||
};
|
||||
assert_eq!(bmk0.parent_guid, sync_guid(&dogear::MOBILE_GUID));
|
||||
assert_eq!(
|
||||
bmk0.url,
|
||||
Some(url::Url::parse("https://www.example.com/123").unwrap())
|
||||
url::Url::parse("https://www.example.com/123").unwrap()
|
||||
);
|
||||
|
||||
let bmk1 =
|
||||
bookmarks::public_node::fetch_bookmark(&places_db, &sync_guid(&b1id), false)?.unwrap();
|
||||
assert_eq!(bmk1.node_type, places::BookmarkType::Bookmark);
|
||||
assert_eq!(bmk1.parent_guid, Some(sync_guid(&dogear::MOBILE_GUID)));
|
||||
match bookmarks::fetch::fetch_bookmark(&places_db, &sync_guid(&b1id), false)?.unwrap() {
|
||||
Item::Bookmark { b } => b,
|
||||
_ => panic!("expecting a bookmark"),
|
||||
};
|
||||
assert_eq!(bmk1.parent_guid, sync_guid(&dogear::MOBILE_GUID));
|
||||
assert_eq!(
|
||||
bmk1.url,
|
||||
Some(url::Url::parse("https://www.example.com/1%202%203").unwrap())
|
||||
url::Url::parse("https://www.example.com/1%202%203").unwrap()
|
||||
);
|
||||
|
||||
let bmk2 =
|
||||
bookmarks::public_node::fetch_bookmark(&places_db, &sync_guid(&b2id), false)?.unwrap();
|
||||
assert_eq!(bmk2.url, Some(url::Url::parse("http://💖.com/💖").unwrap()));
|
||||
match bookmarks::fetch::fetch_bookmark(&places_db, &sync_guid(&b2id), false)?.unwrap() {
|
||||
Item::Bookmark { b } => b,
|
||||
_ => panic!("expecting a bookmark"),
|
||||
};
|
||||
assert_eq!(bmk2.url, url::Url::parse("http://💖.com/💖").unwrap());
|
||||
|
||||
let bmk3 =
|
||||
bookmarks::public_node::fetch_bookmark(&places_db, &sync_guid(&b3id), false)?.unwrap();
|
||||
assert_eq!(bmk3.url, Some(url::Url::parse("http://😍.com/😍").unwrap()));
|
||||
match bookmarks::fetch::fetch_bookmark(&places_db, &sync_guid(&b3id), false)?.unwrap() {
|
||||
Item::Bookmark { b } => b,
|
||||
_ => panic!("expecting a bookmark"),
|
||||
};
|
||||
assert_eq!(bmk3.url, url::Url::parse("http://😍.com/😍").unwrap());
|
||||
|
||||
let mobile = bookmarks::public_node::fetch_bookmark(
|
||||
let mobile = match bookmarks::fetch::fetch_bookmark(
|
||||
&places_db,
|
||||
&sync_guid(&dogear::MOBILE_GUID),
|
||||
false,
|
||||
)?
|
||||
.unwrap();
|
||||
.unwrap()
|
||||
{
|
||||
Item::Folder { f } => f,
|
||||
_ => panic!("expecting a bookmark"),
|
||||
};
|
||||
|
||||
assert_eq!(
|
||||
mobile.child_guids,
|
||||
|
|
|
@ -7,4 +7,4 @@ license = "MPL-2.0"
|
|||
|
||||
[dependencies]
|
||||
anyhow = "1"
|
||||
uniffi_bindgen = "^0.15"
|
||||
uniffi_bindgen = "^0.16"
|
||||
|
|
|
@ -2,8 +2,5 @@
|
|||
# `dir` refers to the directory where the protobuf file can be found, relative to this file location.
|
||||
# (Optional) `out_dir` refers to where the compiled rust file should be saved. If not present `dir` is used.
|
||||
|
||||
["places_msg_types.proto"]
|
||||
dir = "../components/places/src/"
|
||||
|
||||
["fetch_msg_types.proto"]
|
||||
dir = "../components/viaduct/src/"
|
||||
|
|
Загрузка…
Ссылка в новой задаче