Parse xml

parent 9794f37842
commit 60f3200479
Cargo.toml

@@ -14,6 +14,7 @@ crate-type = ["cdylib"]
 async-trait = "0.1.74"
 base64 = "0.21.5"
 lazy_static = "1.4.0"
+quick-xml = "0.31.0"
 worker = "0.0.18"
 
 [profile.release]
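The new quick-xml dependency provides the pull parser that `XMLNode::parse_xml` in src/xml.rs (below) is built on. As orientation, here is a minimal sketch of that event loop, using only calls that also appear in this commit (`Reader::from_str`, `trim_text`, `read_event_into`); the sample body and the printing are illustrative, not part of the commit.

```rust
use quick_xml::events::Event;
use quick_xml::reader::Reader;

fn main() {
    // Illustrative PROPFIND-style request body.
    let body = r#"<D:propfind xmlns:D="DAV:"><D:prop><D:getetag></D:getetag></D:prop></D:propfind>"#;
    let mut reader = Reader::from_str(body);
    reader.trim_text(true);
    let mut buf = Vec::new();
    loop {
        // The same Start/Text/End/Eof cases that XMLNode::parse_xml matches on.
        match reader.read_event_into(&mut buf) {
            Ok(Event::Start(ref e)) => {
                println!("start: {}", String::from_utf8_lossy(e.name().as_ref()));
            }
            Ok(Event::Text(e)) => {
                if let Ok(text) = e.unescape() {
                    println!("text: {}", text);
                }
            }
            Ok(Event::End(ref e)) => {
                println!("end: {}", String::from_utf8_lossy(e.name().as_ref()));
            }
            Ok(Event::Eof) => break,
            Err(e) => {
                eprintln!("error at {}: {:?}", reader.buffer_position(), e);
                break;
            }
            _ => (),
        }
        buf.clear();
    }
}
```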
src/dav.rs (151 changed lines)

@@ -2,7 +2,7 @@ use worker::ByteStream;
 
 use crate::r2::R2;
 use crate::values::{Depth, Overwrite, Range};
-use crate::xml::XMLBuilder;
+use crate::xml::XMLNode;
 use std::collections::HashMap;
 use std::option::Option;
 
@@ -69,9 +69,15 @@ impl Dav {
         depth: Depth,
         req_body: String,
     ) -> Result<DavResponse, DavErrResponse> {
-        // if req_body.len() > 0 {
-        // return Err((415, None, None));
-        // }
+        let mut xml;
+        if req_body.len() > 0 {
+            match XMLNode::parse_xml(&req_body) {
+                Ok(v) => xml = v,
+                Err(_) => return Err((415, None, None)),
+            };
+        } else {
+            return Err((415, None, None));
+        }
 
         let mut headers = HashMap::new();
         headers.insert(
@@ -81,7 +87,7 @@ impl Dav {
 
         match depth {
             Depth::One => {
-                let mut multistatus = XMLBuilder::new(
+                let mut multistatus = XMLNode::new(
                     "D:multistatus".to_string(),
                     Some(vec![("xmlns:D".to_string(), "DAV:".to_string())]),
                     None,
@@ -89,45 +95,44 @@ impl Dav {
                 match self.fs.list(path.clone()).await {
                     Ok(items) => {
                         for (href, properties) in items {
-                            let mut response =
-                                XMLBuilder::new("D:response".to_string(), None, None);
+                            let response = multistatus.elem("D:response".to_string(), None, None);
                             response.elem("D:href".to_string(), None, Some(href));
-                            let mut propstat =
-                                XMLBuilder::new("D:propstat".to_string(), None, None);
-                            let mut prop = XMLBuilder::new("D:prop".to_string(), None, None);
-                            prop.elem("D:creationdate".to_string(), None, properties.creation_date);
-                            prop.elem("D:displayname".to_string(), None, properties.display_name);
-                            prop.elem(
-                                "D:getcontentlanguage".to_string(),
-                                None,
-                                properties.get_content_language,
-                            );
-                            prop.elem(
-                                "D:getcontentlength".to_string(),
-                                None,
-                                properties
-                                    .get_content_length
-                                    .map_or(None, |v| Some(v.to_string())),
-                            );
-                            prop.elem(
-                                "D:getcontenttype".to_string(),
-                                None,
-                                properties.get_content_type,
-                            );
-                            prop.elem("D:getetag".to_string(), None, properties.get_etag);
-                            prop.elem(
-                                "D:getlastmodified".to_string(),
-                                None,
-                                properties.get_last_modified,
-                            );
-                            propstat.add(prop);
+                            let propstat = response.elem("D:propstat".to_string(), None, None);
                             propstat.elem(
                                 "D:status".to_string(),
                                 None,
                                 Some("HTTP/1.1 200 OK".to_string()),
                             );
-                            response.add(propstat);
-                            multistatus.add(response);
+                            let prop = propstat.elem("D:prop".to_string(), None, None);
+                            properties
+                                .creation_date
+                                .map(|v| prop.elem("D:creationdate".to_string(), None, Some(v)));
+                            properties
+                                .display_name
+                                .map(|v| prop.elem("D:displayname".to_string(), None, Some(v)));
+                            properties.get_content_language.map(|v| {
+                                prop.elem("D:getcontentlanguage".to_string(), None, Some(v))
+                            });
+                            properties.get_content_length.map(|v| {
+                                prop.elem(
+                                    "D:getcontentlength".to_string(),
+                                    None,
+                                    Some(v.to_string()),
+                                )
+                            });
+                            properties
+                                .get_content_type
+                                .map(|v| prop.elem("D:getcontenttype".to_string(), None, Some(v)));
+                            properties
+                                .get_etag
+                                .map(|v| prop.elem("D:getetag".to_string(), None, Some(v)));
+                            properties.get_last_modified.map(|v| {
+                                prop.elem(
+                                    "D:getlastmodified".to_string(),
+                                    None,
+                                    Some(v.to_string()),
+                                )
+                            });
                         }
 
                         Ok((207, headers, multistatus.build()))
@@ -136,13 +141,13 @@ impl Dav {
                 }
             }
             Depth::Zero => {
-                let mut multistatus = XMLBuilder::new(
+                let mut multistatus = XMLNode::new(
                     "D:multistatus".to_string(),
                     Some(vec![("xmlns:D".to_string(), "DAV:".to_string())]),
                     None,
                 );
                 match self.fs.get(path.clone()).await {
-                    Ok((href, properties)) => {
+                    Ok((href, properties, _, custom_metadata)) => {
                         let response = multistatus.elem("D:response".to_string(), None, None);
                         response.elem("D:href".to_string(), None, Some(href));
                         let propstat = response.elem("D:propstat".to_string(), None, None);
@@ -152,31 +157,31 @@ impl Dav {
                             Some("HTTP/1.1 200 OK".to_string()),
                         );
                         let prop = propstat.elem("D:prop".to_string(), None, None);
-                        prop.elem("D:creationdate".to_string(), None, properties.creation_date);
-                        prop.elem("D:displayname".to_string(), None, properties.display_name);
-                        prop.elem(
-                            "D:getcontentlanguage".to_string(),
-                            None,
-                            properties.get_content_language,
-                        );
-                        prop.elem(
-                            "D:getcontentlength".to_string(),
-                            None,
-                            properties
-                                .get_content_length
-                                .map_or(None, |v| Some(v.to_string())),
-                        );
-                        prop.elem(
-                            "D:getcontenttype".to_string(),
-                            None,
-                            properties.get_content_type,
-                        );
-                        prop.elem("D:getetag".to_string(), None, properties.get_etag);
-                        prop.elem(
-                            "D:getlastmodified".to_string(),
-                            None,
-                            properties.get_last_modified,
-                        );
+                        properties
+                            .creation_date
+                            .map(|v| prop.elem("D:creationdate".to_string(), None, Some(v)));
+                        properties
+                            .display_name
+                            .map(|v| prop.elem("D:displayname".to_string(), None, Some(v)));
+                        properties
+                            .get_content_language
+                            .map(|v| prop.elem("D:getcontentlanguage".to_string(), None, Some(v)));
+                        properties.get_content_length.map(|v| {
+                            prop.elem("D:getcontentlength".to_string(), None, Some(v.to_string()))
+                        });
+                        properties
+                            .get_content_type
+                            .map(|v| prop.elem("D:getcontenttype".to_string(), None, Some(v)));
+                        properties
+                            .get_etag
+                            .map(|v| prop.elem("D:getetag".to_string(), None, Some(v)));
+                        properties.get_last_modified.map(|v| {
+                            prop.elem("D:getlastmodified".to_string(), None, Some(v.to_string()))
+                        });
+
+                        for (key, value) in custom_metadata {
+                            prop.elem(key, None, Some(value));
+                        }
 
                         Ok((207, (headers), (multistatus.build())))
                     }
@@ -212,7 +217,7 @@ impl Dav {
             "Content-Type".to_string(),
             "application/xml; charset=utf-8".to_string(),
         );
-        let mut multistatus = XMLBuilder::new(
+        let mut multistatus = XMLNode::new(
            "D:multistatus".to_string(),
            Some(vec![("xmlns:D".to_string(), "DAV:".to_string())]),
            None,
@@ -249,7 +254,7 @@ impl Dav {
         range: Range,
     ) -> Result<DavStreamResponse, DavErrResponse> {
         match self.fs.download(path, range.clone()).await {
-            Ok((properties, stream)) => {
+            Ok((properties, response_headers, stream)) => {
                 let mut headers: HashMap<String, String> = HashMap::new();
                 headers.insert("Accept-Ranges".to_string(), "bytes".to_string());
                 headers.insert(
@@ -270,6 +275,18 @@ impl Dav {
                 properties
                     .get_last_modified
                     .map(|v| headers.insert("Last-Modified".to_string(), v));
+                response_headers
+                    .cache_control
+                    .map(|v| headers.insert("Cache-Control".to_string(), v));
+                response_headers
+                    .cache_expiry
+                    .map(|v| headers.insert("Expires".to_string(), v.to_string()));
+                response_headers
+                    .content_disposition
+                    .map(|v| headers.insert("Content-Disposition".to_string(), v));
+                response_headers
+                    .content_encoding
+                    .map(|v| headers.insert("Content-Encoding".to_string(), v));
                 match (range.start, range.end) {
                     (Some(start), Some(end)) => {
                         headers.insert(
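For orientation, a hedged sketch of the multistatus document that the rewritten `propfind` now assembles through `XMLNode` (`new`/`elem`/`build`, as added in this commit), reduced to one response entry. The helper name, href, and etag value are illustrative, and the exact serialization depends on `XMLNode::write_element`.

```rust
use crate::xml::XMLNode;

fn sample_multistatus() -> String {
    let mut multistatus = XMLNode::new(
        "D:multistatus".to_string(),
        Some(vec![("xmlns:D".to_string(), "DAV:".to_string())]),
        None,
    );
    // Children are created in place and returned as &mut XMLNode, so the
    // caller no longer needs the add()-based assembly used before.
    let response = multistatus.elem("D:response".to_string(), None, None);
    response.elem("D:href".to_string(), None, Some("/file.txt".to_string()));
    let propstat = response.elem("D:propstat".to_string(), None, None);
    propstat.elem(
        "D:status".to_string(),
        None,
        Some("HTTP/1.1 200 OK".to_string()),
    );
    let prop = propstat.elem("D:prop".to_string(), None, None);
    prop.elem("D:getetag".to_string(), None, Some("\"etag\"".to_string()));

    // Roughly: <?xml version="1.0" encoding="utf-8"?><D:multistatus xmlns:D="DAV:">
    //   <D:response><D:href>/file.txt</D:href><D:propstat>...</D:propstat></D:response>
    // </D:multistatus>
    multistatus.build()
}
```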
src/r2.rs (71 changed lines)

@@ -1,4 +1,6 @@
-use crate::values::{DavProperties, Range};
+use std::collections::HashMap;
+
+use crate::values::{DavProperties, HttpResponseHeaders, Range};
 use worker::{console_debug, Bucket, ByteStream, FixedLengthStream, Headers, Range as R2Range};
 
 pub struct R2 {
@@ -10,10 +12,26 @@ impl R2 {
         R2 { bucket }
     }
 
-    pub async fn get(&self, path: String) -> Result<(String, DavProperties), String> {
+    pub async fn get(
+        &self,
+        path: String,
+    ) -> Result<
+        (
+            String,
+            DavProperties,
+            HttpResponseHeaders,
+            HashMap<String, String>,
+        ),
+        String,
+    > {
         match self.bucket.get(path).execute().await {
             Ok(f) => f.map_or(Err("Resource not found".to_string()), |file| {
-                Ok((file.key(), DavProperties::from(&file)))
+                Ok((
+                    file.key(),
+                    DavProperties::from(&file),
+                    HttpResponseHeaders::from(file.http_metadata()),
+                    file.custom_metadata().unwrap_or(HashMap::new()),
+                ))
             }),
             Err(error) => Err(error.to_string()),
         }
@@ -33,14 +51,37 @@ impl R2 {
         }
     }
 
-    pub async fn patch_metadata(&self, path: String, metadata: Headers) -> Result<(), String> {
-        match self.bucket.get(path).execute().await {
-            Ok(f) => f.map_or(Err("Resource not found".to_string()), |file| {
-                match file.write_http_metadata(metadata) {
-                    Ok(_) => Ok(()),
-                    Err(error) => Err(error.to_string()),
+    pub async fn patch_metadata(
+        &self,
+        path: String,
+        metadata: HashMap<String, String>,
+    ) -> Result<HashMap<String, String>, String> {
+        match self.bucket.get(path.clone()).execute().await {
+            Ok(f) => match f {
+                Some(file) => {
+                    let stream = match file.body() {
+                        Some(body) => match body.stream() {
+                            Ok(s) => s,
+                            Err(e) => return Err(e.to_string()),
+                        },
+                        None => return Err("Failed to get file body stream".to_string()),
+                    };
+                    match self
+                        .bucket
+                        .put(path, FixedLengthStream::wrap(stream, file.size().into()))
+                        .custom_metadata(metadata)
+                        .execute()
+                        .await
+                    {
+                        Ok(file) => match file.custom_metadata() {
+                            Ok(metadata) => Ok(metadata),
+                            Err(e) => Err(e.to_string()),
+                        },
+                        Err(error) => Err(error.to_string()),
+                    }
                 }
-            }),
+                None => Err("Resource not found".to_string()),
+            },
             Err(error) => Err(error.to_string()),
         }
     }
@@ -49,7 +90,7 @@ impl R2 {
         &self,
         path: String,
         range: Range,
-    ) -> Result<(DavProperties, ByteStream), String> {
+    ) -> Result<(DavProperties, HttpResponseHeaders, ByteStream), String> {
         let r2range: Option<R2Range> = match (range.start, range.end) {
             (Some(start), Some(end)) => Some(R2Range::OffsetWithLength {
                 offset: start,
@@ -78,7 +119,13 @@ impl R2 {
             .map_or(Err("Failed to get file body stream".to_string()), |b| {
                 b.stream().map_or(
                     Err("Failed to get file body stream".to_string()),
-                    |stream| Ok((DavProperties::from(&file), stream)),
+                    |stream| {
+                        Ok((
+                            DavProperties::from(&file),
+                            HttpResponseHeaders::from(file.http_metadata()),
+                            stream,
+                        ))
+                    },
                 )
             })
         }),
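A hedged usage sketch (not part of the commit) for the widened `R2::get` tuple and the rewritten `patch_metadata`, which now copies the object body back through `FixedLengthStream` because R2 custom metadata cannot be rewritten in place. The `R2::new` constructor, the `Env::bucket` binding name, the helper name, and the metadata key are assumptions.

```rust
use worker::{console_debug, Env};

use crate::r2::R2;

async fn tag_object(env: &Env, path: String) -> Result<(), String> {
    // Assumes an R2 binding named "bucket" and the existing R2::new(bucket) constructor.
    let bucket = env.bucket("bucket").map_err(|e| e.to_string())?;
    let r2 = R2::new(bucket);

    // get() now returns (key, DavProperties, HttpResponseHeaders, custom metadata).
    let (href, _props, _headers, mut custom) = r2.get(path.clone()).await?;
    console_debug!("patching metadata for {}", href);

    custom.insert("x-label".to_string(), "archived".to_string());
    // patch_metadata re-uploads the body with this metadata map and returns
    // the custom metadata stored on the new object.
    let stored = r2.patch_metadata(path, custom).await?;
    console_debug!("stored {} custom metadata keys", stored.len());
    Ok(())
}
```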
src/values.rs

@@ -1,4 +1,4 @@
-use worker::{console_debug, Object};
+use worker::{console_debug, Date, HttpMetadata, Object};
 
 #[derive(Default, Debug, Clone, PartialEq, Hash, Eq)]
 pub enum Depth {
@@ -106,3 +106,22 @@ impl From<&Object> for DavProperties {
         }
     }
 }
+
+#[derive(Debug, Clone, Default, PartialEq, Eq)]
+pub struct HttpResponseHeaders {
+    pub content_disposition: Option<String>,
+    pub content_encoding: Option<String>,
+    pub cache_control: Option<String>,
+    pub cache_expiry: Option<Date>,
+}
+
+impl From<HttpMetadata> for HttpResponseHeaders {
+    fn from(value: HttpMetadata) -> Self {
+        HttpResponseHeaders {
+            content_disposition: value.content_disposition,
+            content_encoding: value.content_encoding,
+            cache_control: value.cache_control,
+            cache_expiry: value.cache_expiry,
+        }
+    }
+}
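The new `HttpResponseHeaders` struct carries R2's `HttpMetadata` up to the response layer. A hedged sketch of how it maps onto plain response headers, mirroring the `Dav::download` change above; the helper name and the `headers` map are illustrative.

```rust
use std::collections::HashMap;

use crate::values::HttpResponseHeaders;

// Copy the optional R2 HTTP metadata into response headers.
fn apply_response_headers(rh: HttpResponseHeaders, headers: &mut HashMap<String, String>) {
    rh.cache_control
        .map(|v| headers.insert("Cache-Control".to_string(), v));
    rh.cache_expiry
        .map(|v| headers.insert("Expires".to_string(), v.to_string()));
    rh.content_disposition
        .map(|v| headers.insert("Content-Disposition".to_string(), v));
    rh.content_encoding
        .map(|v| headers.insert("Content-Encoding".to_string(), v));
}
```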
src/xml.rs (102 changed lines)

@@ -1,19 +1,22 @@
+use quick_xml::events::Event;
+use quick_xml::reader::Reader;
 use std::collections::HashMap;
 
-pub struct XMLBuilder {
-    name: String,
-    value: Option<String>,
-    attributes: Option<HashMap<String, String>>,
-    elements: Vec<XMLBuilder>,
+#[derive(Default, Debug, Clone)]
+pub struct XMLNode {
+    pub name: String,
+    pub value: Option<String>,
+    pub attributes: Option<HashMap<String, String>>,
+    pub elements: Vec<XMLNode>,
 }
 
-impl XMLBuilder {
+impl XMLNode {
     pub fn new(
         name: String,
         attributes: Option<Vec<(String, String)>>,
         value: Option<String>,
-    ) -> XMLBuilder {
-        XMLBuilder {
+    ) -> XMLNode {
+        XMLNode {
             name,
             value,
             attributes: attributes.map(|v| v.into_iter().collect()),
@@ -26,13 +29,13 @@ impl XMLBuilder {
         name: String,
         attributes: Option<Vec<(String, String)>>,
         value: Option<String>,
-    ) -> &mut XMLBuilder {
-        let el = XMLBuilder::new(name, attributes, value);
+    ) -> &mut XMLNode {
+        let el = XMLNode::new(name, attributes, value);
         self.elements.push(el);
         self.elements.last_mut().unwrap()
     }
 
-    pub fn add(&mut self, element: XMLBuilder) {
+    pub fn add(&mut self, element: XMLNode) {
         self.elements.push(element);
     }
 
@@ -43,7 +46,7 @@ impl XMLBuilder {
         xml.join("")
     }
 
-    fn write_element(&self, element: &XMLBuilder) -> String {
+    fn write_element(&self, element: &XMLNode) -> String {
         let mut xml = Vec::new();
         // attributes
         let mut attrs = Vec::new();
@@ -69,22 +72,91 @@ impl XMLBuilder {
         xml.push(format!("</{}>", element.name));
         xml.join("")
     }
+
+    pub fn parse_xml(xml: &str) -> Result<XMLNode, String> {
+        let mut reader = Reader::from_str(xml);
+        reader.trim_text(true);
+        let mut buf = Vec::new();
+        let mut elements: Vec<XMLNode> = Vec::new();
+        let mut stack: Vec<(String, HashMap<String, String>, String)> = Vec::new();
+        loop {
+            match reader.read_event_into(&mut buf) {
+                Ok(Event::Start(ref e)) => {
+                    stack.push((
+                        std::str::from_utf8(e.name().as_ref()).unwrap().to_string(),
+                        e.attributes()
+                            .map(|a| {
+                                let a = a.unwrap();
+                                (
+                                    std::str::from_utf8(a.key.as_ref()).unwrap().to_string(),
+                                    std::str::from_utf8(a.value.as_ref()).unwrap().to_string(),
+                                )
+                            })
+                            .collect(),
+                        "".to_string(),
+                    ));
+                }
+                Ok(Event::End(_)) => {
+                    stack.pop().map(|(name, attributes, value)| {
+                        let mut element =
+                            XMLNode::new(name, Some(attributes.into_iter().collect()), Some(value));
+                        match elements.pop() {
+                            None => {
+                                let _ = &elements.push(element.clone());
+                            }
+                            Some(c) => {
+                                element.add(c);
+                                let _ = &elements.push(element);
+                            }
+                        };
+                    });
+                }
+                Ok(Event::Text(e)) => {
+                    stack.pop().map(|(name, attributes, _)| {
+                        stack.push((name, attributes, e.unescape().unwrap().into_owned()));
+                    });
+                }
+                Ok(Event::Eof) => break,
+                Err(e) => {
+                    return Err(format!(
+                        "Error at position {}: {:?}",
+                        reader.buffer_position(),
+                        e
+                    ))
+                }
+                _ => (),
+            }
+            buf.clear();
+        }
+        if elements.len() == 1 {
+            Ok(elements.pop().unwrap())
+        } else {
+            Err(format!("XMLNode parse error, {:?}", elements))
+        }
+    }
 }
 
 #[cfg(test)]
 mod tests {
-    use crate::xml::XMLBuilder;
+    use crate::xml::XMLNode;
 
     #[test]
     fn xml_build() {
-        let mut xml = XMLBuilder::new("root".to_string(), None, None);
+        let mut xml = XMLNode::new("root".to_string(), None, None);
         xml.elem("child".to_string(), None, None)
             .elem("grandchild".to_string(), None, None)
-            .add(XMLBuilder::new(
+            .add(XMLNode::new(
                 "greatgrandchild".to_string(),
                 None,
                 Some("value".to_string()),
             ));
         assert!(xml.build() == "<?xml version=\"1.0\" encoding=\"utf-8\"?><root><child><grandchild><greatgrandchild>value</greatgrandchild></grandchild></child></root>")
     }
+
+    #[test]
+    fn xml_parse() {
+        let xml = "<?xml version=\"1.0\" encoding=\"utf-8\"?><root><child><grandchild><greatgrandchild>value</greatgrandchild></grandchild></child></root>";
+        let xml = XMLNode::parse_xml(xml).unwrap();
+        assert!(xml.build() == "<?xml version=\"1.0\" encoding=\"utf-8\"?><root><child><grandchild><greatgrandchild>value</greatgrandchild></grandchild></child></root>", "{}", xml.build())
+    }
 }
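A hedged sketch of consuming the new parser: `XMLNode::parse_xml` returns the root node, and the now-public `name`, `value`, and `elements` fields can be walked directly. The sample body and both helpers are illustrative, not part of the commit.

```rust
use crate::xml::XMLNode;

// Recursively print a parsed tree using the fields made public in this commit.
fn print_tree(node: &XMLNode, depth: usize) {
    println!("{}{} = {:?}", "  ".repeat(depth), node.name, node.value);
    for child in &node.elements {
        print_tree(child, depth + 1);
    }
}

fn demo() -> Result<(), String> {
    let body = r#"<?xml version="1.0" encoding="utf-8"?><D:propfind xmlns:D="DAV:"><D:prop><D:getetag>tag</D:getetag></D:prop></D:propfind>"#;
    let root = XMLNode::parse_xml(body)?;
    assert_eq!(root.name, "D:propfind");
    print_tree(&root, 0);
    Ok(())
}
```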