Initial commit

This commit is contained in:
abersheeran 2023-10-25 17:17:24 +08:00
parent 8cc8a855eb
commit 0f85b31208
10 changed files with 1020 additions and 0 deletions

23
.gitignore vendored Normal file
View File

@ -0,0 +1,23 @@
# Generated by Cargo
# will have compiled files and executables
debug/
target/
# Remove Cargo.lock from gitignore if creating an executable, leave it for libraries
# More information here https://doc.rust-lang.org/cargo/guide/cargo-toml-vs-cargo-lock.html
Cargo.lock
# These are backup files generated by rustfmt
**/*.rs.bk
# MSVC Windows builds of rustc generate these, which store debugging information
*.pdb
# node.js
node_modules/
# wrangler
.wrangler/
# VSCode
.vscode/

22
Cargo.toml Normal file
View File

@ -0,0 +1,22 @@
[package]
name = "worker-rust"
version = "0.1.0"
edition = "2021"
# https://github.com/rustwasm/wasm-pack/issues/1247
[package.metadata.wasm-pack.profile.release]
wasm-opt = false
[lib]
crate-type = ["cdylib"]
[dependencies]
async-trait = "0.1.74"
base64 = "0.21.5"
lazy_static = "1.4.0"
worker = "0.0.15"
[profile.release]
lto = true
strip = true
codegen-units = 1

51
README.md Normal file
View File

@ -0,0 +1,51 @@
# Template: worker-rust
[![Deploy to Cloudflare Workers](https://deploy.workers.cloudflare.com/button)](https://deploy.workers.cloudflare.com/?url=https://github.com/cloudflare/templates/tree/main/worker-rust)
A template for kick-starting a Cloudflare Worker project using [`workers-rs`](https://github.com/cloudflare/workers-rs).
This template is designed for compiling Rust to WebAssembly and publishing the resulting worker to Cloudflare's [edge infrastructure](https://www.cloudflare.com/network/).
## Setup
To create a `my-project` directory using this template, run:
```sh
$ npx wrangler generate my-project https://github.com/cloudflare/workers-sdk/templates/experimental/worker-rust
# or
$ yarn wrangler generate my-project https://github.com/cloudflare/workers-sdk/templates/experimental/worker-rust
# or
$ pnpm wrangler generate my-project https://github.com/cloudflare/workers-sdk/templates/experimental/worker-rust
```
## Wrangler
Wrangler is used to develop, deploy, and configure your Worker via CLI.
Further documentation for Wrangler can be found [here](https://developers.cloudflare.com/workers/tooling/wrangler).
## Usage
This template starts you off with a `src/lib.rs` file, acting as an entrypoint for requests hitting your Worker. Feel free to add more code in this file, or create Rust modules anywhere else for this project to use.
With `wrangler`, you can build, test, and deploy your Worker with the following commands:
```sh
# run your Worker in an ideal development workflow (with a local server, file watcher & more)
$ npm run dev
# deploy your Worker globally to the Cloudflare network (update your wrangler.toml file for configuration)
$ npm run deploy
```
Read the latest `worker` crate documentation here: https://docs.rs/worker
## WebAssembly
`workers-rs` (the Rust SDK for Cloudflare Workers used in this template) is meant to be executed as compiled WebAssembly, and as such so **must** all the code you write and depend upon. All crates and modules used in Rust-based Workers projects have to compile to the `wasm32-unknown-unknown` triple.
Read more about this on the [`workers-rs`](https://github.com/cloudflare/workers-rs) project README.
## Issues
If you have any problems with the `worker` crate, please open an issue on the upstream project issue tracker on the [`workers-rs` repository](https://github.com/cloudflare/workers-rs).

12
package.json Normal file
View File

@ -0,0 +1,12 @@
{
"name": "r2-webdav",
"version": "0.0.0",
"private": true,
"scripts": {
"deploy": "wrangler deploy",
"dev": "wrangler dev --local"
},
"devDependencies": {
"wrangler": "^2.13.0"
}
}

346
src/dav.rs Normal file
View File

@ -0,0 +1,346 @@
use crate::r2::R2;
use crate::values::{DavProperties, Depth, Overwrite, Range};
use crate::xml::XMLBuilder;
use std::collections::HashMap;
use std::option::Option;
use worker::ByteStream;
/// WebDAV request handlers layered over an R2-backed filesystem.
pub struct Dav {
    // Backing store; every handler delegates storage operations to it.
    fs: R2,
}
/// Success reply: (status code, response headers, body text).
type DavResponse = (u16, HashMap<String, String>, String);
/// Error reply: (status code, optional headers, optional body text).
type DavErrResponse = (u16, Option<HashMap<String, String>>, Option<String>);
/// Success reply whose body is a byte stream (used when serving object content).
type DavStreamResponse = (u16, HashMap<String, String>, ByteStream);
/// Unifies the two handler result shapes (text body vs. byte stream) so the
/// dispatcher can convert either one into an HTTP response with a single match.
pub enum DavResponseType {
    DavResponse(Result<DavResponse, DavErrResponse>),
    DavStreamResponse(Result<DavStreamResponse, DavErrResponse>),
}
impl From<Result<DavResponse, DavErrResponse>> for DavResponseType {
fn from(value: Result<DavResponse, DavErrResponse>) -> Self {
DavResponseType::DavResponse(value)
}
}
impl From<Result<DavStreamResponse, DavErrResponse>> for DavResponseType {
fn from(value: Result<DavStreamResponse, DavErrResponse>) -> Self {
DavResponseType::DavStreamResponse(value)
}
}
/// WebDAV compliance class advertised in the `DAV` response header.
static DAV_CLASS: &str = "1";
/// Methods this server implements, advertised via the `Allow` header.
/// Fixed: "DELETE" was missing even though the dispatcher routes DELETE
/// requests to `handle_delete`.
static SUPPORT_METHODS: [&str; 9] = [
    "OPTIONS", "PROPFIND", "MKCOL", "GET", "HEAD", "PUT", "DELETE", "COPY", "MOVE",
];
impl Dav {
pub fn new(fs: R2) -> Dav {
Dav { fs }
}
pub async fn handle_unsupport_method(&self) -> Result<DavResponse, DavErrResponse> {
let mut headers = HashMap::new();
headers.insert("DAV".to_string(), DAV_CLASS.to_string());
headers.insert("Allow".to_string(), SUPPORT_METHODS.join(", ").to_string());
return Err((405, Some(headers), None));
}
pub async fn handle_options(&self) -> Result<DavResponse, DavErrResponse> {
let mut headers = HashMap::new();
headers.insert("DAV".to_string(), DAV_CLASS.to_string());
headers.insert("Allow".to_string(), SUPPORT_METHODS.join(", ").to_string());
return Ok((204, headers, "".to_string()));
}
pub async fn handle_propfind(
&self,
path: String,
depth: Depth,
req_body: String,
) -> Result<DavResponse, DavErrResponse> {
if req_body.len() > 0 {
return Err((415, None, None));
}
let mut headers = HashMap::new();
headers.insert(
"Content-Type".to_string(),
"application/xml; charset=utf-8".to_string(),
);
match depth {
Depth::One => {
let mut multistatus = XMLBuilder::new(
"D:multistatus".to_string(),
Some(vec![("xmlns:D".to_string(), "DAV:".to_string())]),
None,
);
match self.fs.list(path).await {
Ok(items) => {
for (href, properties) in items {
let mut response =
XMLBuilder::new("D:response".to_string(), None, None);
response.elem("D:href".to_string(), None, Some(href));
let mut propstat =
XMLBuilder::new("D:propstat".to_string(), None, None);
let mut prop = XMLBuilder::new("D:prop".to_string(), None, None);
prop.elem("D:creationdate".to_string(), None, properties.creation_date);
prop.elem("D:displayname".to_string(), None, properties.display_name);
prop.elem(
"D:getcontentlanguage".to_string(),
None,
properties.get_content_language,
);
prop.elem(
"D:getcontentlength".to_string(),
None,
properties
.get_content_length
.map_or(None, |v| Some(v.to_string())),
);
prop.elem(
"D:getcontenttype".to_string(),
None,
properties.get_content_type,
);
prop.elem("D:getetag".to_string(), None, properties.get_etag);
prop.elem(
"D:getlastmodified".to_string(),
None,
properties.get_last_modified,
);
propstat.add(prop);
propstat.elem(
"D:status".to_string(),
None,
Some("HTTP/1.1 200 OK".to_string()),
);
response.add(propstat);
multistatus.add(response);
}
Ok((207, headers, multistatus.build()))
}
Err(_) => return Err((404, None, None)),
}
}
Depth::Zero => {
let mut multistatus = XMLBuilder::new(
"D:multistatus".to_string(),
Some(vec![("xmlns:D".to_string(), "DAV:".to_string())]),
None,
);
match self.fs.get(path).await {
Ok((href, properties)) => {
let mut response = XMLBuilder::new("D:response".to_string(), None, None);
response.elem("D:href".to_string(), None, Some(href));
let mut propstat = XMLBuilder::new("D:propstat".to_string(), None, None);
let mut prop = XMLBuilder::new("D:prop".to_string(), None, None);
prop.elem("D:creationdate".to_string(), None, properties.creation_date);
prop.elem("D:displayname".to_string(), None, properties.display_name);
prop.elem(
"D:getcontentlanguage".to_string(),
None,
properties.get_content_language,
);
prop.elem(
"D:getcontentlength".to_string(),
None,
properties
.get_content_length
.map_or(None, |v| Some(v.to_string())),
);
prop.elem(
"D:getcontenttype".to_string(),
None,
properties.get_content_type,
);
prop.elem("D:getetag".to_string(), None, properties.get_etag);
prop.elem(
"D:getlastmodified".to_string(),
None,
properties.get_last_modified,
);
propstat.add(prop);
propstat.elem(
"D:status".to_string(),
None,
Some("HTTP/1.1 200 OK".to_string()),
);
response.add(propstat);
multistatus.add(response);
Ok((207, (headers), (multistatus.build())))
}
Err(_) => return Err((404, None, None)),
}
}
Depth::Infinity => return Err((400, None, None)),
}
}
pub async fn handle_mkcol(
&self,
path: String,
req_body: String,
) -> Result<DavResponse, DavErrResponse> {
if req_body.len() > 0 {
return Err((415, None, None));
}
Ok((201, HashMap::new(), "".to_string()))
// R2 unsupport create empty directory
// Err((403, None, None))
}
pub async fn handle_get_obj(
&self,
path: String,
range: Range,
) -> Result<DavStreamResponse, DavErrResponse> {
match self.fs.download(path, range.clone()).await {
Ok((properties, stream)) => {
let mut headers: HashMap<String, String> = HashMap::new();
headers.insert("Accept-Ranges".to_string(), "bytes".to_string());
headers.insert(
"Content-Type".to_string(),
properties
.get_content_type
.map_or("application/octet-stream".to_string(), |v| v),
);
headers.insert(
"Content-Length".to_string(),
properties
.get_content_length
.map_or("0".to_string(), |v| v.to_string()),
);
properties
.get_etag
.map(|v| headers.insert("ETag".to_string(), v));
properties
.get_last_modified
.map(|v| headers.insert("Last-Modified".to_string(), v));
match (range.start, range.end) {
(Some(start), Some(end)) => {
headers.insert(
"Content-Range".to_string(),
format!("bytes {}-{}/{}", start, end, end - start + 1),
);
Ok((206, (headers), stream))
}
_ => Ok((200, (headers), stream)),
}
}
Err(_) => return Err((404, None, None)),
}
}
pub async fn handle_get_dir(&self, path: String) -> Result<DavResponse, DavErrResponse> {
match self.fs.list(path).await {
Ok(items) => {
let mut headers = HashMap::new();
headers.insert(
"Content-Type".to_string(),
"application/html; charset=utf-8".to_string(),
);
let html = items
.iter()
.map(|item| {
format!(
"<a href=\"{}\">{}</a>",
&item.0,
match &item.1.display_name {
Some(display_name) => display_name,
None => &item.0,
}
)
})
.collect::<Vec<_>>()
.join(", ");
return Ok((200, (headers), (html)));
}
Err(_) => return Err((404, None, None)),
}
}
pub async fn handle_head_obj(
&self,
path: String,
range: Range,
) -> Result<DavResponse, DavErrResponse> {
match self.handle_get_obj(path, range).await {
Ok((status_code, headers, _)) => Ok((status_code, headers, "".to_string())),
Err(e) => Err(e),
}
}
pub async fn handle_head_dir(&self, path: String) -> Result<DavResponse, DavErrResponse> {
match self.handle_get_dir(path).await {
Ok((status_code, headers, _)) => Ok((status_code, headers, "".to_string())),
Err(e) => Err(e),
}
}
pub async fn handle_delete(&self, path: String) -> Result<DavResponse, DavErrResponse> {
match self.fs.delete(path).await {
Ok(()) => Ok((204, HashMap::new(), "".to_string())),
Err(error) => Err((400, None, Some(error.to_string()))),
}
}
pub async fn handle_put(
&self,
path: String,
stream: ByteStream,
content_length: u64,
) -> Result<DavResponse, DavErrResponse> {
if path.ends_with("/") {
return Err((405, None, None));
}
match self.fs.put(path, stream, content_length).await {
Ok(properties) => {
println!("{:?}", properties);
Ok((201, HashMap::new(), "".to_string()))
}
Err(error) => Err((400, None, Some(error.to_string()))),
}
}
pub async fn handle_copy(
&self,
path: String,
destination: String,
depth: Depth,
overwrite: Overwrite,
) -> Result<DavResponse, DavErrResponse> {
if path.ends_with("/") {
match depth {
Depth::Zero => Err((400, None, Some("Unsupported copy collection".to_string()))),
Depth::Infinity => Ok((200, HashMap::new(), "".to_string())),
_ => Err((400, None, Some("Unsupported copy depth".to_string()))),
}
} else {
Err((400, None, Some("Unsupported copy resource".to_string())))
}
}
pub async fn handle_move(
&self,
path: String,
destination: String,
depth: Depth,
overwrite: Overwrite,
) -> Result<DavResponse, DavErrResponse> {
if path.ends_with("/") {
match depth {
Depth::Zero => Err((400, None, Some("Unsupported move collection".to_string()))),
Depth::Infinity => Ok((200, HashMap::new(), "".to_string())),
_ => Err((400, None, Some("Unsupported move depth".to_string()))),
}
} else {
Err((400, None, Some("Unsupported move resource".to_string())))
}
}
}

254
src/lib.rs Normal file
View File

@ -0,0 +1,254 @@
use crate::values::Depth;
use base64;
use dav::DavResponseType;
use r2::R2;
use values::Overwrite;
use worker::*;
mod dav;
mod r2;
mod values;
mod xml;
#[event(fetch)]
async fn main(req: Request, env: Env, _: Context) -> Result<Response> {
let username = env.var("USERNAME").unwrap().to_string();
let password = env.var("PASSWORD").unwrap().to_string();
let protocol = env.var("PROTOCOL").unwrap().to_string();
let bucket_name = env.var("BUCKET_NAME").unwrap().to_string();
if let Some(r) = basic_authorization(
req.headers().get("authorization").unwrap(),
username,
password,
) {
return r;
}
let dav = dav::Dav::new(match protocol.as_str() {
"r2" => R2::new(env.bucket(bucket_name.as_str()).unwrap()),
_ => panic!("PROTOCOL {} not supported", protocol),
});
worker(req, dav).await
}
/// Validate an HTTP Basic `Authorization` header against the configured
/// credentials.
///
/// Returns `None` when authentication succeeds, or `Some(401 response)`
/// carrying a `WWW-Authenticate` challenge that the caller should return.
fn basic_authorization(
    authorization_header: Option<String>,
    username: String,
    password: String,
) -> Option<Result<Response>> {
    let unauthorized = || {
        Some(Response::error("Unauthorized", 401).map(|response| {
            let mut headers = Headers::new();
            headers
                .append("WWW-Authenticate", "Basic realm=\"webdav\"")
                .unwrap();
            response.with_headers(headers)
        }))
    };

    let text = match authorization_header {
        Some(text) => text,
        None => return unauthorized(),
    };
    // Expect exactly "Basic <base64(user:pass)>".
    let parts: Vec<&str> = text.split(' ').collect();
    if parts.len() != 2 || parts[0] != "Basic" {
        return unauthorized();
    }
    let decoded = match base64::decode(parts[1]) {
        Ok(bytes) => match String::from_utf8(bytes) {
            Ok(s) => s,
            Err(_) => return unauthorized(),
        },
        Err(_) => return unauthorized(),
    };
    // Fixed: split on the FIRST colon only (RFC 7617) — the previous
    // `split(':')` + length check rejected any password containing ':'.
    let mut credentials = decoded.splitn(2, ':');
    let user = credentials.next().unwrap_or("");
    match credentials.next() {
        Some(pass) if user == username && pass == password => None,
        _ => unauthorized(),
    }
}
/// Parse a `Range` header value ("bytes=start-end") into our `Range` type.
/// A missing or malformed header yields an unbounded range.
/// Fixed: the previous `split("bytes=").next()` returned the text *before*
/// the prefix (always ""), so Range headers were silently ignored.
fn parse_range(header: Option<String>) -> values::Range {
    header.map_or(
        values::Range {
            start: None,
            end: None,
        },
        |v| values::Range::from(v.strip_prefix("bytes=").unwrap_or(v.as_str()).to_string()),
    )
}

/// Extract the bucket-relative path from a `Destination` header URL.
/// Fixed: accepts any scheme (the previous code split on "http://" and
/// panicked on https destinations) and tolerates a missing scheme.
fn parse_destination(header: Option<String>) -> String {
    header.map_or(String::new(), |v| {
        let without_scheme = v.split("://").nth(1).unwrap_or(v.as_str());
        without_scheme
            .split('/')
            .skip(1)
            .collect::<Vec<&str>>()
            .join("/")
    })
}

/// Dispatch one HTTP request to the matching WebDAV handler and convert the
/// handler's result into a `worker::Response`.
async fn worker(mut req: Request, dav: dav::Dav) -> Result<Response> {
    let dav_response: DavResponseType = match req.inner().method().as_str() {
        "PROPFIND" => {
            // Depth defaults to "infinity" per RFC 4918 when absent.
            let depth: Depth = req
                .headers()
                .get("depth")?
                .map_or("infinity".to_string(), |v| v)
                .into();
            let resource_path = req.path();
            dav.handle_propfind(resource_path, depth, req.text().await?)
                .await
                .into()
        }
        "OPTIONS" => dav.handle_options().await.into(),
        "MKCOL" => {
            let resource_path = req.path();
            dav.handle_mkcol(resource_path, req.text().await?)
                .await
                .into()
        }
        "GET" => {
            let resource_path = req.path();
            let range = parse_range(req.headers().get("range")?);
            // Trailing slash distinguishes collections from objects.
            if resource_path.ends_with('/') {
                dav.handle_get_dir(resource_path).await.into()
            } else {
                dav.handle_get_obj(resource_path, range).await.into()
            }
        }
        "HEAD" => {
            let resource_path = req.path();
            let range = parse_range(req.headers().get("range")?);
            if resource_path.ends_with('/') {
                dav.handle_head_dir(resource_path).await.into()
            } else {
                dav.handle_head_obj(resource_path, range).await.into()
            }
        }
        "DELETE" => {
            let resource_path = req.path();
            dav.handle_delete(resource_path).await.into()
        }
        "PUT" => {
            let resource_path = req.path();
            // Fall back to 0 on a missing or malformed Content-Length
            // instead of panicking (was `.parse().unwrap()`).
            let content_length = req
                .headers()
                .get("content-length")?
                .and_then(|v| v.parse::<u64>().ok())
                .unwrap_or(0);
            dav.handle_put(resource_path, req.stream()?, content_length)
                .await
                .into()
        }
        "COPY" => {
            let resource_path = req.path();
            let destination = parse_destination(req.headers().get("destination")?);
            let depth: Depth = req
                .headers()
                .get("depth")?
                .map_or("infinity".to_string(), |v| v)
                .into();
            // Overwrite defaults to "T" (allowed) per RFC 4918.
            let overwrite: Overwrite = req
                .headers()
                .get("overwrite")?
                .map_or("T".to_string(), |v| v)
                .into();
            dav.handle_copy(resource_path, destination, depth, overwrite)
                .await
                .into()
        }
        "MOVE" => {
            let resource_path = req.path();
            let destination = parse_destination(req.headers().get("destination")?);
            let depth: Depth = req
                .headers()
                .get("depth")?
                .map_or("infinity".to_string(), |v| v)
                .into();
            let overwrite: Overwrite = req
                .headers()
                .get("overwrite")?
                .map_or("T".to_string(), |v| v)
                .into();
            dav.handle_move(resource_path, destination, depth, overwrite)
                .await
                .into()
        }
        _ => dav.handle_unsupport_method().await.into(),
    };
    // Convert the handler result (text body or stream) into a Response; error
    // tuples carry an optional header map and optional body text.
    match dav_response {
        DavResponseType::DavResponse(r) => r.map_or_else(
            |e| {
                let (status_code, headers, body) = e;
                Response::error(body.unwrap_or("".to_string()), status_code).map(|response| {
                    match headers {
                        Some(headers) => response.with_headers(Headers::from_iter(headers)),
                        None => response,
                    }
                    .with_status(status_code)
                })
            },
            |r| {
                let (status_code, headers, body) = r;
                Response::from_body(ResponseBody::Body(body.into_bytes())).map(|response| {
                    response
                        .with_headers(Headers::from_iter(headers))
                        .with_status(status_code)
                })
            },
        ),
        DavResponseType::DavStreamResponse(r) => r.map_or_else(
            |e| {
                let (status_code, headers, body) = e;
                Response::error(body.unwrap_or("".to_string()), status_code).map(|response| {
                    match headers {
                        Some(headers) => response.with_headers(Headers::from_iter(headers)),
                        None => response,
                    }
                    .with_status(status_code)
                })
            },
            |r| {
                let (status_code, headers, body) = r;
                Response::from_stream(body).map(|response| {
                    response
                        .with_headers(Headers::from_iter(headers))
                        .with_status(status_code)
                })
            },
        ),
    }
}

100
src/r2.rs Normal file
View File

@ -0,0 +1,100 @@
use crate::values::{DavProperties, Range};
use worker::{Bucket, ByteStream, FixedLengthStream, Range as R2Range};
/// Thin adapter exposing a Cloudflare R2 bucket through filesystem-style
/// operations (get/list/download/delete/put) with `String` errors.
pub struct R2 {
    bucket: Bucket,
}
impl R2 {
    /// Wrap an R2 bucket binding.
    pub fn new(bucket: Bucket) -> R2 {
        R2 { bucket }
    }

    /// Fetch a single object's key and DAV properties, erroring when the
    /// object does not exist.
    pub async fn get(&self, path: String) -> Result<(String, DavProperties), String> {
        match self.bucket.get(path).execute().await {
            Ok(Some(file)) => Ok((file.key(), DavProperties::from_r2(&file))),
            Ok(None) => Err("Resource not found".to_string()),
            Err(error) => Err(error.to_string()),
        }
    }

    /// List every object whose key starts with `path`, as (key, properties) pairs.
    pub async fn list(&self, path: String) -> Result<Vec<(String, DavProperties)>, String> {
        match self.bucket.list().prefix(path).execute().await {
            Ok(listing) => Ok(listing
                .objects()
                .into_iter()
                .map(|file| (file.key(), DavProperties::from_r2(&file)))
                .collect()),
            Err(error) => Err(error.to_string()),
        }
    }

    /// Open a (possibly ranged) read of an object, returning its properties
    /// and a byte stream over its body.
    pub async fn download(
        &self,
        path: String,
        range: Range,
    ) -> Result<(DavProperties, ByteStream), String> {
        // Translate our inclusive start/end pair into the R2 range shape.
        let r2range: Option<R2Range> = match (range.start, range.end) {
            (Some(start), Some(end)) => Some(R2Range::OffsetWithLength {
                offset: start,
                length: end - start + 1,
            }),
            (Some(start), None) => Some(R2Range::OffsetWithOptionalLength {
                offset: start,
                length: None,
            }),
            (None, Some(end)) => Some(R2Range::OptionalOffsetWithLength {
                offset: None,
                length: end,
            }),
            (None, None) => None,
        };
        let request = match r2range {
            Some(r) => self.bucket.get(path).range(r),
            None => self.bucket.get(path),
        };
        match request.execute().await {
            Ok(Some(file)) => match file.body() {
                Some(body) => match body.stream() {
                    Ok(stream) => Ok((DavProperties::from_r2(&file), stream)),
                    Err(_) => Err("Failed to get file body stream".to_string()),
                },
                None => Err("Failed to get file body stream".to_string()),
            },
            Ok(None) => Err("Resource not found".to_string()),
            Err(error) => Err(error.to_string()),
        }
    }

    /// Delete the object at `path`.
    pub async fn delete(&self, path: String) -> Result<(), String> {
        self.bucket
            .delete(path)
            .await
            .map(|_| ())
            .map_err(|error| error.to_string())
    }

    /// Upload `content_length` bytes from `stream` to `path`, returning the
    /// stored object's properties.
    pub async fn put(
        &self,
        path: String,
        stream: ByteStream,
        content_length: u64,
    ) -> Result<DavProperties, String> {
        self.bucket
            .put(path, FixedLengthStream::wrap(stream, content_length))
            .execute()
            .await
            .map(|file| DavProperties::from_r2(&file))
            .map_err(|error| error.to_string())
    }
}

106
src/values.rs Normal file
View File

@ -0,0 +1,106 @@
use worker::Object;
/// The WebDAV `Depth` header: how far PROPFIND/COPY/MOVE should recurse.
#[derive(Default, Debug, Clone, PartialEq, Hash, Eq)]
pub enum Depth {
    Zero,
    One,
    #[default]
    Infinity,
}

impl From<String> for Depth {
    /// Parse a `Depth` header value case-insensitively; unrecognized values
    /// fall back to `Infinity`, the header's defined default.
    ///
    /// Fixed: the input was upper-cased but compared against the lowercase
    /// literal "infinity", leaving that arm dead (behavior happened to be
    /// saved only by the identical catch-all).
    fn from(m: String) -> Self {
        match m.to_ascii_lowercase().as_str() {
            "0" => Depth::Zero,
            "1" => Depth::One,
            "infinity" => Depth::Infinity,
            _ => Depth::Infinity,
        }
    }
}
/// An inclusive byte range parsed from a request, both bounds optional.
/// Only fully-specified "start-end" forms parse to bounds; anything else
/// (open-ended, multipart, malformed) yields an unbounded range.
#[derive(Default, Debug, Clone, PartialEq, Hash, Eq)]
pub struct Range {
    pub start: Option<u32>,
    pub end: Option<u32>,
}

impl From<Option<String>> for Range {
    /// A missing header is an unbounded range; otherwise delegate to the
    /// string parser.
    fn from(line: Option<String>) -> Self {
        line.map_or(Range::default(), Range::from)
    }
}

impl From<String> for Range {
    fn from(line: String) -> Self {
        // Multipart/annotated specs (containing ';') are not supported.
        if line.contains(';') {
            return Range::default();
        }
        let bounds: Result<Vec<u32>, _> = line.split('-').map(|v| v.parse::<u32>()).collect();
        match bounds {
            // Exactly two numeric parts => "start-end".
            Ok(bounds) if bounds.len() == 2 => Range {
                start: Some(bounds[0]),
                end: Some(bounds[1]),
            },
            _ => Range::default(),
        }
    }
}
/// The WebDAV `Overwrite` header; overwriting is allowed by default.
#[derive(Default, Debug, Clone, PartialEq, Hash, Eq)]
pub enum Overwrite {
    #[default]
    True,
    False,
}

impl From<String> for Overwrite {
    /// Parse an `Overwrite` header value: "F" forbids overwriting, anything
    /// else (including "T" and unrecognized values) allows it.
    fn from(value: String) -> Self {
        if value == "F" {
            Overwrite::False
        } else {
            Overwrite::True
        }
    }
}
/// The DAV-namespace properties reported for a resource in PROPFIND replies,
/// also reused for GET/HEAD response headers.
#[derive(Default, Debug, Clone, PartialEq, Hash, Eq)]
pub struct DavProperties {
    /// `creationdate`; populated from the R2 upload timestamp.
    pub creation_date: Option<String>,
    /// `displayname`; populated from Content-Disposition metadata.
    pub display_name: Option<String>,
    /// `getcontentlanguage`; from Content-Language metadata.
    pub get_content_language: Option<String>,
    /// `getcontentlength`; object size in bytes.
    pub get_content_length: Option<u64>,
    /// `getcontenttype`; from Content-Type metadata.
    pub get_content_type: Option<String>,
    /// `getetag`; the object's HTTP ETag.
    pub get_etag: Option<String>,
    /// `getlastmodified`; currently never populated (see `from_r2`).
    pub get_last_modified: Option<String>,
}
impl DavProperties {
    /// Map an R2 object's metadata onto the WebDAV property set.
    pub fn from_r2(file: &Object) -> DavProperties {
        let metadata = file.http_metadata();
        DavProperties {
            creation_date: Some(file.uploaded().to_string()),
            display_name: metadata.content_disposition,
            get_content_language: metadata.content_language,
            get_content_length: Some(file.size().into()),
            get_content_type: metadata.content_type,
            get_etag: Some(file.http_etag()),
            // No last-modified value is derived from R2 metadata here.
            get_last_modified: None,
        }
    }
}

90
src/xml.rs Normal file
View File

@ -0,0 +1,90 @@
use std::collections::HashMap;
/// A tiny XML document builder: a named element with optional attributes,
/// optional text content, and nested child elements.
pub struct XMLBuilder {
    name: String,
    value: Option<String>,
    attributes: Option<HashMap<String, String>>,
    elements: Vec<XMLBuilder>,
}

impl XMLBuilder {
    /// Create an element. `attributes` are (name, value) pairs; `value` is
    /// the element's text content.
    pub fn new(
        name: String,
        attributes: Option<Vec<(String, String)>>,
        value: Option<String>,
    ) -> XMLBuilder {
        XMLBuilder {
            name,
            value,
            attributes: attributes.map(|v| v.into_iter().collect()),
            elements: Vec::new(),
        }
    }

    /// Append a new child element, returning a mutable reference to it so
    /// calls can be chained to build nested structures.
    pub fn elem(
        &mut self,
        name: String,
        attributes: Option<Vec<(String, String)>>,
        value: Option<String>,
    ) -> &mut XMLBuilder {
        let el = XMLBuilder::new(name, attributes, value);
        self.elements.push(el);
        self.elements.last_mut().unwrap()
    }

    /// Append an already-built element as a child.
    pub fn add(&mut self, element: XMLBuilder) {
        self.elements.push(element);
    }

    /// Serialize the whole tree, prefixed with the XML declaration.
    pub fn build(&self) -> String {
        let mut xml = Vec::new();
        xml.push("<?xml version=\"1.0\" encoding=\"utf-8\"?>".to_string());
        xml.push(self.write_element(self));
        xml.join("")
    }

    /// Escape the five characters that are special in XML text and attribute
    /// values. Fixed: values were previously emitted raw, so content
    /// containing '&', '<', etc. produced malformed XML. Element and
    /// attribute names are caller-controlled and are not escaped.
    fn escape(text: &str) -> String {
        let mut escaped = String::with_capacity(text.len());
        for c in text.chars() {
            match c {
                '&' => escaped.push_str("&amp;"),
                '<' => escaped.push_str("&lt;"),
                '>' => escaped.push_str("&gt;"),
                '"' => escaped.push_str("&quot;"),
                '\'' => escaped.push_str("&apos;"),
                _ => escaped.push(c),
            }
        }
        escaped
    }

    /// Recursively serialize one element and its subtree.
    fn write_element(&self, element: &XMLBuilder) -> String {
        let mut xml = Vec::new();
        // Opening tag with any attributes (values escaped).
        let mut attrs = Vec::new();
        if let Some(attributes) = &element.attributes {
            for (key, value) in attributes {
                attrs.push(format!("{}=\"{}\"", key, XMLBuilder::escape(value)));
            }
        }
        if !attrs.is_empty() {
            xml.push(format!("<{} {}>", element.name, attrs.join(" ")));
        } else {
            xml.push(format!("<{}>", element.name));
        }
        // Escaped text content.
        if let Some(value) = &element.value {
            xml.push(XMLBuilder::escape(value));
        }
        // Children.
        for item in &element.elements {
            xml.push(self.write_element(item));
        }
        // Closing tag.
        xml.push(format!("</{}>", element.name));
        xml.join("")
    }
}
#[cfg(test)]
mod tests {
    use crate::xml::XMLBuilder;

    // Verifies nesting via chained `elem` calls plus `add`, and checks the
    // exact serialized output including the XML declaration prefix.
    #[test]
    fn xml_build() {
        let mut xml = XMLBuilder::new("root".to_string(), None, None);
        xml.elem("child".to_string(), None, None)
            .elem("grandchild".to_string(), None, None)
            .add(XMLBuilder::new(
                "greatgrandchild".to_string(),
                None,
                Some("value".to_string()),
            ));
        assert!(xml.build() == "<?xml version=\"1.0\" encoding=\"utf-8\"?><root><child><grandchild><greatgrandchild>value</greatgrandchild></grandchild></child></root>")
    }
}

16
wrangler.toml Normal file
View File

@ -0,0 +1,16 @@
name = "r2-webdav"
main = "build/worker/shim.mjs"
compatibility_date = "2023-03-22"
[build]
command = "cargo install -q worker-build && worker-build --release"
[[r2_buckets]]
binding = 'webdav' # <~ valid JavaScript variable name
bucket_name = 'webdav'
[vars]
PROTOCOL = "r2"
BUCKET_NAME = "webdav"
USERNAME = "USERNAME"
PASSWORD = "PASSWORD"