bump syn from 1.0 to 2.0

bump `darling` from `0.14` to `0.20`
deprecated `poem-dbsession` crate
This commit is contained in:
Sunli
2023-06-21 19:18:38 +08:00
parent bb33f5555e
commit d44b48aa83
34 changed files with 147 additions and 1005 deletions

View File

@@ -17,24 +17,14 @@ jobs:
path: poem-derive
- name: poem
path: poem
options: --all-features
- name: poem-openapi-derive
path: poem-openapi-derive
- name: poem-openapi
path: poem-openapi
options: --all-features
- name: poem-lambda
path: poem-lambda
options: --all-features
- name: poem-dbsession-rustls
path: poem-dbsession
options: --features __sqlx-rustls
- name: poem-dbsession-native-tls
path: poem-dbsession
options: --features __sqlx-native-tls
- name: poem-grpc
path: poem-grpc
options: --all-features
services:
redis:
image: redis:5.0.7
@@ -74,11 +64,11 @@ jobs:
# Do tests
- name: Check With Clippy
run: cargo clippy ${{ matrix.package.options }}
run: cargo clippy --all-features
working-directory: ${{ matrix.package.path }}
- name: Run Tests
run: cargo test ${{ matrix.package.options }}
run: cargo test --all-features
working-directory: ${{ matrix.package.path }}
check-examples:

View File

@@ -30,9 +30,6 @@ jobs:
- name: poem-openapi
registryName: poem-openapi
path: poem-openapi
- name: poem-dbsession
registryName: poem-dbsession
path: poem-dbsession
- name: poem-grpc-build
registryName: poem-grpc-build
path: poem-grpc-build

View File

@@ -2,11 +2,10 @@
workspace = true
avoid_cfg_tarpaulin = true
all-features = true
exclude = ["poem-dbsession"]
exclude = []
exclude-files = [
"examples/**/*",
"poem-derive/**/*",
"poem-openapi-derive/**/*",
"poem-dbsession/**/*",
"poem-grpc-build/**/*",
]

View File

@@ -5,7 +5,6 @@ members = [
"poem-openapi-derive",
"poem-openapi",
"poem-lambda",
"poem-dbsession",
"poem-grpc-build",
"poem-grpc",
]
@@ -25,6 +24,10 @@ poem-derive = { path = "poem-derive", version = "1.3.56" }
poem-openapi-derive = { path = "poem-openapi-derive", version = "2.0.27" }
poem-grpc-build = { path = "poem-grpc-build", version = "0.2.20" }
proc-macro-crate = "1.1.0"
proc-macro2 = "1.0.29"
quote = "1.0.9"
syn = { version = "2.0" }
tokio = "1.17.0"
serde_json = "1.0.68"
serde = { version = "1.0.130", features = ["derive"] }

View File

@@ -31,11 +31,10 @@
This repo contains the following main components:
| Crate | Description | Documentation | ChangeLog |
|-------------------------------------------------------------------------------------------------------------------|--------------------------------|--------------------------------------|--------------------------------------------|
|-------------------------------------------------------------------------------------------------------------|----------------------|------------------------------------|------------------------------------------|
| **poem** [![](https://img.shields.io/crates/v/poem)](https://crates.io/crates/poem) | Poem Web | [(README)](poem/README.md) | [(CHANGELOG)](poem/CHANGELOG.md) |
| **poem-lambda** [![](https://img.shields.io/crates/v/poem-lambda)](https://crates.io/crates/poem-lambda) | Poem for AWS Lambda | [(README)](poem-lambda/README.md) | [(CHANGELOG)](poem-lambda/CHANGELOG.md) |
| **poem-openapi** [![](https://img.shields.io/crates/v/poem-openapi)](https://crates.io/crates/poem-openapi) | OpenAPI for Poem Web | [(README)](poem-openapi/README.md) | [(CHANGELOG)](poem-openapi/CHANGELOG.md) |
| **poem-dbsession** [![](https://img.shields.io/crates/v/poem-dbsession)](https://crates.io/crates/poem-dbsession) | Session storage using database | [(README)](poem-dbsession/README.md) | [(CHANGELOG)](poem-dbsession/CHANGELOG.md) |
***

View File

@@ -24,9 +24,9 @@ struct User {
/// ApiKey authorization
#[derive(SecurityScheme)]
#[oai(
type = "api_key",
ty = "api_key",
key_name = "X-API-Key",
in = "header",
key_in = "header",
checker = "api_checker"
)]
struct MyApiKeyAuthorization(User);

View File

@@ -1,24 +0,0 @@
# Changelog
All notable changes to this project will be documented in this file.
The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
# [0.3.33] 2022-07-10
- Upgrade sqlx to `0.6.0` [#299](https://github.com/poem-web/poem/pull/299)
# [0.1.4] 2021-12-05
- No longer automatically clean up expired sessions in the database.
- Expose `cleanup` method.
# [0.1.3] 2021-12-03
- Change the return type of `MysqlSessionStorage::new`/`PgSessionStorage::try_new`/`SqliteSessionStorage::try_new` to `sqlx::Result`.
# [0.1.1] 2021-12-03
- Rename `MysqlSessionStorage::new` to `MysqlSessionStorage::try_new`.
- Rename `PgSessionStorage::new` to `PgSessionStorage::try_new`.
- Rename `SqliteSessionStorage::new` to `SqliteSessionStorage::try_new`.

View File

@@ -1,58 +0,0 @@
[package]
name = "poem-dbsession"
version = "0.3.56"
authors.workspace = true
edition.workspace = true
license.workspace = true
documentation.workspace = true
homepage.workspace = true
repository.workspace = true
rust-version.workspace = true
readme = "README.md"
description = "Session storage with database for Poem."
keywords = ["http", "web", "framework", "async"]
categories = [
"network-programming",
"asynchronous",
"web-programming::http-server",
"web-programming::websocket",
]
[package.metadata.workspaces]
independent = true
[package.metadata.docs.rs]
features = ["__sqlx-native-tls"]
[features]
default = []
sqlx-mysql-native-tls = ["sqlx/mysql", "sqlx/runtime-tokio-native-tls"]
sqlx-mysql-rustls = ["sqlx/mysql", "sqlx/runtime-tokio-rustls"]
sqlx-postgres-native-tls = ["sqlx/postgres", "sqlx/runtime-tokio-native-tls"]
sqlx-postgres-rustls = ["sqlx/postgres", "sqlx/runtime-tokio-rustls"]
sqlx-sqlite-native-tls = ["sqlx/sqlite", "sqlx/runtime-tokio-native-tls"]
sqlx-sqlite-rustls = ["sqlx/sqlite", "sqlx/runtime-tokio-rustls"]
# Don't use the following features, just for testing.
__sqlx-native-tls = [
"sqlx-mysql-native-tls",
"sqlx-postgres-native-tls",
"sqlx-sqlite-native-tls",
]
__sqlx-rustls = [
"sqlx-mysql-rustls",
"sqlx-postgres-rustls",
"sqlx-sqlite-rustls",
]
[dependencies]
poem = { workspace = true, features = ["session"], default-features = true }
chrono = { workspace = true, default-features = false, features = ["clock"] }
serde_json.workspace = true
sqlx = { version = "0.6.0", optional = true, features = ["chrono", "json"] }
tokio = { workspace = true, features = ["time"] }
tracing.workspace = true
[dev-dependencies]
tokio = { workspace = true, features = ["macros"] }

View File

@@ -1,33 +0,0 @@
Session storage using database for Poem
# Crate features
## [`sqlx`](https://crates.io/crates/sqlx)
| feature | database | tls |
|---------------------------|----------|------------|
| sqlx-mysql-rustls | mysql | rustls |
| sqlx-mysql-native-tls | mysql | native-tls |
| sqlx-postgres-rustls | postgres | rustls |
| sqlx-postgres-native-tls | postgres | native-tls |
| sqlx-sqlite-rustls | sqlite | rustls |
| sqlx-sqlite-native-tls | sqlite | native-tls |
## Example
```rust,ignore
use poem::session::{CookieConfig, ServerSession, Session};
use poem_dbsession::{sqlx::MysqlSessionStorage, DatabaseConfig};
use sqlx::MySqlPool;
#[handler]
fn index(session: &Session) {
todo!()
}
let pool = MySqlPool::connect("mysql://root:123456@localhost/my_database")
.await
.unwrap();
let storage = MysqlSessionStorage::try_new(DatabaseConfig::new(), pool).await.unwrap();
let route = Route::new().at("/", index).with(ServerSession::new(CookieConfig::new(),storage));
```

View File

@@ -1,28 +0,0 @@
#![allow(dead_code)]
/// Configuration for database-backed session storage.
pub struct DatabaseConfig {
    // Name of the table that holds the serialized sessions.
    pub(crate) table_name: String,
}

impl Default for DatabaseConfig {
    fn default() -> Self {
        // Conventional table name shared by all storage backends.
        Self {
            table_name: String::from("poem_sessions"),
        }
    }
}

impl DatabaseConfig {
    /// Creates a [`DatabaseConfig`] with the default table name.
    pub fn new() -> Self {
        Self::default()
    }

    /// Specifies the table name.
    pub fn table_name(self, table_name: impl Into<String>) -> Self {
        DatabaseConfig {
            table_name: table_name.into(),
        }
    }
}

View File

@@ -1,56 +0,0 @@
//! Session storage using database for Poem
//!
//! # Crate features
//!
//! ## [`sqlx`](https://crates.io/crates/sqlx)
//!
//! | feature | database | tls |
//! |---------------------------|----------|------------|
//! | sqlx-mysql-rustls | mysql | rustls |
//! | sqlx-mysql-native-tls | mysql | native-tls |
//! | sqlx-postgres-rustls | postgres | rustls |
//! | sqlx-postgres-native-tls | postgres | native-tls |
//! | sqlx-sqlite-rustls | sqlite | rustls |
//! | sqlx-sqlite-native-tls | sqlite | native-tls |
//!
//! ## Example
//!
//! ```rust,ignore
//! use poem::session::{CookieConfig, ServerSession, Session};
//! use poem_dbsession::{sqlx::MysqlSessionStorage, DatabaseConfig};
//! use sqlx::MySqlPool;
//!
//! #[handler]
//! fn index(session: &Session) {
//! todo!()
//! }
//!
//! let pool = MySqlPool::connect("mysql://root:123456@localhost/my_database")
//! .await
//! .unwrap();
//! let storage = MysqlSessionStorage::try_new(DatabaseConfig::new(), pool).await.unwrap();
//! let route = Route::new().at("/", index).with(ServerSession::new(CookieConfig::new(),storage));
//! ```
#![doc(html_favicon_url = "https://raw.githubusercontent.com/poem-web/poem/master/favicon.ico")]
#![doc(html_logo_url = "https://raw.githubusercontent.com/poem-web/poem/master/logo.png")]
#![forbid(unsafe_code)]
#![deny(private_in_public, unreachable_pub)]
#![cfg_attr(docsrs, feature(doc_cfg))]
#![warn(missing_docs)]
#[cfg(any(
feature = "sqlx-mysql-rustls",
feature = "sqlx-mysql-native-tls",
feature = "sqlx-postgres-rustls",
feature = "sqlx-postgres-native-tls",
feature = "sqlx-sqlite-rustls",
feature = "sqlx-sqlite-native-tls"
))]
pub mod sqlx;
mod config;
#[cfg(test)]
mod test_harness;
pub use config::DatabaseConfig;

View File

@@ -1,15 +0,0 @@
//! sqlx-backed session storages.
#[cfg(any(feature = "sqlx-mysql-rustls", feature = "sqlx-mysql-native-tls"))]
mod mysql;
#[cfg(any(feature = "sqlx-postgres-rustls", feature = "sqlx-postgres-native-tls"))]
mod postgres;
#[cfg(any(feature = "sqlx-sqlite-rustls", feature = "sqlx-sqlite-native-tls"))]
mod sqlite;
#[cfg(any(feature = "sqlx-mysql-rustls", feature = "sqlx-mysql-native-tls"))]
pub use mysql::MysqlSessionStorage;
#[cfg(any(feature = "sqlx-postgres-rustls", feature = "sqlx-postgres-native-tls"))]
pub use postgres::PgSessionStorage;
#[cfg(any(feature = "sqlx-sqlite-rustls", feature = "sqlx-sqlite-native-tls"))]
pub use sqlite::SqliteSessionStorage;

View File

@@ -1,206 +0,0 @@
use std::{collections::BTreeMap, time::Duration};
use chrono::Utc;
use poem::{error::InternalServerError, session::SessionStorage, Result};
use serde_json::Value;
use sqlx::{mysql::MySqlStatement, types::Json, Executor, MySqlPool, Statement};
use crate::DatabaseConfig;
// SQL templates for the MySQL backend. `{table_name}` is substituted
// textually (see `try_new`) because SQL bind placeholders cannot name a
// table; the remaining `?` placeholders are bound at execution time.
const LOAD_SESSION_SQL: &str = r#"
select session from {table_name}
where id = ? and (expires is null or expires > ?)
"#;

// Upsert: MySQL's `on duplicate key update` refreshes an existing row.
const UPDATE_SESSION_SQL: &str = r#"
insert into {table_name} (id, session, expires) values (?, ?, ?)
on duplicate key update
expires = values(expires),
session = values(session)
"#;

const REMOVE_SESSION_SQL: &str = r#"
delete from {table_name} where id = ?
"#;

// Deletes every session whose expiry is before the bound timestamp.
const CLEANUP_SQL: &str = r#"
delete from {table_name} where expires < ?
"#;
/// Session storage using Mysql.
///
/// # Errors
///
/// - [`sqlx::Error`]
///
/// # Create the table for session storage
///
/// ```sql
/// create table if not exists poem_sessions (
///     id varchar(128) not null,
///     expires timestamp(6) null,
///     session text not null,
///     primary key (id),
///     key expires (expires)
/// )
/// engine=innodb
/// default charset=utf8
/// ```
#[derive(Clone)]
pub struct MysqlSessionStorage {
    // Connection pool; `Clone` is cheap because clones share the pool.
    pool: MySqlPool,
    // Statements prepared once in `try_new` and reused for every request.
    load_stmt: MySqlStatement<'static>,
    update_stmt: MySqlStatement<'static>,
    remove_stmt: MySqlStatement<'static>,
    cleanup_stmt: MySqlStatement<'static>,
}
impl MysqlSessionStorage {
    /// Creates a [`MysqlSessionStorage`].
    ///
    /// Prepares all four statements up front on a pooled connection, after
    /// substituting the configured table name into each SQL template.
    pub async fn try_new(config: DatabaseConfig, pool: MySqlPool) -> sqlx::Result<Self> {
        let mut conn = pool.acquire().await?;
        // `Statement::to_owned` detaches the prepared statement from the
        // borrowed connection so it can be stored with a `'static` lifetime.
        let load_stmt = Statement::to_owned(
            &conn
                .prepare(&LOAD_SESSION_SQL.replace("{table_name}", &config.table_name))
                .await?,
        );
        let update_stmt = Statement::to_owned(
            &conn
                .prepare(&UPDATE_SESSION_SQL.replace("{table_name}", &config.table_name))
                .await?,
        );
        let remove_stmt = Statement::to_owned(
            &conn
                .prepare(&REMOVE_SESSION_SQL.replace("{table_name}", &config.table_name))
                .await?,
        );
        let cleanup_stmt = Statement::to_owned(
            &conn
                .prepare(&CLEANUP_SQL.replace("{table_name}", &config.table_name))
                .await?,
        );
        Ok(Self {
            pool,
            load_stmt,
            update_stmt,
            remove_stmt,
            cleanup_stmt,
        })
    }

    /// Cleanup expired sessions.
    ///
    /// Deletes rows whose `expires` column is earlier than now; callers are
    /// expected to invoke this periodically (it is not run automatically).
    pub async fn cleanup(&self) -> sqlx::Result<()> {
        let mut conn = self.pool.acquire().await?;
        self.cleanup_stmt
            .query()
            .bind(Utc::now())
            .execute(&mut conn)
            .await?;
        Ok(())
    }
}
#[poem::async_trait]
impl SessionStorage for MysqlSessionStorage {
    // Loads a session by id; the second bind (`Utc::now()`) makes the SQL
    // filter out expired rows, so expired sessions read back as `None`.
    async fn load_session(&self, session_id: &str) -> Result<Option<BTreeMap<String, Value>>> {
        let mut conn = self.pool.acquire().await.map_err(InternalServerError)?;
        let res: Option<(Json<BTreeMap<String, Value>>,)> = self
            .load_stmt
            .query_as()
            .bind(session_id)
            .bind(Utc::now())
            .fetch_optional(&mut conn)
            .await
            .map_err(InternalServerError)?;
        Ok(res.map(|(value,)| value.0))
    }

    // Upserts the session; `expires = None` stores a NULL expiry, meaning
    // the session never expires. All failures surface as 500 errors.
    async fn update_session(
        &self,
        session_id: &str,
        entries: &BTreeMap<String, Value>,
        expires: Option<Duration>,
    ) -> Result<()> {
        let mut conn = self.pool.acquire().await.map_err(InternalServerError)?;
        // std::time::Duration -> chrono::Duration; conversion can fail for
        // out-of-range values, which is reported as an internal error.
        let expires = match expires {
            Some(expires) => {
                Some(chrono::Duration::from_std(expires).map_err(InternalServerError)?)
            }
            None => None,
        };
        self.update_stmt
            .query()
            .bind(session_id)
            .bind(Json(entries))
            .bind(expires.map(|expires| Utc::now() + expires))
            .execute(&mut conn)
            .await
            .map_err(InternalServerError)?;
        Ok(())
    }

    // Deletes the session row; removing a non-existent id is not an error.
    async fn remove_session(&self, session_id: &str) -> Result<()> {
        let mut conn = self.pool.acquire().await.map_err(InternalServerError)?;
        self.remove_stmt
            .query()
            .bind(session_id)
            .execute(&mut conn)
            .await
            .map_err(InternalServerError)?;
        Ok(())
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use crate::test_harness;

    // Integration test: requires a MySQL server reachable at localhost with
    // the credentials below — not runnable in isolation.
    #[tokio::test]
    async fn test() {
        let pool = MySqlPool::connect("mysql://root:123456@localhost/test_poem_sessions")
            .await
            .unwrap();
        let mut conn = pool.acquire().await.unwrap();
        // Create the session table the storage expects.
        sqlx::query(
            r#"
create table if not exists poem_sessions (
id varchar(128) not null,
expires timestamp(6) null,
session text not null,
primary key (id),
key expires (expires)
)
engine=innodb
default charset=utf8
"#,
        )
        .execute(&mut conn)
        .await
        .unwrap();
        let storage = MysqlSessionStorage::try_new(DatabaseConfig::new(), pool)
            .await
            .unwrap();
        // Run `cleanup` in a background task once per second while the shared
        // harness exercises load/update/remove; aborted when the test ends.
        let join_handle = tokio::spawn({
            let storage = storage.clone();
            async move {
                loop {
                    tokio::time::sleep(Duration::from_secs(1)).await;
                    storage.cleanup().await.unwrap();
                }
            }
        });
        test_harness::test_storage(storage).await;
        join_handle.abort();
    }
}

View File

@@ -1,209 +0,0 @@
use std::{collections::BTreeMap, time::Duration};
use chrono::Utc;
use poem::{error::InternalServerError, session::SessionStorage, Result};
use serde_json::Value;
use sqlx::{postgres::PgStatement, types::Json, Executor, PgPool, Statement};
use crate::DatabaseConfig;
// SQL templates for the Postgres backend. `{table_name}` is substituted
// textually (see `try_new`) because bind placeholders cannot name a table;
// `$1`, `$2`, ... are Postgres positional placeholders bound at execution.
const LOAD_SESSION_SQL: &str = r#"
select session from {table_name}
where id = $1 and (expires is null or expires > $2)
"#;

// Upsert via `on conflict(id) do update`.
const UPDATE_SESSION_SQL: &str = r#"
insert into {table_name} (id, session, expires) values ($1, $2, $3)
on conflict(id) do update set
expires = excluded.expires,
session = excluded.session
"#;

const REMOVE_SESSION_SQL: &str = r#"
delete from {table_name} where id = $1
"#;

// Deletes every session whose expiry is before the bound timestamp.
const CLEANUP_SQL: &str = r#"
delete from {table_name} where expires < $1
"#;
/// Session storage using Postgres.
///
/// # Errors
///
/// - [`sqlx::Error`]
///
/// # Create the table for session storage
///
/// ```sql
/// create table if not exists poem_sessions (
///     id varchar not null primary key,
///     expires timestamp with time zone null,
///     session jsonb not null
/// );
///
/// create index if not exists poem_sessions_expires_idx on poem_sessions (expires);
/// ```
#[derive(Clone)]
pub struct PgSessionStorage {
    // Connection pool; `Clone` is cheap because clones share the pool.
    pool: PgPool,
    // Statements prepared once in `try_new` and reused for every request.
    load_stmt: PgStatement<'static>,
    update_stmt: PgStatement<'static>,
    remove_stmt: PgStatement<'static>,
    cleanup_stmt: PgStatement<'static>,
}
impl PgSessionStorage {
    /// Creates a [`PgSessionStorage`].
    ///
    /// Prepares all four statements up front on a pooled connection, after
    /// substituting the configured table name into each SQL template.
    pub async fn try_new(config: DatabaseConfig, pool: PgPool) -> sqlx::Result<Self> {
        let mut conn = pool.acquire().await?;
        // `Statement::to_owned` detaches the prepared statement from the
        // borrowed connection so it can be stored with a `'static` lifetime.
        let load_stmt = Statement::to_owned(
            &conn
                .prepare(&LOAD_SESSION_SQL.replace("{table_name}", &config.table_name))
                .await?,
        );
        let update_stmt = Statement::to_owned(
            &conn
                .prepare(&UPDATE_SESSION_SQL.replace("{table_name}", &config.table_name))
                .await?,
        );
        let remove_stmt = Statement::to_owned(
            &conn
                .prepare(&REMOVE_SESSION_SQL.replace("{table_name}", &config.table_name))
                .await?,
        );
        let cleanup_stmt = Statement::to_owned(
            &conn
                .prepare(&CLEANUP_SQL.replace("{table_name}", &config.table_name))
                .await?,
        );
        Ok(Self {
            pool,
            load_stmt,
            update_stmt,
            remove_stmt,
            cleanup_stmt,
        })
    }

    /// Cleanup expired sessions.
    ///
    /// Deletes rows whose `expires` column is earlier than now; callers are
    /// expected to invoke this periodically (it is not run automatically).
    pub async fn cleanup(&self) -> sqlx::Result<()> {
        let mut conn = self.pool.acquire().await?;
        self.cleanup_stmt
            .query()
            .bind(Utc::now())
            .execute(&mut conn)
            .await?;
        Ok(())
    }
}
#[poem::async_trait]
impl SessionStorage for PgSessionStorage {
    // Loads a session by id; the second bind (`Utc::now()`) makes the SQL
    // filter out expired rows, so expired sessions read back as `None`.
    async fn load_session(&self, session_id: &str) -> Result<Option<BTreeMap<String, Value>>> {
        let mut conn = self.pool.acquire().await.map_err(InternalServerError)?;
        let res: Option<(Json<BTreeMap<String, Value>>,)> = self
            .load_stmt
            .query_as()
            .bind(session_id)
            .bind(Utc::now())
            .fetch_optional(&mut conn)
            .await
            .map_err(InternalServerError)?;
        Ok(res.map(|(value,)| value.0))
    }

    // Upserts the session; `expires = None` stores a NULL expiry, meaning
    // the session never expires. All failures surface as 500 errors.
    async fn update_session(
        &self,
        session_id: &str,
        entries: &BTreeMap<String, Value>,
        expires: Option<Duration>,
    ) -> Result<()> {
        let mut conn = self.pool.acquire().await.map_err(InternalServerError)?;
        // std::time::Duration -> chrono::Duration; conversion can fail for
        // out-of-range values, which is reported as an internal error.
        let expires = match expires {
            Some(expires) => {
                Some(chrono::Duration::from_std(expires).map_err(InternalServerError)?)
            }
            None => None,
        };
        self.update_stmt
            .query()
            .bind(session_id)
            .bind(Json(entries))
            .bind(expires.map(|expires| Utc::now() + expires))
            .execute(&mut conn)
            .await
            .map_err(InternalServerError)?;
        Ok(())
    }

    // Deletes the session row; removing a non-existent id is not an error.
    async fn remove_session(&self, session_id: &str) -> Result<()> {
        let mut conn = self.pool.acquire().await.map_err(InternalServerError)?;
        self.remove_stmt
            .query()
            .bind(session_id)
            .execute(&mut conn)
            .await
            .map_err(InternalServerError)?;
        Ok(())
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use crate::test_harness;

    // Integration test: requires a Postgres server reachable at localhost
    // with the credentials below — not runnable in isolation.
    #[tokio::test]
    async fn test() {
        let pool = PgPool::connect("postgres://postgres:123456@localhost/test_poem_sessions")
            .await
            .unwrap();
        let mut conn = pool.acquire().await.unwrap();
        // Create the session table the storage expects.
        sqlx::query(
            r#"
create table if not exists poem_sessions (
id varchar not null primary key,
expires timestamp with time zone null,
session jsonb not null
)
"#,
        )
        .execute(&mut conn)
        .await
        .unwrap();
        // Index on `expires` so cleanup scans stay cheap.
        sqlx::query(
            r#"
create index if not exists poem_sessions_expires_idx on poem_sessions (expires)
"#,
        )
        .execute(&mut conn)
        .await
        .unwrap();
        let storage = PgSessionStorage::try_new(DatabaseConfig::new(), pool)
            .await
            .unwrap();
        // Run `cleanup` in a background task once per second while the shared
        // harness exercises load/update/remove; aborted when the test ends.
        let join_handle = tokio::spawn({
            let storage = storage.clone();
            async move {
                loop {
                    tokio::time::sleep(Duration::from_secs(1)).await;
                    storage.cleanup().await.unwrap();
                }
            }
        });
        test_harness::test_storage(storage).await;
        join_handle.abort();
    }
}

View File

@@ -1,196 +0,0 @@
use std::{collections::BTreeMap, time::Duration};
use chrono::Utc;
use poem::{error::InternalServerError, session::SessionStorage, Result};
use serde_json::Value;
use sqlx::{sqlite::SqliteStatement, types::Json, Executor, SqlitePool, Statement};
use crate::DatabaseConfig;
// SQL templates for the SQLite backend. `{table_name}` is substituted
// textually (see `try_new`) because bind placeholders cannot name a table.
const LOAD_SESSION_SQL: &str = r#"
select session from {table_name}
where id = ? and (expires is null or expires > ?)
"#;

// Upsert via `on conflict(id) do update`.
const UPDATE_SESSION_SQL: &str = r#"
insert into {table_name} (id, session, expires) values (?, ?, ?)
on conflict(id) do update set
expires = excluded.expires,
session = excluded.session
"#;

const REMOVE_SESSION_SQL: &str = r#"
delete from {table_name} where id = ?
"#;

// NOTE(review): uses `$1` while the other statements use `?` — SQLite
// accepts both placeholder styles, so this is inconsistent but valid.
const CLEANUP_SQL: &str = r#"
delete from {table_name} where expires < $1
"#;
/// Session storage using Sqlite.
///
/// # Errors
///
/// - [`sqlx::Error`]
///
/// # Create the table for session storage
///
/// ```sql
/// create table poem_sessions (
///     id text primary key not null,
///     expires integer null,
///     session text not null
/// )
/// ```
#[derive(Clone)]
pub struct SqliteSessionStorage {
    // Connection pool; `Clone` is cheap because clones share the pool.
    pool: SqlitePool,
    // Statements prepared once in `try_new` and reused for every request.
    load_stmt: SqliteStatement<'static>,
    update_stmt: SqliteStatement<'static>,
    remove_stmt: SqliteStatement<'static>,
    cleanup_stmt: SqliteStatement<'static>,
}
impl SqliteSessionStorage {
    /// Creates a [`SqliteSessionStorage`].
    ///
    /// Prepares all four statements up front on a pooled connection, after
    /// substituting the configured table name into each SQL template.
    pub async fn try_new(config: DatabaseConfig, pool: SqlitePool) -> sqlx::Result<Self> {
        let mut conn = pool.acquire().await?;
        // `Statement::to_owned` detaches the prepared statement from the
        // borrowed connection so it can be stored with a `'static` lifetime.
        let load_stmt = Statement::to_owned(
            &conn
                .prepare(&LOAD_SESSION_SQL.replace("{table_name}", &config.table_name))
                .await?,
        );
        let update_stmt = Statement::to_owned(
            &conn
                .prepare(&UPDATE_SESSION_SQL.replace("{table_name}", &config.table_name))
                .await?,
        );
        let remove_stmt = Statement::to_owned(
            &conn
                .prepare(&REMOVE_SESSION_SQL.replace("{table_name}", &config.table_name))
                .await?,
        );
        let cleanup_stmt = Statement::to_owned(
            &conn
                .prepare(&CLEANUP_SQL.replace("{table_name}", &config.table_name))
                .await?,
        );
        Ok(Self {
            pool,
            load_stmt,
            update_stmt,
            remove_stmt,
            cleanup_stmt,
        })
    }

    /// Cleanup expired sessions.
    ///
    /// Deletes rows whose `expires` column is earlier than now; callers are
    /// expected to invoke this periodically (it is not run automatically).
    pub async fn cleanup(&self) -> sqlx::Result<()> {
        let mut conn = self.pool.acquire().await?;
        self.cleanup_stmt
            .query()
            .bind(Utc::now())
            .execute(&mut conn)
            .await?;
        Ok(())
    }
}
#[poem::async_trait]
impl SessionStorage for SqliteSessionStorage {
    // Loads a session by id; the second bind (`Utc::now()`) makes the SQL
    // filter out expired rows, so expired sessions read back as `None`.
    async fn load_session(&self, session_id: &str) -> Result<Option<BTreeMap<String, Value>>> {
        let mut conn = self.pool.acquire().await.map_err(InternalServerError)?;
        let res: Option<(Json<BTreeMap<String, Value>>,)> = self
            .load_stmt
            .query_as()
            .bind(session_id)
            .bind(Utc::now())
            .fetch_optional(&mut conn)
            .await
            .map_err(InternalServerError)?;
        Ok(res.map(|(value,)| value.0))
    }

    // Upserts the session; `expires = None` stores a NULL expiry, meaning
    // the session never expires. All failures surface as 500 errors.
    async fn update_session(
        &self,
        session_id: &str,
        entries: &BTreeMap<String, Value>,
        expires: Option<Duration>,
    ) -> Result<()> {
        let mut conn = self.pool.acquire().await.map_err(InternalServerError)?;
        // std::time::Duration -> chrono::Duration; conversion can fail for
        // out-of-range values, which is reported as an internal error.
        let expires = match expires {
            Some(expires) => {
                Some(chrono::Duration::from_std(expires).map_err(InternalServerError)?)
            }
            None => None,
        };
        self.update_stmt
            .query()
            .bind(session_id)
            .bind(Json(entries))
            .bind(expires.map(|expires| Utc::now() + expires))
            .execute(&mut conn)
            .await
            .map_err(InternalServerError)?;
        Ok(())
    }

    // Deletes the session row; removing a non-existent id is not an error.
    async fn remove_session(&self, session_id: &str) -> Result<()> {
        let mut conn = self.pool.acquire().await.map_err(InternalServerError)?;
        self.remove_stmt
            .query()
            .bind(session_id)
            .execute(&mut conn)
            .await
            .map_err(InternalServerError)?;
        Ok(())
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use crate::test_harness;

    // Self-contained test: uses an in-memory SQLite database, so unlike the
    // MySQL/Postgres tests it needs no external server.
    #[tokio::test]
    async fn test() {
        let pool = SqlitePool::connect("sqlite::memory:").await.unwrap();
        let mut conn = pool.acquire().await.unwrap();
        // Create the session table the storage expects.
        sqlx::query(
            r#"
create table poem_sessions (
id text primary key not null,
expires integer null,
session text not null
)
"#,
        )
        .execute(&mut conn)
        .await
        .unwrap();
        let storage = SqliteSessionStorage::try_new(DatabaseConfig::new(), pool)
            .await
            .unwrap();
        // Run `cleanup` in a background task once per second while the shared
        // harness exercises load/update/remove; aborted when the test ends.
        let join_handle = tokio::spawn({
            let storage = storage.clone();
            async move {
                loop {
                    tokio::time::sleep(Duration::from_secs(1)).await;
                    storage.cleanup().await.unwrap();
                }
            }
        });
        test_harness::test_storage(storage).await;
        join_handle.abort();
    }
}

View File

@@ -1,50 +0,0 @@
#![allow(dead_code)]
use std::{collections::BTreeMap, time::Duration};
use poem::session::SessionStorage;
// Shared behavioral test driven by every backend's integration test.
//
// Exercises the full `SessionStorage` contract: insert with and without a
// TTL, read back, expiry after the TTL lapses, overwrite, and removal.
// NOTE(review): timing-based — relies on a 3-second TTL plus a 5-second
// sleep, so it is sensitive to scheduler delays.
pub(crate) async fn test_storage(storage: impl SessionStorage) {
    let mut entries1 = BTreeMap::new();
    entries1.insert("a".to_string(), "1".into());
    entries1.insert("b".to_string(), "2".into());

    let mut entries2 = BTreeMap::new();
    entries2.insert("c".to_string(), "3".into());
    entries2.insert("d".to_string(), "4".into());

    let mut entries3 = BTreeMap::new();
    entries3.insert("e".to_string(), "5".into());
    entries3.insert("f".to_string(), "6".into());

    // "a1" expires after 3 seconds; "a2" never expires.
    storage
        .update_session("a1", &entries1, Some(Duration::from_secs(3)))
        .await
        .unwrap();
    storage.update_session("a2", &entries2, None).await.unwrap();

    // Both sessions are readable immediately after writing.
    assert_eq!(
        storage.load_session("a1").await.unwrap().as_ref(),
        Some(&entries1)
    );
    assert_eq!(
        storage.load_session("a2").await.unwrap().as_ref(),
        Some(&entries2)
    );

    // After the TTL lapses, "a1" is gone but "a2" survives.
    tokio::time::sleep(Duration::from_secs(5)).await;
    assert_eq!(storage.load_session("a1").await.unwrap().as_ref(), None);
    assert_eq!(
        storage.load_session("a2").await.unwrap().as_ref(),
        Some(&entries2)
    );

    // Updating an existing session replaces its entries entirely.
    storage.update_session("a2", &entries3, None).await.unwrap();
    assert_eq!(
        storage.load_session("a2").await.unwrap().as_ref(),
        Some(&entries3)
    );

    // Removal makes the session unreadable.
    storage.remove_session("a2").await.unwrap();
    assert_eq!(storage.load_session("a2").await.unwrap().as_ref(), None);
}

View File

@@ -21,7 +21,7 @@ categories = [
proc-macro = true
[dependencies]
proc-macro-crate = "1.1.0"
proc-macro2 = "1.0.29"
quote = "1.0.9"
syn = { version = "1.0.77", features = ["full"] }
proc-macro-crate.workspace = true
proc-macro2.workspace = true
quote.workspace = true
syn = { workspace = true, features = ["full"] }

View File

@@ -11,9 +11,7 @@ mod utils;
use proc_macro::TokenStream;
use quote::{format_ident, quote};
use syn::{
parse_macro_input, AttributeArgs, FnArg, GenericParam, ItemFn, Member, Meta, NestedMeta, Result,
};
use syn::{parse_macro_input, FnArg, GenericParam, ItemFn, Member, Result};
/// Wrap an asynchronous function as an `Endpoint`.
///
@@ -26,14 +24,15 @@ use syn::{
/// ```
#[proc_macro_attribute]
pub fn handler(args: TokenStream, input: TokenStream) -> TokenStream {
let args: AttributeArgs = parse_macro_input!(args as AttributeArgs);
let mut internal = false;
for arg in args {
if matches!(arg,NestedMeta::Meta(Meta::Path(p)) if p.is_ident("internal")) {
let arg_parser = syn::meta::parser(|meta| {
if meta.path.is_ident("internal") {
internal = true;
}
}
Ok(())
});
parse_macro_input!(args with arg_parser);
match generate_handler(internal, input) {
Ok(stream) => stream,
@@ -49,7 +48,7 @@ fn generate_handler(internal: bool, input: TokenStream) -> Result<TokenStream> {
let docs = item_fn
.attrs
.iter()
.filter(|attr| attr.path.is_ident("doc"))
.filter(|attr| attr.path().is_ident("doc"))
.cloned()
.collect::<Vec<_>>();
let ident = &item_fn.sig.ident;

View File

@@ -13,12 +13,12 @@ keywords = ["http", "async", "grpc"]
categories = ["network-programming", "asynchronous"]
[dependencies]
prettyplease = "0.1.16"
proc-macro2 = "1.0.37"
prettyplease = "0.2.9"
prost-build = "0.11.1"
quote = "1.0.17"
syn = "1.0.91"
proc-macro-crate = "1.1.0"
quote.workspace = true
proc-macro2.workspace = true
syn.workspace = true
proc-macro-crate.workspace = true
[package.metadata.workspaces]
independent = true

View File

@@ -16,11 +16,11 @@ categories = ["network-programming", "asynchronous"]
proc-macro = true
[dependencies]
darling = "0.14.1"
proc-macro-crate = "1.1.0"
proc-macro2 = "1.0.29"
quote = "1.0.9"
syn = { version = "1.0.77", features = ["full", "visit-mut"] }
darling = "0.20.1"
proc-macro-crate.workspace = true
proc-macro2.workspace = true
quote.workspace = true
syn = { workspace = true, features = ["full", "visit-mut"] }
thiserror.workspace = true
indexmap = "1.8.2"
regex.workspace = true

View File

@@ -3,8 +3,8 @@ use indexmap::IndexMap;
use proc_macro2::{Ident, TokenStream};
use quote::{format_ident, quote};
use syn::{
ext::IdentExt, visit_mut::VisitMut, AttributeArgs, Error, FnArg, ImplItem, ImplItemMethod,
ItemImpl, Pat, Path, ReturnType, Type,
ext::IdentExt, visit_mut::VisitMut, Error, FnArg, ImplItem, ImplItemFn, ItemImpl, Pat, Path,
ReturnType, Type,
};
use crate::{
@@ -19,7 +19,7 @@ use crate::{
};
#[derive(FromMeta)]
struct APIArgs {
pub(crate) struct APIArgs {
#[darling(default)]
internal: bool,
#[darling(default)]
@@ -84,15 +84,8 @@ struct Context {
register_items: Vec<TokenStream>,
}
pub(crate) fn generate(
args: AttributeArgs,
mut item_impl: ItemImpl,
) -> GeneratorResult<TokenStream> {
let api_args = match APIArgs::from_list(&args) {
Ok(args) => args,
Err(err) => return Ok(err.write_errors()),
};
let crate_name = get_crate_name(api_args.internal);
pub(crate) fn generate(args: APIArgs, mut item_impl: ItemImpl) -> GeneratorResult<TokenStream> {
let crate_name = get_crate_name(args.internal);
let ident = item_impl.self_ty.clone();
let (impl_generics, _, where_clause) = item_impl.generics.split_for_impl();
let mut ctx = Context {
@@ -102,7 +95,7 @@ pub(crate) fn generate(
};
for item in &mut item_impl.items {
if let ImplItem::Method(method) = item {
if let ImplItem::Fn(method) = item {
if let Some(operation_args) = parse_oai_attrs::<APIOperation>(&method.attrs)? {
if method.sig.asyncness.is_none() {
return Err(
@@ -110,7 +103,7 @@ pub(crate) fn generate(
);
}
generate_operation(&mut ctx, &crate_name, &api_args, operation_args, method)?;
generate_operation(&mut ctx, &crate_name, &args, operation_args, method)?;
remove_oai_attrs(&mut method.attrs);
}
}
@@ -165,7 +158,7 @@ fn generate_operation(
crate_name: &TokenStream,
api_args: &APIArgs,
args: APIOperation,
item_method: &mut ImplItemMethod,
item_method: &mut ImplItemFn,
) -> GeneratorResult<()> {
let APIOperation {
path,
@@ -238,8 +231,8 @@ fn generate_operation(
FnArg::Typed(pat) => {
let ident = match &*pat.pat {
Pat::Ident(ident) => ident,
Pat::TupleStruct(tuple_struct) => match tuple_struct.pat.elems.first() {
Some(Pat::Ident(ident)) if tuple_struct.pat.elems.len() == 1 => ident,
Pat::TupleStruct(tuple_struct) => match tuple_struct.elems.first() {
Some(Pat::Ident(ident)) if tuple_struct.elems.len() == 1 => ident,
_ => {
return Err(Error::new_spanned(
tuple_struct,

View File

@@ -1,7 +1,7 @@
use darling::{util::SpannedValue, FromMeta};
use darling::{ast::NestedMeta, util::SpannedValue, FromMeta};
use proc_macro2::TokenStream;
use quote::quote;
use syn::{Lit, Meta, NestedMeta, Path};
use syn::{Lit, Meta, Path};
#[derive(Debug, Copy, Clone, FromMeta)]
#[allow(clippy::enum_variant_names)]
@@ -212,7 +212,6 @@ impl ExternalDocument {
#[derive(FromMeta)]
pub(crate) struct ExtraHeader {
pub(crate) name: String,
#[darling(rename = "type")]
pub(crate) ty: SpannedValue<String>,
#[darling(default)]
pub(crate) description: Option<String>,

View File

@@ -25,8 +25,27 @@ mod union;
mod utils;
mod webhook;
use darling::FromMeta;
use proc_macro::TokenStream;
use syn::{parse_macro_input, AttributeArgs, DeriveInput, ItemImpl, ItemTrait};
use syn::{parse_macro_input, DeriveInput, ItemImpl, ItemTrait};
macro_rules! parse_nested_meta {
($ty:ty, $args:expr) => {{
let meta = match darling::ast::NestedMeta::parse_meta_list(proc_macro2::TokenStream::from(
$args,
)) {
Ok(v) => v,
Err(e) => {
return TokenStream::from(darling::Error::from(e).write_errors());
}
};
match <$ty>::from_list(&meta) {
Ok(object_args) => object_args,
Err(err) => return TokenStream::from(err.write_errors()),
}
}};
}
#[proc_macro_derive(Object, attributes(oai))]
pub fn derive_object(input: TokenStream) -> TokenStream {
@@ -85,9 +104,9 @@ pub fn derive_response_content(input: TokenStream) -> TokenStream {
#[proc_macro_attribute]
#[allow(non_snake_case)]
pub fn OpenApi(args: TokenStream, input: TokenStream) -> TokenStream {
let args = parse_macro_input!(args as AttributeArgs);
let api_args = parse_nested_meta!(api::APIArgs, args);
let item_impl = parse_macro_input!(input as ItemImpl);
match api::generate(args, item_impl) {
match api::generate(api_args, item_impl) {
Ok(stream) => stream.into(),
Err(err) => err.write_errors().into(),
}
@@ -132,9 +151,9 @@ pub fn derive_security_scheme(input: TokenStream) -> TokenStream {
#[proc_macro_attribute]
#[allow(non_snake_case)]
pub fn Webhook(args: TokenStream, input: TokenStream) -> TokenStream {
let args = parse_macro_input!(args as AttributeArgs);
let webhook_args = parse_nested_meta!(webhook::WebhookArgs, args);
let item_trait = parse_macro_input!(input as ItemTrait);
match webhook::generate(args, item_trait) {
match webhook::generate(webhook_args, item_trait) {
Ok(stream) => stream.into(),
Err(err) => err.write_errors().into(),
}

View File

@@ -196,9 +196,8 @@ struct SecuritySchemeArgs {
internal: bool,
#[darling(default)]
rename: Option<String>,
#[darling(rename = "type")]
ty: AuthType,
#[darling(default, rename = "in")]
#[darling(default)]
key_in: Option<ApiKeyInType>,
#[darling(default)]
key_name: Option<SpannedValue<String>>,

View File

@@ -5,8 +5,8 @@ use proc_macro2::{Ident, Span, TokenStream};
use proc_macro_crate::{crate_name, FoundCrate};
use quote::quote;
use syn::{
visit_mut, visit_mut::VisitMut, Attribute, Error, GenericParam, Generics, Lifetime, Lit, Meta,
Result,
visit_mut, visit_mut::VisitMut, Attribute, Error, Expr, ExprLit, GenericParam, Generics,
Lifetime, Lit, Meta, Result,
};
use crate::error::GeneratorResult;
@@ -27,9 +27,12 @@ pub(crate) fn get_crate_name(internal: bool) -> TokenStream {
pub(crate) fn get_description(attrs: &[Attribute]) -> Result<Option<String>> {
let mut full_docs = String::new();
for attr in attrs {
if attr.path.is_ident("doc") {
if let Meta::NameValue(nv) = attr.parse_meta()? {
if let Lit::Str(doc) = nv.lit {
if attr.path().is_ident("doc") {
if let Meta::NameValue(nv) = &attr.meta {
if let Expr::Lit(ExprLit {
lit: Lit::Str(doc), ..
}) = &nv.value
{
let doc = doc.value();
let doc_str = doc.trim();
if !full_docs.is_empty() {
@@ -48,7 +51,7 @@ pub(crate) fn get_description(attrs: &[Attribute]) -> Result<Option<String>> {
}
pub(crate) fn remove_description(attrs: &mut Vec<Attribute>) {
attrs.retain(|attr| !attr.path.is_ident("doc"));
attrs.retain(|attr| !attr.path().is_ident("doc"));
}
pub(crate) fn get_summary_and_description(
@@ -91,7 +94,7 @@ pub(crate) fn remove_oai_attrs(attrs: &mut Vec<Attribute>) {
if let Some((idx, _)) = attrs
.iter()
.enumerate()
.find(|(_, a)| a.path.is_ident("oai"))
.find(|(_, a)| a.path().is_ident("oai"))
{
attrs.remove(idx);
}
@@ -99,9 +102,8 @@ pub(crate) fn remove_oai_attrs(attrs: &mut Vec<Attribute>) {
pub(crate) fn parse_oai_attrs<T: FromMeta>(attrs: &[Attribute]) -> GeneratorResult<Option<T>> {
for attr in attrs {
if attr.path.is_ident("oai") {
let meta = attr.parse_meta()?;
return Ok(Some(T::from_meta(&meta)?));
if attr.path().is_ident("oai") {
return Ok(Some(T::from_meta(&attr.meta)?));
}
}
Ok(None)

View File

@@ -5,8 +5,8 @@ use indexmap::IndexMap;
use proc_macro2::TokenStream;
use quote::quote;
use syn::{
ext::IdentExt, visit_mut::VisitMut, AttributeArgs, Error, FnArg, ItemTrait, Pat, Path,
ReturnType, TraitItem, TraitItemMethod,
ext::IdentExt, visit_mut::VisitMut, Error, FnArg, ItemTrait, Pat, Path, ReturnType, TraitItem,
TraitItemFn,
};
use crate::{
@@ -21,7 +21,7 @@ use crate::{
};
#[derive(FromMeta)]
struct WebhookArgs {
pub(crate) struct WebhookArgs {
#[darling(default)]
internal: bool,
#[darling(default, multiple, rename = "tag")]
@@ -64,14 +64,10 @@ struct Context {
}
pub(crate) fn generate(
args: AttributeArgs,
args: WebhookArgs,
mut trait_impl: ItemTrait,
) -> GeneratorResult<TokenStream> {
let webhook_args = match WebhookArgs::from_list(&args) {
Ok(args) => args,
Err(err) => return Ok(err.write_errors()),
};
let crate_name = get_crate_name(webhook_args.internal);
let crate_name = get_crate_name(args.internal);
let ident = trait_impl.ident.clone();
let mut ctx = Context {
operations: Default::default(),
@@ -80,7 +76,7 @@ pub(crate) fn generate(
};
for item in &mut trait_impl.items {
if let TraitItem::Method(method) = item {
if let TraitItem::Fn(method) = item {
if let Some(operation_args) = parse_oai_attrs::<WebhookOperation>(&method.attrs)? {
if method.sig.asyncness.is_none() {
return Err(
@@ -88,7 +84,7 @@ pub(crate) fn generate(
);
}
generate_operation(&mut ctx, &crate_name, &webhook_args, operation_args, method)?;
generate_operation(&mut ctx, &crate_name, &args, operation_args, method)?;
remove_oai_attrs(&mut method.attrs);
}
}
@@ -125,7 +121,7 @@ fn generate_operation(
crate_name: &TokenStream,
webhook_args: &WebhookArgs,
args: WebhookOperation,
trait_method: &mut TraitItemMethod,
trait_method: &mut TraitItemFn,
) -> GeneratorResult<()> {
let WebhookOperation {
name,

View File

@@ -4,6 +4,24 @@ All notable changes to this project will be documented in this file.
The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
# [3.0.0] 2023-06-21
- bump `syn` from `1.0` to `2.0`
- bump `darling` from `0.14` to `0.20`
## Breaking Changes
- Since `syn` 2.0 no longer supports keywords as meta paths, some macro parameters have been renamed.
| Macro | Old Name | New Name |
|--------------------|----------|----------|
| SecurityScheme | type | ty |
| SecurityScheme | in | key_in |
| ApiResponse.header | type | ty |
https://github.com/dtolnay/syn/issues/1458
https://github.com/TedDriggs/darling/issues/238
# [2.0.27] 2023-06-06
- feat: Implement Type on the char primitive [#518](https://github.com/poem-web/poem/pull/518)

View File

@@ -28,7 +28,7 @@ Define an OpenAPI response.
| Attribute | description | Type | Optional |
|-------------|--------------------|--------|----------|
| name | Header name | String | N |
| type | Header type | String | N |
| ty | Header type | String | N |
| description | Header description | String | Y |
| deprecated | Header deprecated | bool | Y |
@@ -53,11 +53,11 @@ use poem_openapi::ApiResponse;
#[derive(ApiResponse)]
#[oai(
header(name = "X-ExtraHeader-1", type = "String"),
header(name = "X-ExtraHeader-2", type = "i32"),
header(name = "X-ExtraHeader-1", ty = "String"),
header(name = "X-ExtraHeader-2", ty = "i32"),
)]
enum CreateUserResponse {
#[oai(status = 200, header(name = "X-ExtraHeader-3", type = "f32"))]
#[oai(status = 200, header(name = "X-ExtraHeader-3", ty = "f32"))]
Ok,
}
```

View File

@@ -5,8 +5,8 @@ Define an OpenAPI Security Scheme.
| Attribute | Description | Type | Optional |
|--------------------|-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|------------|----------|
| rename | Rename the security scheme. | string | Y |
| type | The type of the security scheme. (api_key, basic, bearer, oauth2, openid_connect) | string | N |
| in | `api_key` The location of the API key. Valid values are "query", "header" or "cookie". (query, header, cookie) | string | Y |
| ty | The type of the security scheme. (api_key, basic, bearer, oauth2, openid_connect) | string | N |
| key_in | `api_key` The location of the API key. Valid values are "query", "header" or "cookie". (query, header, cookie) | string | Y |
| key_name | `api_key` The name of the header, query or cookie parameter to be used. | string | Y |
| bearer_format | `bearer` A hint to the client to identify how the bearer token is formatted. Bearer tokens are usually generated by an authorization server, so this information is primarily for documentation purposes. | string | Y |
| flows | `oauth2` An object containing configuration information for the flow types supported. | OAuthFlows | Y |

View File

@@ -568,8 +568,8 @@ async fn extra_response_headers_on_operation() {
#[oai(
path = "/",
method = "get",
response_header(name = "A1", type = "String", description = "abc"),
response_header(name = "a2", type = "i32", deprecated = true)
response_header(name = "A1", ty = "String", description = "abc"),
response_header(name = "a2", ty = "i32", deprecated = true)
)]
async fn test(&self) {}
}
@@ -594,8 +594,8 @@ async fn extra_response_headers_on_api() {
struct Api;
#[OpenApi(
response_header(name = "A1", type = "String", description = "abc"),
response_header(name = "a2", type = "i32", deprecated = true)
response_header(name = "A1", ty = "String", description = "abc"),
response_header(name = "a2", ty = "i32", deprecated = true)
)]
impl Api {
#[oai(path = "/", method = "get")]
@@ -626,8 +626,8 @@ async fn extra_request_headers_on_operation() {
#[oai(
path = "/",
method = "get",
request_header(name = "A1", type = "String", description = "abc"),
request_header(name = "a2", type = "i32", deprecated = true)
request_header(name = "A1", ty = "String", description = "abc"),
request_header(name = "a2", ty = "i32", deprecated = true)
)]
async fn test(&self) {}
}
@@ -656,8 +656,8 @@ async fn extra_request_headers_on_api() {
struct Api;
#[OpenApi(
request_header(name = "A1", type = "String", description = "abc"),
request_header(name = "a2", type = "i32", deprecated = true)
request_header(name = "A1", ty = "String", description = "abc"),
request_header(name = "a2", ty = "i32", deprecated = true)
)]
impl Api {
#[oai(path = "/", method = "get")]

View File

@@ -142,23 +142,23 @@ enum MyTags {
}
#[derive(::poem_openapi::SecurityScheme)]
#[oai(type = "basic")]
#[oai(ty = "basic")]
struct BasicSecurityScheme(::poem_openapi::auth::Basic);
#[derive(::poem_openapi::SecurityScheme)]
#[oai(type = "bearer")]
#[oai(ty = "bearer")]
struct MyBearerScheme(::poem_openapi::auth::Bearer);
#[derive(::poem_openapi::SecurityScheme)]
#[oai(type = "api_key", key_name = "X-API-Key", in = "header")]
#[oai(ty = "api_key", key_name = "X-API-Key", key_in = "header")]
struct MySecuritySchemeInHeader(::poem_openapi::auth::ApiKey);
#[derive(::poem_openapi::SecurityScheme)]
#[oai(type = "api_key", key_name = "key", in = "query")]
#[oai(ty = "api_key", key_name = "key", key_in = "query")]
struct MySecuritySchemeInQuery(::poem_openapi::auth::ApiKey);
#[derive(::poem_openapi::SecurityScheme)]
#[oai(type = "api_key", key_name = "key", in = "cookie")]
#[oai(ty = "api_key", key_name = "key", key_in = "cookie")]
struct MySecuritySchemeInCookie(::poem_openapi::auth::ApiKey);
#[derive(::poem_openapi::OAuthScopes)]
@@ -169,7 +169,7 @@ enum GithubScopes {
#[derive(::poem_openapi::SecurityScheme)]
#[oai(
type = "oauth2",
ty = "oauth2",
flows(
implicit(
authorization_url = "https://test.com/authorize",

View File

@@ -362,8 +362,8 @@ async fn header_deprecated() {
async fn extra_headers_on_response() {
#[derive(ApiResponse, Debug, Eq, PartialEq)]
#[oai(
header(name = "A1", type = "String"),
header(name = "a2", type = "i32", description = "abc", deprecated = true)
header(name = "A1", ty = "String"),
header(name = "a2", ty = "i32", description = "abc", deprecated = true)
)]
#[allow(dead_code)]
pub enum Resp {
@@ -398,8 +398,8 @@ async fn extra_headers_on_item() {
pub enum Resp {
#[oai(
status = 200,
header(name = "A1", type = "String"),
header(name = "a2", type = "i32", description = "abc", deprecated = true)
header(name = "A1", ty = "String"),
header(name = "a2", ty = "i32", description = "abc", deprecated = true)
)]
A(Json<i32>, #[oai(header = "A")] String),
}

View File

@@ -15,7 +15,7 @@ use crate::headers::Authorization;
#[test]
fn rename() {
#[derive(SecurityScheme)]
#[oai(rename = "ABC", type = "basic")]
#[oai(rename = "ABC", ty = "basic")]
struct MySecurityScheme(Basic);
assert_eq!(MySecurityScheme::security_scheme().unwrap(), "ABC");
@@ -24,7 +24,7 @@ fn rename() {
#[test]
fn default_rename() {
#[derive(SecurityScheme)]
#[oai(type = "basic")]
#[oai(ty = "basic")]
struct MySecurityScheme(Basic);
assert_eq!(
@@ -39,7 +39,7 @@ fn desc() {
///
/// D
#[derive(SecurityScheme)]
#[oai(type = "basic")]
#[oai(ty = "basic")]
struct MySecurityScheme(Basic);
let mut registry = Registry::new();
@@ -77,7 +77,7 @@ async fn no_auth() {
#[tokio::test]
async fn basic_auth() {
#[derive(SecurityScheme)]
#[oai(type = "basic")]
#[oai(ty = "basic")]
struct MySecurityScheme(Basic);
let mut registry = Registry::new();
@@ -119,7 +119,7 @@ async fn basic_auth() {
#[tokio::test]
async fn bearer_auth() {
#[derive(SecurityScheme)]
#[oai(type = "bearer")]
#[oai(ty = "bearer")]
struct MySecurityScheme(Bearer);
let mut registry = Registry::new();
@@ -161,15 +161,15 @@ async fn bearer_auth() {
#[tokio::test]
async fn api_key_auth() {
#[derive(SecurityScheme)]
#[oai(type = "api_key", key_name = "X-API-Key", in = "header")]
#[oai(ty = "api_key", key_name = "X-API-Key", key_in = "header")]
struct MySecuritySchemeInHeader(ApiKey);
#[derive(SecurityScheme)]
#[oai(type = "api_key", key_name = "key", in = "query")]
#[oai(ty = "api_key", key_name = "key", key_in = "query")]
struct MySecuritySchemeInQuery(ApiKey);
#[derive(SecurityScheme)]
#[oai(type = "api_key", key_name = "key", in = "cookie")]
#[oai(ty = "api_key", key_name = "key", key_in = "cookie")]
struct MySecuritySchemeInCookie(ApiKey);
let mut registry = Registry::new();
@@ -369,7 +369,7 @@ async fn oauth2_auth() {
#[derive(SecurityScheme)]
#[oai(
type = "oauth2",
ty = "oauth2",
flows(
implicit(
authorization_url = "https://test.com/authorize",

View File

@@ -4,6 +4,10 @@ All notable changes to this project will be documented in this file.
The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
# [1.3.57] 2023-06-21
- bump `syn` from `1.0` to `2.0`
# [1.3.56] 2023-06-06
- allow not falling back to index file [#524](https://github.com/poem-web/poem/pull/524)