use std::borrow::Cow;
use serde::ser::SerializeSeq;
use serde::{Deserialize, Serialize};
use crate::{
PartialResultParams, Range, StaticRegistrationOptions, TextDocumentIdentifier,
TextDocumentRegistrationOptions, WorkDoneProgressOptions, WorkDoneProgressParams,
};
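/// A set of predefined token types. This set is not fixed and clients can
/// specify additional token types via the corresponding client capabilities.
///
/// @since 3.16.0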
#[derive(Debug, Eq, PartialEq, Hash, PartialOrd, Clone, Deserialize, Serialize)]
pub struct SemanticTokenType(Cow<'static, str>);
impl SemanticTokenType {
pub const NAMESPACE: SemanticTokenType = SemanticTokenType::new("namespace");
pub const TYPE: SemanticTokenType = SemanticTokenType::new("type");
pub const CLASS: SemanticTokenType = SemanticTokenType::new("class");
pub const ENUM: SemanticTokenType = SemanticTokenType::new("enum");
pub const INTERFACE: SemanticTokenType = SemanticTokenType::new("interface");
pub const STRUCT: SemanticTokenType = SemanticTokenType::new("struct");
pub const TYPE_PARAMETER: SemanticTokenType = SemanticTokenType::new("typeParameter");
pub const PARAMETER: SemanticTokenType = SemanticTokenType::new("parameter");
pub const VARIABLE: SemanticTokenType = SemanticTokenType::new("variable");
pub const PROPERTY: SemanticTokenType = SemanticTokenType::new("property");
pub const ENUM_MEMBER: SemanticTokenType = SemanticTokenType::new("enumMember");
pub const EVENT: SemanticTokenType = SemanticTokenType::new("event");
pub const FUNCTION: SemanticTokenType = SemanticTokenType::new("function");
pub const METHOD: SemanticTokenType = SemanticTokenType::new("method");
pub const MACRO: SemanticTokenType = SemanticTokenType::new("macro");
pub const KEYWORD: SemanticTokenType = SemanticTokenType::new("keyword");
pub const MODIFIER: SemanticTokenType = SemanticTokenType::new("modifier");
pub const COMMENT: SemanticTokenType = SemanticTokenType::new("comment");
pub const STRING: SemanticTokenType = SemanticTokenType::new("string");
pub const NUMBER: SemanticTokenType = SemanticTokenType::new("number");
pub const REGEXP: SemanticTokenType = SemanticTokenType::new("regexp");
pub const OPERATOR: SemanticTokenType = SemanticTokenType::new("operator");
pub const DECORATOR: SemanticTokenType = SemanticTokenType::new("decorator");
pub const fn new(tag: &'static str) -> Self {
SemanticTokenType(Cow::Borrowed(tag))
}
pub fn as_str(&self) -> &str {
&self.0
}
}
impl From<String> for SemanticTokenType {
fn from(from: String) -> Self {
SemanticTokenType(Cow::from(from))
}
}
impl From<&'static str> for SemanticTokenType {
fn from(from: &'static str) -> Self {
SemanticTokenType::new(from)
}
}
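/// A set of predefined token modifiers. This set is not fixed and clients can
/// specify additional token modifiers via the corresponding client capabilities.
///
/// @since 3.16.0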
#[derive(Debug, Eq, PartialEq, Hash, PartialOrd, Clone, Deserialize, Serialize)]
pub struct SemanticTokenModifier(Cow<'static, str>);
impl SemanticTokenModifier {
pub const DECLARATION: SemanticTokenModifier = SemanticTokenModifier::new("declaration");
pub const DEFINITION: SemanticTokenModifier = SemanticTokenModifier::new("definition");
pub const READONLY: SemanticTokenModifier = SemanticTokenModifier::new("readonly");
pub const STATIC: SemanticTokenModifier = SemanticTokenModifier::new("static");
pub const DEPRECATED: SemanticTokenModifier = SemanticTokenModifier::new("deprecated");
pub const ABSTRACT: SemanticTokenModifier = SemanticTokenModifier::new("abstract");
pub const ASYNC: SemanticTokenModifier = SemanticTokenModifier::new("async");
pub const MODIFICATION: SemanticTokenModifier = SemanticTokenModifier::new("modification");
pub const DOCUMENTATION: SemanticTokenModifier = SemanticTokenModifier::new("documentation");
pub const DEFAULT_LIBRARY: SemanticTokenModifier = SemanticTokenModifier::new("defaultLibrary");
pub const fn new(tag: &'static str) -> Self {
SemanticTokenModifier(Cow::Borrowed(tag))
}
pub fn as_str(&self) -> &str {
&self.0
}
}
impl From<String> for SemanticTokenModifier {
fn from(from: String) -> Self {
SemanticTokenModifier(Cow::from(from))
}
}
impl From<&'static str> for SemanticTokenModifier {
fn from(from: &'static str) -> Self {
SemanticTokenModifier::new(from)
}
}
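/// The protocol defines an extensible set of token formats. Currently only
/// `relative` is specified, meaning token positions are encoded relative to
/// the token that precedes them in the `data` array.
///
/// @since 3.16.0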
#[derive(Debug, Eq, PartialEq, Hash, PartialOrd, Clone, Deserialize, Serialize)]
pub struct TokenFormat(Cow<'static, str>);
impl TokenFormat {
pub const RELATIVE: TokenFormat = TokenFormat::new("relative");
pub const fn new(tag: &'static str) -> Self {
TokenFormat(Cow::Borrowed(tag))
}
pub fn as_str(&self) -> &str {
&self.0
}
}
impl From<String> for TokenFormat {
fn from(from: String) -> Self {
TokenFormat(Cow::from(from))
}
}
impl From<&'static str> for TokenFormat {
fn from(from: &'static str) -> Self {
TokenFormat::new(from)
}
}
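/// The legend a server announces up front. A token's `token_type` is an index
/// into `token_types`, and bit `i` of `token_modifiers_bitset` refers to
/// `token_modifiers[i]`.
///
/// @since 3.16.0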
#[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct SemanticTokensLegend {
pub token_types: Vec<SemanticTokenType>,
pub token_modifiers: Vec<SemanticTokenModifier>,
}
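/// A single semantic token. On the wire each token is five consecutive
/// `u32`s: `delta_line` is relative to the previous token, `delta_start` is
/// relative to the previous token's start when `delta_line` is 0 and to the
/// start of the line otherwise, followed by `length`, the `token_type` index
/// into the legend, and the modifier bitset.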
#[derive(Debug, Eq, PartialEq, Copy, Clone, Default)]
pub struct SemanticToken {
pub delta_line: u32,
pub delta_start: u32,
pub length: u32,
pub token_type: u32,
pub token_modifiers_bitset: u32,
}
impl SemanticToken {
    /// Deserializes the flat `[u32]` wire representation into structured tokens.
    fn deserialize_tokens<'de, D>(deserializer: D) -> Result<Vec<SemanticToken>, D::Error>
    where
        D: serde::Deserializer<'de>,
    {
        let data = Vec::<u32>::deserialize(deserializer)?;
        // Each token occupies exactly five consecutive integers; any remainder
        // means the payload is malformed.
        let chunks = data.chunks_exact(5);
        if !chunks.remainder().is_empty() {
            return Result::Err(serde::de::Error::custom("Length is not divisible by 5"));
        }
Result::Ok(
chunks
.map(|chunk| SemanticToken {
delta_line: chunk[0],
delta_start: chunk[1],
length: chunk[2],
token_type: chunk[3],
token_modifiers_bitset: chunk[4],
})
.collect(),
)
}
    /// Serializes structured tokens back into the flat `[u32]` wire representation.
    fn serialize_tokens<S>(tokens: &[SemanticToken], serializer: S) -> Result<S::Ok, S::Error>
where
S: serde::Serializer,
{
let mut seq = serializer.serialize_seq(Some(tokens.len() * 5))?;
for token in tokens.iter() {
seq.serialize_element(&token.delta_line)?;
seq.serialize_element(&token.delta_start)?;
seq.serialize_element(&token.length)?;
seq.serialize_element(&token.token_type)?;
seq.serialize_element(&token.token_modifiers_bitset)?;
}
seq.end()
}
    /// Like [`SemanticToken::deserialize_tokens`], but maps an absent or
    /// `null` field to `None`.
    fn deserialize_tokens_opt<'de, D>(
        deserializer: D,
    ) -> Result<Option<Vec<SemanticToken>>, D::Error>
where
D: serde::Deserializer<'de>,
{
#[derive(Deserialize)]
#[serde(transparent)]
struct Wrapper {
#[serde(deserialize_with = "SemanticToken::deserialize_tokens")]
tokens: Vec<SemanticToken>,
}
Ok(Option::<Wrapper>::deserialize(deserializer)?.map(|wrapper| wrapper.tokens))
}
    /// Like [`SemanticToken::serialize_tokens`], but for optional data.
    fn serialize_tokens_opt<S>(
        data: &Option<Vec<SemanticToken>>,
        serializer: S,
    ) -> Result<S::Ok, S::Error>
where
S: serde::Serializer,
{
#[derive(Serialize)]
#[serde(transparent)]
struct Wrapper {
#[serde(serialize_with = "SemanticToken::serialize_tokens")]
tokens: Vec<SemanticToken>,
}
let opt = data.as_ref().map(|t| Wrapper { tokens: t.to_vec() });
opt.serialize(serializer)
}
}
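// Worked example of the flat wire encoding the helpers above implement, using
// the same values as the tests at the bottom of this file: a token at line 2,
// column 5, length 3, type 0, modifier bits 0b11, followed by a second token
// on the same line five columns to the right with length 4 and type 1, is
// transported as `[2, 5, 3, 0, 3, 0, 5, 4, 1, 0]`.

/// A set of semantic tokens for a document or a document range.
///
/// @since 3.16.0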
#[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct SemanticTokens {
    /// An optional result id. If provided and clients support delta updating,
    /// the client will include it in the next semantic tokens request, and the
    /// server can then respond with a delta instead of recomputing everything.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub result_id: Option<String>,
    /// The actual tokens, flattened to five `u32`s per token on the wire.
    #[serde(
        deserialize_with = "SemanticToken::deserialize_tokens",
        serialize_with = "SemanticToken::serialize_tokens"
    )]
    pub data: Vec<SemanticToken>,
}
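/// A partial result for a semantic tokens request, streamed via `$/progress`
/// when the client supplied a partial result token.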
#[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct SemanticTokensPartialResult {
#[serde(
deserialize_with = "SemanticToken::deserialize_tokens",
serialize_with = "SemanticToken::serialize_tokens"
)]
pub data: Vec<SemanticToken>,
}
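/// The response to a `textDocument/semanticTokens/full` request: either the
/// complete token set or a partial result.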
#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
#[serde(untagged)]
pub enum SemanticTokensResult {
Tokens(SemanticTokens),
Partial(SemanticTokensPartialResult),
}
impl From<SemanticTokens> for SemanticTokensResult {
fn from(from: SemanticTokens) -> Self {
SemanticTokensResult::Tokens(from)
}
}
impl From<SemanticTokensPartialResult> for SemanticTokensResult {
fn from(from: SemanticTokensPartialResult) -> Self {
SemanticTokensResult::Partial(from)
}
}
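/// A single edit in a semantic tokens delta: replace `delete_count` integers
/// of the previous result, starting at index `start`, with the integers
/// encoded in `data`. Both offsets are measured in `u32` slots of the flat
/// array, not in tokens.
///
/// @since 3.16.0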
#[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct SemanticTokensEdit {
pub start: u32,
pub delete_count: u32,
#[serde(
default,
skip_serializing_if = "Option::is_none",
deserialize_with = "SemanticToken::deserialize_tokens_opt",
serialize_with = "SemanticToken::serialize_tokens_opt"
)]
pub data: Option<Vec<SemanticToken>>,
}
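/// The response to a `textDocument/semanticTokens/full/delta` request: the
/// full token set, a delta against the previous result, or a partial delta.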
#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
#[serde(untagged)]
pub enum SemanticTokensFullDeltaResult {
Tokens(SemanticTokens),
TokensDelta(SemanticTokensDelta),
PartialTokensDelta { edits: Vec<SemanticTokensEdit> },
}
impl From<SemanticTokens> for SemanticTokensFullDeltaResult {
fn from(from: SemanticTokens) -> Self {
SemanticTokensFullDeltaResult::Tokens(from)
}
}
impl From<SemanticTokensDelta> for SemanticTokensFullDeltaResult {
fn from(from: SemanticTokensDelta) -> Self {
SemanticTokensFullDeltaResult::TokensDelta(from)
}
}
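/// The delta response to a `textDocument/semanticTokens/full/delta` request.
///
/// @since 3.16.0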
#[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct SemanticTokensDelta {
#[serde(skip_serializing_if = "Option::is_none")]
pub result_id: Option<String>,
pub edits: Vec<SemanticTokensEdit>,
}
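/// Capabilities specific to the `textDocument/semanticTokens/*` requests.
///
/// @since 3.16.0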
#[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct SemanticTokensClientCapabilities {
    /// Whether the implementation supports dynamic registration.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub dynamic_registration: Option<bool>,
    /// Which requests the client supports and might send to the server,
    /// depending on the server's capability.
    pub requests: SemanticTokensClientCapabilitiesRequests,
    /// The token types that the client supports.
    pub token_types: Vec<SemanticTokenType>,
    /// The token modifiers that the client supports.
    pub token_modifiers: Vec<SemanticTokenModifier>,
    /// The token formats that the client supports.
    pub formats: Vec<TokenFormat>,
    /// Whether the client supports tokens that can overlap each other.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub overlapping_token_support: Option<bool>,
    /// Whether the client supports tokens that can span multiple lines.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub multiline_token_support: Option<bool>,
    /// Whether the client allows the server to actively cancel a semantic
    /// token request, e.g. by returning `ServerCancelled`. (@since 3.17.0)
    #[serde(skip_serializing_if = "Option::is_none")]
    pub server_cancel_support: Option<bool>,
    /// Whether the client uses semantic tokens to augment existing syntax
    /// tokens rather than replace them. (@since 3.17.0)
    #[serde(skip_serializing_if = "Option::is_none")]
    pub augments_syntax_tokens: Option<bool>,
}
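/// Which semantic token requests a client supports.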
#[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct SemanticTokensClientCapabilitiesRequests {
    /// The client will send the `textDocument/semanticTokens/range` request
    /// if the server provides a corresponding handler.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub range: Option<bool>,
    /// The client will send the `textDocument/semanticTokens/full` request
    /// if the server provides a corresponding handler.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub full: Option<SemanticTokensFullOptions>,
}
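/// Options for full-document semantic token requests: either a plain boolean
/// or `{ "delta": <bool> }` on the wire to additionally advertise delta support.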
#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
#[serde(untagged)]
pub enum SemanticTokensFullOptions {
Bool(bool),
Delta {
#[serde(skip_serializing_if = "Option::is_none")]
delta: Option<bool>,
},
}
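/// Server options for the semantic tokens requests.
///
/// @since 3.16.0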
#[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct SemanticTokensOptions {
#[serde(flatten)]
pub work_done_progress_options: WorkDoneProgressOptions,
    /// The legend used by the server.
    pub legend: SemanticTokensLegend,
    /// Server supports providing semantic tokens for a specific range of a document.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub range: Option<bool>,
    /// Server supports providing semantic tokens for a full document.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub full: Option<SemanticTokensFullOptions>,
}
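/// Registration options for semantic token requests, combining the document
/// selector, the token options, and an optional static registration id.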
#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct SemanticTokensRegistrationOptions {
#[serde(flatten)]
pub text_document_registration_options: TextDocumentRegistrationOptions,
#[serde(flatten)]
pub semantic_tokens_options: SemanticTokensOptions,
#[serde(flatten)]
pub static_registration_options: StaticRegistrationOptions,
}
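/// The server capability for semantic tokens: either plain options or full
/// registration options when the server also supplies a static id.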
#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
#[serde(untagged)]
pub enum SemanticTokensServerCapabilities {
SemanticTokensOptions(SemanticTokensOptions),
SemanticTokensRegistrationOptions(SemanticTokensRegistrationOptions),
}
impl From<SemanticTokensOptions> for SemanticTokensServerCapabilities {
fn from(from: SemanticTokensOptions) -> Self {
SemanticTokensServerCapabilities::SemanticTokensOptions(from)
}
}
impl From<SemanticTokensRegistrationOptions> for SemanticTokensServerCapabilities {
fn from(from: SemanticTokensRegistrationOptions) -> Self {
SemanticTokensServerCapabilities::SemanticTokensRegistrationOptions(from)
}
}
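/// Workspace-level client capabilities specific to semantic tokens.
///
/// @since 3.16.0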
#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct SemanticTokensWorkspaceClientCapabilities {
    /// Whether the client supports the `workspace/semanticTokens/refresh`
    /// request sent from the server. Note that this request is global and
    /// forces the client to refresh all currently shown semantic tokens.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub refresh_support: Option<bool>,
}
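/// Parameters for the `textDocument/semanticTokens/full` request.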
#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct SemanticTokensParams {
#[serde(flatten)]
pub work_done_progress_params: WorkDoneProgressParams,
#[serde(flatten)]
pub partial_result_params: PartialResultParams,
pub text_document: TextDocumentIdentifier,
}
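/// Parameters for the `textDocument/semanticTokens/full/delta` request.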
#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct SemanticTokensDeltaParams {
#[serde(flatten)]
pub work_done_progress_params: WorkDoneProgressParams,
#[serde(flatten)]
pub partial_result_params: PartialResultParams,
pub text_document: TextDocumentIdentifier,
    /// The result id of a previous response; the server computes the delta
    /// against the tokens identified by this id.
    pub previous_result_id: String,
}
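/// Parameters for the `textDocument/semanticTokens/range` request.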
#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct SemanticTokensRangeParams {
#[serde(flatten)]
pub work_done_progress_params: WorkDoneProgressParams,
#[serde(flatten)]
pub partial_result_params: PartialResultParams,
pub text_document: TextDocumentIdentifier,
    /// The range the semantic tokens are requested for.
    pub range: Range,
}
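/// The response to a `textDocument/semanticTokens/range` request.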
#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
#[serde(untagged)]
pub enum SemanticTokensRangeResult {
Tokens(SemanticTokens),
Partial(SemanticTokensPartialResult),
}
impl From<SemanticTokens> for SemanticTokensRangeResult {
fn from(tokens: SemanticTokens) -> Self {
SemanticTokensRangeResult::Tokens(tokens)
}
}
impl From<SemanticTokensPartialResult> for SemanticTokensRangeResult {
fn from(partial: SemanticTokensPartialResult) -> Self {
SemanticTokensRangeResult::Partial(partial)
}
}
#[cfg(test)]
mod tests {
use super::*;
use crate::tests::{test_deserialization, test_serialization};
#[test]
fn test_semantic_tokens_support_serialization() {
test_serialization(
&SemanticTokens {
result_id: None,
data: vec![],
},
r#"{"data":[]}"#,
);
test_serialization(
&SemanticTokens {
result_id: None,
data: vec![SemanticToken {
delta_line: 2,
delta_start: 5,
length: 3,
token_type: 0,
token_modifiers_bitset: 3,
}],
},
r#"{"data":[2,5,3,0,3]}"#,
);
test_serialization(
&SemanticTokens {
result_id: None,
data: vec![
SemanticToken {
delta_line: 2,
delta_start: 5,
length: 3,
token_type: 0,
token_modifiers_bitset: 3,
},
SemanticToken {
delta_line: 0,
delta_start: 5,
length: 4,
token_type: 1,
token_modifiers_bitset: 0,
},
],
},
r#"{"data":[2,5,3,0,3,0,5,4,1,0]}"#,
);
}
#[test]
fn test_semantic_tokens_support_deserialization() {
test_deserialization(
r#"{"data":[]}"#,
&SemanticTokens {
result_id: None,
data: vec![],
},
);
test_deserialization(
r#"{"data":[2,5,3,0,3]}"#,
&SemanticTokens {
result_id: None,
data: vec![SemanticToken {
delta_line: 2,
delta_start: 5,
length: 3,
token_type: 0,
token_modifiers_bitset: 3,
}],
},
);
test_deserialization(
r#"{"data":[2,5,3,0,3,0,5,4,1,0]}"#,
&SemanticTokens {
result_id: None,
data: vec![
SemanticToken {
delta_line: 2,
delta_start: 5,
length: 3,
token_type: 0,
token_modifiers_bitset: 3,
},
SemanticToken {
delta_line: 0,
delta_start: 5,
length: 4,
token_type: 1,
token_modifiers_bitset: 0,
},
],
},
);
}
#[test]
#[should_panic]
fn test_semantic_tokens_support_deserialization_err() {
test_deserialization(
r#"{"data":[1]}"#,
&SemanticTokens {
result_id: None,
data: vec![],
},
);
}
#[test]
fn test_semantic_tokens_edit_support_deserialization() {
test_deserialization(
r#"{"start":0,"deleteCount":1,"data":[2,5,3,0,3,0,5,4,1,0]}"#,
&SemanticTokensEdit {
start: 0,
delete_count: 1,
data: Some(vec![
SemanticToken {
delta_line: 2,
delta_start: 5,
length: 3,
token_type: 0,
token_modifiers_bitset: 3,
},
SemanticToken {
delta_line: 0,
delta_start: 5,
length: 4,
token_type: 1,
token_modifiers_bitset: 0,
},
]),
},
);
test_deserialization(
r#"{"start":0,"deleteCount":1}"#,
&SemanticTokensEdit {
start: 0,
delete_count: 1,
data: None,
},
);
}
#[test]
fn test_semantic_tokens_edit_support_serialization() {
test_serialization(
&SemanticTokensEdit {
start: 0,
delete_count: 1,
data: Some(vec![
SemanticToken {
delta_line: 2,
delta_start: 5,
length: 3,
token_type: 0,
token_modifiers_bitset: 3,
},
SemanticToken {
delta_line: 0,
delta_start: 5,
length: 4,
token_type: 1,
token_modifiers_bitset: 0,
},
]),
},
r#"{"start":0,"deleteCount":1,"data":[2,5,3,0,3,0,5,4,1,0]}"#,
);
test_serialization(
&SemanticTokensEdit {
start: 0,
delete_count: 1,
data: None,
},
r#"{"start":0,"deleteCount":1}"#,
);
}
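    // Illustrative addition, not part of the original suite: a delta whose
    // edit carries no inserted data should omit the `data` field entirely,
    // while a present result id is serialized as `resultId`.
    #[test]
    fn test_semantic_tokens_delta_serialization() {
        test_serialization(
            &SemanticTokensDelta {
                result_id: Some("abc".to_string()),
                edits: vec![SemanticTokensEdit {
                    start: 0,
                    delete_count: 1,
                    data: None,
                }],
            },
            r#"{"resultId":"abc","edits":[{"start":0,"deleteCount":1}]}"#,
        );
    }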
}