Merge branch 'master' into tree-sitter-helix

This commit is contained in:
Nikita Revenco 2025-02-01 15:54:33 +00:00 committed by GitHub
commit 588e8bee1a
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
34 changed files with 1251 additions and 712 deletions

29
Cargo.lock generated
View file

@ -272,7 +272,7 @@ checksum = "5041cc499144891f3790297212f32a74fb938e5136a14943f338ef9e0ae276cf"
dependencies = [
"cfg-if",
"crossbeam-utils",
"hashbrown",
"hashbrown 0.14.5",
"lock_api",
"once_cell",
"parking_lot_core",
@ -794,7 +794,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "189130bc372accd02e0520dc5ab1cef318dcc2bc829b76ab8d84bbe90ac212d1"
dependencies = [
"gix-hash",
"hashbrown",
"hashbrown 0.14.5",
"parking_lot",
]
@ -830,7 +830,7 @@ dependencies = [
"gix-traverse",
"gix-utils",
"gix-validate",
"hashbrown",
"hashbrown 0.14.5",
"itoa",
"libc",
"memmap2",
@ -1286,6 +1286,12 @@ dependencies = [
"allocator-api2",
]
[[package]]
name = "hashbrown"
version = "0.15.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bf151400ff0baff5465007dd2f3e717f3fe502074ca563069ce3a6629d07b289"
[[package]]
name = "helix-core"
version = "25.1.1"
@ -1299,7 +1305,7 @@ dependencies = [
"encoding_rs",
"etcetera",
"globset",
"hashbrown",
"hashbrown 0.14.5",
"helix-loader",
"helix-parsec",
"helix-stdx",
@ -1349,7 +1355,7 @@ dependencies = [
"ahash",
"anyhow",
"futures-executor",
"hashbrown",
"hashbrown 0.14.5",
"log",
"once_cell",
"parking_lot",
@ -1427,6 +1433,7 @@ dependencies = [
"ropey",
"rustix",
"tempfile",
"unicode-segmentation",
"which",
"windows-sys 0.59.0",
]
@ -1744,17 +1751,17 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fc9da1a252bd44cd341657203722352efc9bc0c847d06ea6d2dc1cd1135e0a01"
dependencies = [
"ahash",
"hashbrown",
"hashbrown 0.14.5",
]
[[package]]
name = "indexmap"
version = "2.5.0"
version = "2.7.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "68b900aa2f7301e21c36462b170ee99994de34dff39a4a6a528e80e7376d07e5"
checksum = "8c9c992b02b5b4c94ea26e32fe5bccb7aa7d9f390ab5c1221ff895bc7ea8b652"
dependencies = [
"equivalent",
"hashbrown",
"hashbrown 0.15.2",
]
[[package]]
@ -2217,9 +2224,9 @@ checksum = "719b953e2095829ee67db738b3bfa9fa368c94900df327b3f07fe6e794d2fe1f"
[[package]]
name = "rustix"
version = "0.38.43"
version = "0.38.44"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a78891ee6bf2340288408954ac787aa063d8e8817e9f53abb37c695c6d834ef6"
checksum = "fdb5bc1ae2baa591800df16c9ca78619bf65c0488b41b96ccec5d11220d8c154"
dependencies = [
"bitflags",
"errno",

View file

@ -43,6 +43,8 @@ slotmap = "1.0.7"
thiserror = "2.0"
tempfile = "3.15.0"
bitflags = "2.8"
unicode-segmentation = "1.2"
ropey = { version = "1.6.1", default-features = false, features = ["simd"] }
[workspace.package]
version = "25.1.1"

View file

@ -64,6 +64,7 @@
| gdscript | ✓ | ✓ | ✓ | |
| gemini | ✓ | | | |
| gherkin | ✓ | | | |
| ghostty | ✓ | | | |
| git-attributes | ✓ | | | |
| git-commit | ✓ | ✓ | | |
| git-config | ✓ | ✓ | | |

View file

@ -20,10 +20,10 @@ helix-stdx = { path = "../helix-stdx" }
helix-loader = { path = "../helix-loader" }
helix-parsec = { path = "../helix-parsec" }
ropey = { version = "1.6.1", default-features = false, features = ["simd"] }
ropey.workspace = true
smallvec = "1.13"
smartstring = "1.0.1"
unicode-segmentation = "1.12"
unicode-segmentation.workspace = true
# unicode-width is changing width definitions
# that both break our logic and disagree with common
# width definitions in terminals, we need to replace it.

View file

@ -19,10 +19,12 @@ mod test;
use unicode_segmentation::{Graphemes, UnicodeSegmentation};
use helix_stdx::rope::{RopeGraphemes, RopeSliceExt};
use crate::graphemes::{Grapheme, GraphemeStr};
use crate::syntax::Highlight;
use crate::text_annotations::TextAnnotations;
use crate::{Position, RopeGraphemes, RopeSlice};
use crate::{Position, RopeSlice};
/// TODO make Highlight a u32 to reduce the size of this enum to a single word.
#[derive(Debug, Clone, Copy)]
@ -219,7 +221,7 @@ impl<'t> DocumentFormatter<'t> {
text_fmt,
annotations,
visual_pos: Position { row: 0, col: 0 },
graphemes: RopeGraphemes::new(text.slice(block_char_idx..)),
graphemes: text.slice(block_char_idx..).graphemes(),
char_pos: block_char_idx,
exhausted: false,
indent_level: None,

View file

@ -1,7 +1,7 @@
//! Utility functions to traverse the unicode graphemes of a `Rope`'s text contents.
//!
//! Based on <https://github.com/cessen/led/blob/c4fa72405f510b7fd16052f90a598c429b3104a6/src/graphemes.rs>
use ropey::{iter::Chunks, str_utils::byte_to_char_idx, RopeSlice};
use ropey::{str_utils::byte_to_char_idx, RopeSlice};
use unicode_segmentation::{GraphemeCursor, GraphemeIncomplete};
use unicode_width::UnicodeWidthStr;
@ -119,6 +119,9 @@ pub fn grapheme_width(g: &str) -> usize {
}
}
// NOTE: for byte indexing versions of these functions see `RopeSliceExt`'s
// `floor_grapheme_boundary` and `ceil_grapheme_boundary` and the rope grapheme iterators.
#[must_use]
pub fn nth_prev_grapheme_boundary(slice: RopeSlice, char_idx: usize, n: usize) -> usize {
// Bounds check
@ -208,43 +211,6 @@ pub fn nth_next_grapheme_boundary(slice: RopeSlice, char_idx: usize, n: usize) -
chunk_char_idx + tmp
}
/// Returns the byte index of the `n`th grapheme-cluster boundary after `byte_idx`,
/// clamping to `slice.len_bytes()` when fewer than `n` boundaries remain.
#[must_use]
pub fn nth_next_grapheme_boundary_byte(slice: RopeSlice, mut byte_idx: usize, n: usize) -> usize {
    // Bounds check
    debug_assert!(byte_idx <= slice.len_bytes());
    // Get the chunk with our byte index in it.
    let (mut chunk, mut chunk_byte_idx, mut _chunk_char_idx, _) = slice.chunk_at_byte(byte_idx);
    // Set up the grapheme cursor.
    let mut gc = GraphemeCursor::new(byte_idx, slice.len_bytes(), true);
    // Find the nth next grapheme cluster boundary.
    for _ in 0..n {
        loop {
            match gc.next_boundary(chunk, chunk_byte_idx) {
                // No boundary left: the end of the slice is the final boundary.
                Ok(None) => return slice.len_bytes(),
                Ok(Some(n)) => {
                    byte_idx = n;
                    break;
                }
                // Cursor ran off the current chunk; feed it the next one.
                Err(GraphemeIncomplete::NextChunk) => {
                    chunk_byte_idx += chunk.len();
                    let (a, _, _c, _) = slice.chunk_at_byte(chunk_byte_idx);
                    chunk = a;
                    // chunk_char_idx = c;
                }
                // Cursor needs text *before* the current position to classify it.
                Err(GraphemeIncomplete::PreContext(n)) => {
                    let ctx_chunk = slice.chunk_at_byte(n - 1).0;
                    gc.provide_context(ctx_chunk, n - ctx_chunk.len());
                }
                _ => unreachable!(),
            }
        }
    }
    byte_idx
}
/// Finds the next grapheme boundary after the given char position.
#[must_use]
#[inline(always)]
@ -252,13 +218,6 @@ pub fn next_grapheme_boundary(slice: RopeSlice, char_idx: usize) -> usize {
nth_next_grapheme_boundary(slice, char_idx, 1)
}
/// Finds the next grapheme boundary after the given byte position.
///
/// Convenience wrapper over [`nth_next_grapheme_boundary_byte`] with `n = 1`.
#[must_use]
#[inline(always)]
pub fn next_grapheme_boundary_byte(slice: RopeSlice, byte_idx: usize) -> usize {
    nth_next_grapheme_boundary_byte(slice, byte_idx, 1)
}
/// Returns the passed char index if it's already a grapheme boundary,
/// or the next grapheme boundary char index if not.
#[must_use]
@ -311,187 +270,6 @@ pub fn is_grapheme_boundary(slice: RopeSlice, char_idx: usize) -> bool {
}
}
/// Returns whether the given byte position is a grapheme boundary.
#[must_use]
pub fn is_grapheme_boundary_byte(slice: RopeSlice, byte_idx: usize) -> bool {
    // Bounds check
    debug_assert!(byte_idx <= slice.len_bytes());
    // Get the chunk with our byte index in it.
    let (chunk, chunk_byte_idx, _, _) = slice.chunk_at_byte(byte_idx);
    // Set up the grapheme cursor.
    let mut gc = GraphemeCursor::new(byte_idx, slice.len_bytes(), true);
    // Determine if the given position is a grapheme cluster boundary.
    loop {
        match gc.is_boundary(chunk, chunk_byte_idx) {
            Ok(n) => return n,
            // Cursor needs earlier text (e.g. for multi-codepoint clusters);
            // supply the chunk just before the requested position and retry.
            Err(GraphemeIncomplete::PreContext(n)) => {
                let (ctx_chunk, ctx_byte_start, _, _) = slice.chunk_at_byte(n - 1);
                gc.provide_context(ctx_chunk, ctx_byte_start);
            }
            Err(_) => unreachable!(),
        }
    }
}
/// An iterator over the graphemes of a `RopeSlice`.
#[derive(Clone)]
pub struct RopeGraphemes<'a> {
    // The full slice being iterated; used to build cross-chunk sub-slices.
    text: RopeSlice<'a>,
    // Remaining chunks after `cur_chunk`.
    chunks: Chunks<'a>,
    // Chunk currently being scanned by `cursor`.
    cur_chunk: &'a str,
    // Byte offset of `cur_chunk` within `text`.
    cur_chunk_start: usize,
    // Segmentation state; tracks the last yielded boundary.
    cursor: GraphemeCursor,
}
impl fmt::Debug for RopeGraphemes<'_> {
    // Manual impl: the `cursor` field is deliberately left out of the output
    // (see the commented-out line below).
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_struct("RopeGraphemes")
            .field("text", &self.text)
            .field("chunks", &self.chunks)
            .field("cur_chunk", &self.cur_chunk)
            .field("cur_chunk_start", &self.cur_chunk_start)
            // .field("cursor", &self.cursor)
            .finish()
    }
}
impl RopeGraphemes<'_> {
    /// Creates a grapheme iterator positioned at the start of `slice`.
    #[must_use]
    pub fn new(slice: RopeSlice) -> RopeGraphemes {
        let mut chunks = slice.chunks();
        // Prime the first chunk; an empty slice yields "".
        let first_chunk = chunks.next().unwrap_or("");
        RopeGraphemes {
            text: slice,
            chunks,
            cur_chunk: first_chunk,
            cur_chunk_start: 0,
            cursor: GraphemeCursor::new(0, slice.len_bytes(), true),
        }
    }
}
impl<'a> Iterator for RopeGraphemes<'a> {
    type Item = RopeSlice<'a>;

    fn next(&mut self) -> Option<RopeSlice<'a>> {
        // `a`..`b` will delimit the next grapheme in byte offsets of `text`.
        let a = self.cursor.cur_cursor();
        let b;
        loop {
            match self
                .cursor
                .next_boundary(self.cur_chunk, self.cur_chunk_start)
            {
                // End of the slice: iteration is done.
                Ok(None) => {
                    return None;
                }
                Ok(Some(n)) => {
                    b = n;
                    break;
                }
                // Advance to the next chunk and keep scanning.
                Err(GraphemeIncomplete::NextChunk) => {
                    self.cur_chunk_start += self.cur_chunk.len();
                    self.cur_chunk = self.chunks.next().unwrap_or("");
                }
                // Cursor needs preceding text to resolve the boundary.
                Err(GraphemeIncomplete::PreContext(idx)) => {
                    let (chunk, byte_idx, _, _) = self.text.chunk_at_byte(idx.saturating_sub(1));
                    self.cursor.provide_context(chunk, byte_idx);
                }
                _ => unreachable!(),
            }
        }
        if a < self.cur_chunk_start {
            // Grapheme straddles a chunk boundary: slice it out of the rope.
            Some(self.text.byte_slice(a..b))
        } else {
            // Grapheme lies entirely within the current chunk: borrow the str.
            let a2 = a - self.cur_chunk_start;
            let b2 = b - self.cur_chunk_start;
            Some((&self.cur_chunk[a2..b2]).into())
        }
    }
}
/// An iterator over the graphemes of a `RopeSlice` in reverse.
#[derive(Clone)]
pub struct RevRopeGraphemes<'a> {
    // The full slice being iterated; used to build cross-chunk sub-slices.
    text: RopeSlice<'a>,
    // Remaining chunks, iterated back-to-front (reversed in `new`).
    chunks: Chunks<'a>,
    // Chunk currently being scanned by `cursor`.
    cur_chunk: &'a str,
    // Byte offset of `cur_chunk` within `text`.
    cur_chunk_start: usize,
    // Segmentation state; starts at the end of the slice.
    cursor: GraphemeCursor,
}
impl fmt::Debug for RevRopeGraphemes<'_> {
    // Manual impl: the `cursor` field is deliberately left out of the output
    // (see the commented-out line below).
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_struct("RevRopeGraphemes")
            .field("text", &self.text)
            .field("chunks", &self.chunks)
            .field("cur_chunk", &self.cur_chunk)
            .field("cur_chunk_start", &self.cur_chunk_start)
            // .field("cursor", &self.cursor)
            .finish()
    }
}
impl RevRopeGraphemes<'_> {
    /// Creates a reverse grapheme iterator positioned at the end of `slice`.
    #[must_use]
    pub fn new(slice: RopeSlice) -> RevRopeGraphemes {
        // Start the chunk iterator at the very end, then reverse it so
        // `next()` walks chunks back-to-front.
        let (mut chunks, mut cur_chunk_start, _, _) = slice.chunks_at_byte(slice.len_bytes());
        chunks.reverse();
        let first_chunk = chunks.next().unwrap_or("");
        // `chunks_at_byte` reported the start of the *next* chunk; step back
        // over the chunk we just took so this is `first_chunk`'s start offset.
        cur_chunk_start -= first_chunk.len();
        RevRopeGraphemes {
            text: slice,
            chunks,
            cur_chunk: first_chunk,
            cur_chunk_start,
            cursor: GraphemeCursor::new(slice.len_bytes(), slice.len_bytes(), true),
        }
    }
}
impl<'a> Iterator for RevRopeGraphemes<'a> {
    type Item = RopeSlice<'a>;

    fn next(&mut self) -> Option<RopeSlice<'a>> {
        // `b`..`a` will delimit the next grapheme (scanning backwards, so the
        // newly found boundary `b` is *before* the previous cursor position `a`).
        let a = self.cursor.cur_cursor();
        let b;
        loop {
            match self
                .cursor
                .prev_boundary(self.cur_chunk, self.cur_chunk_start)
            {
                // Reached the start of the slice: iteration is done.
                Ok(None) => {
                    return None;
                }
                Ok(Some(n)) => {
                    b = n;
                    break;
                }
                // Step back to the previous chunk and keep scanning.
                Err(GraphemeIncomplete::PrevChunk) => {
                    self.cur_chunk = self.chunks.next().unwrap_or("");
                    self.cur_chunk_start -= self.cur_chunk.len();
                }
                // Cursor needs preceding text to resolve the boundary.
                Err(GraphemeIncomplete::PreContext(idx)) => {
                    let (chunk, byte_idx, _, _) = self.text.chunk_at_byte(idx.saturating_sub(1));
                    self.cursor.provide_context(chunk, byte_idx);
                }
                _ => unreachable!(),
            }
        }
        if a >= self.cur_chunk_start + self.cur_chunk.len() {
            // Grapheme straddles a chunk boundary: slice it out of the rope.
            Some(self.text.byte_slice(b..a))
        } else {
            // Grapheme lies entirely within the current chunk: borrow the str.
            let a2 = a - self.cur_chunk_start;
            let b2 = b - self.cur_chunk_start;
            Some((&self.cur_chunk[b2..a2]).into())
        }
    }
}
/// A highly compressed Cow<'a, str> that holds
/// at most u31::MAX bytes and is read-only
pub struct GraphemeStr<'a> {

View file

@ -8,7 +8,7 @@ use crate::{
graphemes::{grapheme_width, tab_width_at},
syntax::{IndentationHeuristic, LanguageConfiguration, RopeProvider, Syntax},
tree_sitter::Node,
Position, Rope, RopeGraphemes, RopeSlice, Tendril,
Position, Rope, RopeSlice, Tendril,
};
/// Enum representing indentation style.
@ -200,7 +200,7 @@ pub fn indent_level_for_line(line: RopeSlice, tab_width: usize, indent_width: us
/// Create a string of tabs & spaces that has the same visual width as the given RopeSlice (independent of the tab width).
fn whitespace_with_same_width(text: RopeSlice) -> String {
let mut s = String::new();
for grapheme in RopeGraphemes::new(text) {
for grapheme in text.graphemes() {
if grapheme == "\t" {
s.push('\t');
} else {

View file

@ -54,7 +54,6 @@ pub type Tendril = SmartString<smartstring::LazyCompact>;
#[doc(inline)]
pub use {regex, tree_sitter};
pub use graphemes::RopeGraphemes;
pub use position::{
char_idx_at_visual_offset, coords_at_pos, pos_at_coords, softwrapped_dimensions,
visual_offset_from_anchor, visual_offset_from_block, Position, VisualOffsetError,

View file

@ -4,10 +4,12 @@ use std::{
ops::{Add, AddAssign, Sub, SubAssign},
};
use helix_stdx::rope::RopeSliceExt;
use crate::{
chars::char_is_line_ending,
doc_formatter::{DocumentFormatter, TextFormat},
graphemes::{ensure_grapheme_boundary_prev, grapheme_width, RopeGraphemes},
graphemes::{ensure_grapheme_boundary_prev, grapheme_width},
line_ending::line_end_char_index,
text_annotations::TextAnnotations,
RopeSlice,
@ -101,7 +103,7 @@ pub fn coords_at_pos(text: RopeSlice, pos: usize) -> Position {
let line_start = text.line_to_char(line);
let pos = ensure_grapheme_boundary_prev(text, pos);
let col = RopeGraphemes::new(text.slice(line_start..pos)).count();
let col = text.slice(line_start..pos).graphemes().count();
Position::new(line, col)
}
@ -126,7 +128,7 @@ pub fn visual_coords_at_pos(text: RopeSlice, pos: usize, tab_width: usize) -> Po
let mut col = 0;
for grapheme in RopeGraphemes::new(text.slice(line_start..pos)) {
for grapheme in text.slice(line_start..pos).graphemes() {
if grapheme == "\t" {
col += tab_width - (col % tab_width);
} else {
@ -275,7 +277,7 @@ pub fn pos_at_coords(text: RopeSlice, coords: Position, limit_before_line_ending
};
let mut col_char_offset = 0;
for (i, g) in RopeGraphemes::new(text.slice(line_start..line_end)).enumerate() {
for (i, g) in text.slice(line_start..line_end).graphemes().enumerate() {
if i == col {
break;
}
@ -306,7 +308,7 @@ pub fn pos_at_visual_coords(text: RopeSlice, coords: Position, tab_width: usize)
let mut col_char_offset = 0;
let mut cols_remaining = col;
for grapheme in RopeGraphemes::new(text.slice(line_start..line_end)) {
for grapheme in text.slice(line_start..line_end).graphemes() {
let grapheme_width = if grapheme == "\t" {
tab_width - ((col - cols_remaining) % tab_width)
} else {

View file

@ -9,7 +9,7 @@ use crate::{
},
line_ending::get_line_ending,
movement::Direction,
Assoc, ChangeSet, RopeGraphemes, RopeSlice,
Assoc, ChangeSet, RopeSlice,
};
use helix_stdx::range::is_subset;
use helix_stdx::rope::{self, RopeSliceExt};
@ -379,7 +379,7 @@ impl Range {
/// Returns true if this Range covers a single grapheme in the given text
pub fn is_single_grapheme(&self, doc: RopeSlice) -> bool {
let mut graphemes = RopeGraphemes::new(doc.slice(self.from()..self.to()));
let mut graphemes = doc.slice(self.from()..self.to()).graphemes();
let first = graphemes.next();
let second = graphemes.next();
first.is_some() && second.is_none()

View file

@ -3,10 +3,11 @@ mod transport;
mod types;
pub use client::{Client, ConnectionType};
pub use events::Event;
pub use transport::{Payload, Response, Transport};
pub use types::*;
use serde::de::DeserializeOwned;
use thiserror::Error;
#[derive(Error, Debug)]
pub enum Error {
@ -18,9 +19,84 @@ pub enum Error {
Timeout(u64),
#[error("server closed the stream")]
StreamClosed,
#[error("Unhandled")]
Unhandled,
#[error(transparent)]
ExecutableNotFound(#[from] helix_stdx::env::ExecutableNotFoundError),
#[error(transparent)]
Other(#[from] anyhow::Error),
}
pub type Result<T> = core::result::Result<T, Error>;
/// A typed reverse request decoded from the wire by [`Request::parse`].
///
/// NOTE(review): only `runInTerminal` is handled so far; other commands
/// surface as `Error::Unhandled` in `parse`.
#[derive(Debug)]
pub enum Request {
    RunInTerminal(<requests::RunInTerminal as types::Request>::Arguments),
}
impl Request {
    /// Parses a raw command name plus optional JSON arguments into a typed
    /// [`Request`].
    ///
    /// Missing arguments default to `Value::Null` (`unwrap_or_default`).
    /// Unknown commands return [`Error::Unhandled`]; malformed arguments
    /// propagate as a deserialization error from `parse_value`.
    pub fn parse(command: &str, arguments: Option<serde_json::Value>) -> Result<Self> {
        use crate::types::Request as _;
        let arguments = arguments.unwrap_or_default();
        let request = match command {
            requests::RunInTerminal::COMMAND => Self::RunInTerminal(parse_value(arguments)?),
            _ => return Err(Error::Unhandled),
        };
        Ok(request)
    }
}
/// A typed DAP event decoded from the wire by [`Event::parse`].
///
/// Each variant carries the body type declared by the corresponding marker
/// type in [`events`] (via its `events::Event::Body` associated type).
#[derive(Debug)]
pub enum Event {
    Initialized(<events::Initialized as events::Event>::Body),
    Stopped(<events::Stopped as events::Event>::Body),
    Continued(<events::Continued as events::Event>::Body),
    Exited(<events::Exited as events::Event>::Body),
    Terminated(<events::Terminated as events::Event>::Body),
    Thread(<events::Thread as events::Event>::Body),
    Output(<events::Output as events::Event>::Body),
    Breakpoint(<events::Breakpoint as events::Event>::Body),
    Module(<events::Module as events::Event>::Body),
    LoadedSource(<events::LoadedSource as events::Event>::Body),
    Process(<events::Process as events::Event>::Body),
    Capabilities(<events::Capabilities as events::Event>::Body),
    // Not yet implemented:
    // ProgressStart(),
    // ProgressUpdate(),
    // ProgressEnd(),
    // Invalidated(),
    Memory(<events::Memory as events::Event>::Body),
}
impl Event {
    /// Parses a raw event name plus optional JSON body into a typed [`Event`].
    ///
    /// Missing bodies default to `Value::Null` (`unwrap_or_default`), which
    /// only deserializes successfully for events whose body type is optional.
    /// Unknown event names return [`Error::Unhandled`].
    pub fn parse(event: &str, body: Option<serde_json::Value>) -> Result<Self> {
        use crate::events::Event as _;
        let body = body.unwrap_or_default();
        // Dispatch on the wire name declared by each event marker type.
        let event = match event {
            events::Initialized::EVENT => Self::Initialized(parse_value(body)?),
            events::Stopped::EVENT => Self::Stopped(parse_value(body)?),
            events::Continued::EVENT => Self::Continued(parse_value(body)?),
            events::Exited::EVENT => Self::Exited(parse_value(body)?),
            events::Terminated::EVENT => Self::Terminated(parse_value(body)?),
            events::Thread::EVENT => Self::Thread(parse_value(body)?),
            events::Output::EVENT => Self::Output(parse_value(body)?),
            events::Breakpoint::EVENT => Self::Breakpoint(parse_value(body)?),
            events::Module::EVENT => Self::Module(parse_value(body)?),
            events::LoadedSource::EVENT => Self::LoadedSource(parse_value(body)?),
            events::Process::EVENT => Self::Process(parse_value(body)?),
            events::Capabilities::EVENT => Self::Capabilities(parse_value(body)?),
            events::Memory::EVENT => Self::Memory(parse_value(body)?),
            _ => return Err(Error::Unhandled),
        };
        Ok(event)
    }
}
/// Deserializes a JSON value into `T`, converting any serde error into this
/// crate's [`Error`] via `into()`.
// NOTE(review): the conversion presumably lands in `Error::Other` through
// anyhow's blanket `From` — confirm against the `Error` definition.
fn parse_value<T>(value: serde_json::Value) -> Result<T>
where
    T: DeserializeOwned,
{
    serde_json::from_value(value).map_err(|err| err.into())
}

View file

@ -1,4 +1,4 @@
use crate::{Error, Event, Result};
use crate::{Error, Result};
use anyhow::Context;
use log::{error, info, warn};
use serde::{Deserialize, Serialize};
@ -32,11 +32,17 @@ pub struct Response {
pub body: Option<Value>,
}
/// A raw DAP event as it appears on the transport: the event name plus its
/// not-yet-deserialized JSON body.
#[derive(Debug, PartialEq, Eq, Clone, Deserialize, Serialize)]
pub struct Event {
    // Wire name of the event, e.g. "stopped".
    pub event: String,
    // Raw body; `None` when the adapter sent no body field.
    pub body: Option<Value>,
}
#[derive(Debug, Clone, Deserialize, Serialize)]
#[serde(tag = "type", rename_all = "camelCase")]
pub enum Payload {
// type = "event"
Event(Box<Event>),
Event(Event),
// type = "response"
Response(Response),
// type = "request"

View file

@ -759,33 +759,30 @@ pub mod requests {
pub mod events {
use super::*;
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
#[serde(tag = "event", content = "body")]
// seq is omitted as unused and is not sent by some implementations
pub enum Event {
Initialized(Option<DebuggerCapabilities>),
Stopped(Stopped),
Continued(Continued),
Exited(Exited),
Terminated(Option<Terminated>),
Thread(Thread),
Output(Output),
Breakpoint(Breakpoint),
Module(Module),
LoadedSource(LoadedSource),
Process(Process),
Capabilities(Capabilities),
// ProgressStart(),
// ProgressUpdate(),
// ProgressEnd(),
// Invalidated(),
Memory(Memory),
/// Statically describes one DAP event type: its wire name (`EVENT`) and the
/// payload type its body deserializes into (`Body`).
pub trait Event {
    type Body: serde::de::DeserializeOwned + serde::Serialize;
    const EVENT: &'static str;
}
/// Marker type for the DAP `initialized` event.
///
/// Uninhabited enum: exists only to carry the `Event` impl, never constructed.
#[derive(Debug)]
pub enum Initialized {}

impl Event for Initialized {
    // Body is optional on the wire.
    type Body = Option<DebuggerCapabilities>;
    const EVENT: &'static str = "initialized";
}

/// Marker type for the DAP `stopped` event.
#[derive(Debug)]
pub enum Stopped {}

impl Event for Stopped {
    type Body = StoppedBody;
    const EVENT: &'static str = "stopped";
}
#[derive(Debug, PartialEq, Eq, Clone, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct Stopped {
pub struct StoppedBody {
pub reason: String,
#[serde(skip_serializing_if = "Option::is_none")]
pub description: Option<String>,
@ -801,37 +798,77 @@ pub mod events {
pub hit_breakpoint_ids: Option<Vec<usize>>,
}
/// Marker type for the DAP `continued` event.
#[derive(Debug)]
pub enum Continued {}

impl Event for Continued {
    type Body = ContinuedBody;
    const EVENT: &'static str = "continued";
}

/// Body of the `continued` event.
#[derive(Debug, PartialEq, Eq, Clone, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct ContinuedBody {
    // Thread that continued execution.
    pub thread_id: ThreadId,
    // Omitted from serialization when `None`.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub all_threads_continued: Option<bool>,
}
#[derive(Debug, PartialEq, Eq, Clone, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct Exited {
pub exit_code: usize,
#[derive(Debug)]
pub enum Exited {}
impl Event for Exited {
type Body = ExitedBody;
const EVENT: &'static str = "exited";
}
#[derive(Debug, PartialEq, Eq, Clone, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct Terminated {
pub struct ExitedBody {
pub exit_code: usize,
}
/// Marker type for the DAP `terminated` event.
#[derive(Debug)]
pub enum Terminated {}

impl Event for Terminated {
    // Body is optional on the wire.
    type Body = Option<TerminatedBody>;
    const EVENT: &'static str = "terminated";
}

/// Body of the `terminated` event.
#[derive(Debug, PartialEq, Eq, Clone, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct TerminatedBody {
    // Opaque adapter-defined value; omitted from serialization when `None`.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub restart: Option<Value>,
}
#[derive(Debug, PartialEq, Eq, Clone, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct Thread {
pub reason: String,
pub thread_id: ThreadId,
#[derive(Debug)]
pub enum Thread {}
impl Event for Thread {
type Body = ThreadBody;
const EVENT: &'static str = "thread";
}
#[derive(Debug, PartialEq, Eq, Clone, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct Output {
pub struct ThreadBody {
pub reason: String,
pub thread_id: ThreadId,
}
#[derive(Debug)]
pub enum Output {}
impl Event for Output {
type Body = OutputBody;
const EVENT: &'static str = "output";
}
#[derive(Debug, PartialEq, Eq, Clone, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct OutputBody {
pub output: String,
#[serde(skip_serializing_if = "Option::is_none")]
pub category: Option<String>,
@ -849,30 +886,62 @@ pub mod events {
pub data: Option<Value>,
}
#[derive(Debug)]
pub enum Breakpoint {}
impl Event for Breakpoint {
type Body = BreakpointBody;
const EVENT: &'static str = "breakpoint";
}
#[derive(Debug, PartialEq, Eq, Clone, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct Breakpoint {
pub struct BreakpointBody {
pub reason: String,
pub breakpoint: super::Breakpoint,
}
#[derive(Debug)]
pub enum Module {}
impl Event for Module {
type Body = ModuleBody;
const EVENT: &'static str = "module";
}
#[derive(Debug, PartialEq, Eq, Clone, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct Module {
pub struct ModuleBody {
pub reason: String,
pub module: super::Module,
}
#[derive(Debug, PartialEq, Eq, Clone, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct LoadedSource {
pub reason: String,
pub source: super::Source,
#[derive(Debug)]
pub enum LoadedSource {}
impl Event for LoadedSource {
type Body = LoadedSourceBody;
const EVENT: &'static str = "loadedSource";
}
#[derive(Debug, PartialEq, Eq, Clone, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct Process {
pub struct LoadedSourceBody {
pub reason: String,
pub source: super::Source,
}
#[derive(Debug)]
pub enum Process {}
impl Event for Process {
type Body = ProcessBody;
const EVENT: &'static str = "process";
}
#[derive(Debug, PartialEq, Eq, Clone, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct ProcessBody {
pub name: String,
#[serde(skip_serializing_if = "Option::is_none")]
pub system_process_id: Option<usize>,
@ -884,39 +953,55 @@ pub mod events {
pub pointer_size: Option<usize>,
}
#[derive(Debug)]
pub enum Capabilities {}
impl Event for Capabilities {
type Body = CapabilitiesBody;
const EVENT: &'static str = "capabilities";
}
#[derive(Debug, PartialEq, Eq, Clone, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct Capabilities {
pub struct CapabilitiesBody {
pub capabilities: super::DebuggerCapabilities,
}
// #[derive(Debug, PartialEq, Eq, Clone, Deserialize, Serialize)]
// #[serde(rename_all = "camelCase")]
// pub struct Invalidated {
// pub struct InvalidatedBody {
// pub areas: Vec<InvalidatedArea>,
// pub thread_id: Option<ThreadId>,
// pub stack_frame_id: Option<usize>,
// }
#[derive(Debug)]
pub enum Memory {}
impl Event for Memory {
type Body = MemoryBody;
const EVENT: &'static str = "memory";
}
#[derive(Debug, PartialEq, Eq, Clone, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct Memory {
pub struct MemoryBody {
pub memory_reference: String,
pub offset: usize,
pub count: usize,
}
#[test]
fn test_deserialize_module_id_from_number() {
let raw = r#"{"id": 0, "name": "Name"}"#;
let module: super::Module = serde_json::from_str(raw).expect("Error!");
assert_eq!(module.id, "0");
}
#[test]
fn test_deserialize_module_id_from_string() {
let raw = r#"{"id": "0", "name": "Name"}"#;
let module: super::Module = serde_json::from_str(raw).expect("Error!");
assert_eq!(module.id, "0");
}
}
#[test]
fn test_deserialize_module_id_from_number() {
let raw = r#"{"id": 0, "name": "Name"}"#;
let module: Module = serde_json::from_str(raw).expect("Error!");
assert_eq!(module.id, "0");
}
#[test]
fn test_deserialize_module_id_from_string() {
let raw = r#"{"id": "0", "name": "Name"}"#;
let module: Module = serde_json::from_str(raw).expect("Error!");
assert_eq!(module.id, "0");
}

View file

@ -14,12 +14,13 @@ homepage.workspace = true
[dependencies]
dunce = "1.0"
etcetera = "0.8"
ropey = { version = "1.6.1", default-features = false }
ropey.workspace = true
which = "7.0"
regex-cursor = "0.1.4"
bitflags.workspace = true
once_cell = "1.19"
regex-automata = "0.4.9"
unicode-segmentation.workspace = true
[target.'cfg(windows)'.dependencies]
windows-sys = { version = "0.59", features = ["Win32_Foundation", "Win32_Security", "Win32_Security_Authorization", "Win32_Storage_FileSystem", "Win32_System_Threading"] }

View file

@ -1,9 +1,12 @@
use std::fmt;
use std::ops::{Bound, RangeBounds};
pub use regex_cursor::engines::meta::{Builder as RegexBuilder, Regex};
pub use regex_cursor::regex_automata::util::syntax::Config;
use regex_cursor::{Input as RegexInput, RopeyCursor};
use ropey::iter::Chunks;
use ropey::RopeSlice;
use unicode_segmentation::{GraphemeCursor, GraphemeIncomplete};
pub trait RopeSliceExt<'a>: Sized {
fn ends_with(self, text: &str) -> bool;
@ -52,6 +55,102 @@ pub trait RopeSliceExt<'a>: Sized {
/// assert_eq!(text.ceil_char_boundary(3), 3);
/// ```
fn ceil_char_boundary(self, byte_idx: usize) -> usize;
/// Checks whether the given `byte_idx` lies on a character boundary.
///
/// # Example
///
/// ```
/// # use ropey::RopeSlice;
/// # use helix_stdx::rope::RopeSliceExt;
/// let text = RopeSlice::from("⌚"); // three bytes: e2 8c 9a
/// assert!(text.is_char_boundary(0));
/// assert!(!text.is_char_boundary(1));
/// assert!(!text.is_char_boundary(2));
/// assert!(text.is_char_boundary(3));
/// ```
#[allow(clippy::wrong_self_convention)]
fn is_char_boundary(self, byte_idx: usize) -> bool;
/// Finds the closest byte index not exceeding `byte_idx` which lies on a grapheme cluster
/// boundary.
///
/// If `byte_idx` already lies on a grapheme cluster boundary then it is returned as-is. When
/// `byte_idx` lies between two grapheme cluster boundaries, this function returns the byte
/// index of the lesser / earlier / left-hand-side boundary.
///
/// `byte_idx` does not need to be aligned to a character boundary.
///
/// # Example
///
/// ```
/// # use ropey::RopeSlice;
/// # use helix_stdx::rope::RopeSliceExt;
/// let text = RopeSlice::from("\r\n"); // U+000D U+000A, hex: 0d 0a
/// assert_eq!(text.floor_grapheme_boundary(0), 0);
/// assert_eq!(text.floor_grapheme_boundary(1), 0);
/// assert_eq!(text.floor_grapheme_boundary(2), 2);
/// ```
fn floor_grapheme_boundary(self, byte_idx: usize) -> usize;
/// Finds the closest byte index not exceeding `byte_idx` which lies on a grapheme cluster
/// boundary.
///
/// If `byte_idx` already lies on a grapheme cluster boundary then it is returned as-is. When
/// `byte_idx` lies between two grapheme cluster boundaries, this function returns the byte
/// index of the greater / later / right-hand-side boundary.
///
/// `byte_idx` does not need to be aligned to a character boundary.
///
/// # Example
///
/// ```
/// # use ropey::RopeSlice;
/// # use helix_stdx::rope::RopeSliceExt;
/// let text = RopeSlice::from("\r\n"); // U+000D U+000A, hex: 0d 0a
/// assert_eq!(text.ceil_grapheme_boundary(0), 0);
/// assert_eq!(text.ceil_grapheme_boundary(1), 2);
/// assert_eq!(text.ceil_grapheme_boundary(2), 2);
/// ```
fn ceil_grapheme_boundary(self, byte_idx: usize) -> usize;
/// Checks whether the `byte_idx` lies on a grapheme cluster boundary.
///
/// # Example
///
/// ```
/// # use ropey::RopeSlice;
/// # use helix_stdx::rope::RopeSliceExt;
/// let text = RopeSlice::from("\r\n"); // U+000D U+000A, hex: 0d 0a
/// assert!(text.is_grapheme_boundary(0));
/// assert!(!text.is_grapheme_boundary(1));
/// assert!(text.is_grapheme_boundary(2));
/// ```
#[allow(clippy::wrong_self_convention)]
fn is_grapheme_boundary(self, byte_idx: usize) -> bool;
/// Returns an iterator over the grapheme clusters in the slice.
///
/// # Example
///
/// ```
/// # use ropey::RopeSlice;
/// # use helix_stdx::rope::RopeSliceExt;
/// let text = RopeSlice::from("😶‍🌫️🏴‍☠️🖼️");
/// let graphemes: Vec<_> = text.graphemes().collect();
/// assert_eq!(graphemes.as_slice(), &["😶‍🌫️", "🏴‍☠️", "🖼️"]);
/// ```
fn graphemes(self) -> RopeGraphemes<'a>;
/// Returns an iterator over the grapheme clusters in the slice, reversed.
///
/// The returned iterator starts at the end of the slice and ends at the beginning of the
/// slice.
///
/// # Example
///
/// ```
/// # use ropey::RopeSlice;
/// # use helix_stdx::rope::RopeSliceExt;
/// let text = RopeSlice::from("😶‍🌫️🏴‍☠️🖼️");
/// let graphemes: Vec<_> = text.graphemes_rev().collect();
/// assert_eq!(graphemes.as_slice(), &["🖼️", "🏴‍☠️", "😶‍🌫️"]);
/// ```
fn graphemes_rev(self) -> RevRopeGraphemes<'a>;
}
impl<'a> RopeSliceExt<'a> for RopeSlice<'a> {
@ -112,7 +211,7 @@ impl<'a> RopeSliceExt<'a> for RopeSlice<'a> {
.map(|pos| self.len_chars() - pos - 1)
}
// These two are adapted from std's `round_char_boundary` functions:
// These three are adapted from std:
fn floor_char_boundary(self, byte_idx: usize) -> usize {
if byte_idx >= self.len_bytes() {
@ -140,6 +239,127 @@ impl<'a> RopeSliceExt<'a> for RopeSlice<'a> {
.map_or(upper_bound, |pos| pos + byte_idx)
}
}
// Checks whether `byte_idx` lies on a character (UTF-8 codepoint) boundary.
// Adapted from std's `str::is_char_boundary`.
fn is_char_boundary(self, byte_idx: usize) -> bool {
    // Index 0 is always a boundary, even for an empty slice.
    if byte_idx == 0 {
        return true;
    }

    if byte_idx >= self.len_bytes() {
        // Only the exact end-of-slice index counts as a boundary.
        byte_idx == self.len_bytes()
    } else {
        // A byte starts a codepoint iff it is not a UTF-8 continuation byte.
        is_utf8_char_boundary(self.bytes_at(byte_idx).next().unwrap())
    }
}
// Returns the closest grapheme-cluster boundary at or before `byte_idx`.
// `byte_idx` need not be char-aligned; out-of-range indices clamp to the end.
fn floor_grapheme_boundary(self, mut byte_idx: usize) -> usize {
    if byte_idx >= self.len_bytes() {
        return self.len_bytes();
    }

    // Snap up past the current position so that scanning *backwards* from
    // here finds `byte_idx` itself when it is already a boundary.
    byte_idx = self.ceil_char_boundary(byte_idx + 1);

    let (mut chunk, mut chunk_byte_idx, _, _) = self.chunk_at_byte(byte_idx);

    let mut cursor = GraphemeCursor::new(byte_idx, self.len_bytes(), true);

    loop {
        match cursor.prev_boundary(chunk, chunk_byte_idx) {
            // No boundary before this point: the start of the slice.
            Ok(None) => return 0,
            Ok(Some(boundary)) => return boundary,
            // Cursor ran off the front of the chunk; fetch the previous one.
            Err(GraphemeIncomplete::PrevChunk) => {
                let (ch, ch_byte_idx, _, _) = self.chunk_at_byte(chunk_byte_idx - 1);
                chunk = ch;
                chunk_byte_idx = ch_byte_idx;
            }
            // Cursor needs earlier text to classify the boundary.
            Err(GraphemeIncomplete::PreContext(n)) => {
                let ctx_chunk = self.chunk_at_byte(n - 1).0;
                cursor.provide_context(ctx_chunk, n - ctx_chunk.len());
            }
            _ => unreachable!(),
        }
    }
}
// Returns the closest grapheme-cluster boundary at or after `byte_idx`.
// `byte_idx` need not be char-aligned; out-of-range indices clamp to the end.
fn ceil_grapheme_boundary(self, mut byte_idx: usize) -> usize {
    if byte_idx >= self.len_bytes() {
        return self.len_bytes();
    }

    // Index 0 is always a grapheme boundary.
    if byte_idx == 0 {
        return 0;
    }

    // Snap down before the current position so that scanning *forwards* from
    // here finds `byte_idx` itself when it is already a boundary.
    byte_idx = self.floor_char_boundary(byte_idx - 1);

    let (mut chunk, mut chunk_byte_idx, _, _) = self.chunk_at_byte(byte_idx);

    let mut cursor = GraphemeCursor::new(byte_idx, self.len_bytes(), true);

    loop {
        match cursor.next_boundary(chunk, chunk_byte_idx) {
            // No boundary after this point: the end of the slice.
            Ok(None) => return self.len_bytes(),
            Ok(Some(boundary)) => return boundary,
            // Cursor ran off the end of the chunk; fetch the next one.
            Err(GraphemeIncomplete::NextChunk) => {
                chunk_byte_idx += chunk.len();
                chunk = self.chunk_at_byte(chunk_byte_idx).0;
            }
            // Cursor needs earlier text to classify the boundary.
            Err(GraphemeIncomplete::PreContext(n)) => {
                let ctx_chunk = self.chunk_at_byte(n - 1).0;
                cursor.provide_context(ctx_chunk, n - ctx_chunk.len());
            }
            _ => unreachable!(),
        }
    }
}
/// Checks whether `byte_idx` falls exactly on a grapheme cluster boundary.
fn is_grapheme_boundary(self, byte_idx: usize) -> bool {
    // A grapheme boundary is always a char boundary, so bail out early when
    // the index splits a code point.
    if !self.is_char_boundary(byte_idx) {
        return false;
    }

    let (chunk, chunk_start, _, _) = self.chunk_at_byte(byte_idx);
    let mut cursor = GraphemeCursor::new(byte_idx, self.len_bytes(), true);

    loop {
        match cursor.is_boundary(chunk, chunk_start) {
            Ok(is_boundary) => return is_boundary,
            // The segmenter needs text before the chunk; hand it the chunk
            // containing the byte just before `ctx_end`.
            Err(GraphemeIncomplete::PreContext(ctx_end)) => {
                let (ctx, ctx_start, _, _) = self.chunk_at_byte(ctx_end - 1);
                cursor.provide_context(ctx, ctx_start);
            }
            Err(_) => unreachable!(),
        }
    }
}
/// Returns an iterator over the grapheme clusters of this slice, front to back.
fn graphemes(self) -> RopeGraphemes<'a> {
    // Prime the iterator with the first chunk so the cursor has text to
    // examine immediately; an empty slice yields "".
    let mut chunk_iter = self.chunks();
    let current = chunk_iter.next().unwrap_or("");
    RopeGraphemes {
        text: self,
        chunks: chunk_iter,
        cur_chunk: current,
        cur_chunk_start: 0,
        cursor: GraphemeCursor::new(0, self.len_bytes(), true),
    }
}
/// Returns an iterator over the grapheme clusters of this slice, back to front.
fn graphemes_rev(self) -> RevRopeGraphemes<'a> {
    let total = self.len_bytes();
    // Position the chunk iterator at the end of the slice and flip its
    // direction so `next()` walks toward the front.
    let (mut chunk_iter, mut start, _, _) = self.chunks_at_byte(total);
    chunk_iter.reverse();
    // Prime the iterator with the last chunk; `start` becomes its offset.
    let last_chunk = chunk_iter.next().unwrap_or("");
    start -= last_chunk.len();
    RevRopeGraphemes {
        text: self,
        chunks: chunk_iter,
        cur_chunk: last_chunk,
        cur_chunk_start: start,
        cursor: GraphemeCursor::new(total, total, true),
    }
}
}
// copied from std
@ -149,6 +369,130 @@ const fn is_utf8_char_boundary(b: u8) -> bool {
(b as i8) >= -0x40
}
/// An iterator over the graphemes of a `RopeSlice`.
#[derive(Clone)]
pub struct RopeGraphemes<'a> {
    // The full text being iterated; used to fetch extra context chunks and to
    // slice out clusters that straddle chunk boundaries.
    text: RopeSlice<'a>,
    // Remaining chunks after `cur_chunk`.
    chunks: Chunks<'a>,
    // The chunk the cursor is currently scanning.
    cur_chunk: &'a str,
    // Byte offset of `cur_chunk` within `text`.
    cur_chunk_start: usize,
    // Grapheme segmentation state machine (from unicode-segmentation).
    cursor: GraphemeCursor,
}
impl fmt::Debug for RopeGraphemes<'_> {
    /// Formats all fields except `cursor`, which has no `Debug` impl.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_struct("RopeGraphemes")
            .field("text", &self.text)
            .field("chunks", &self.chunks)
            .field("cur_chunk", &self.cur_chunk)
            .field("cur_chunk_start", &self.cur_chunk_start)
            // `cursor` is skipped (`GraphemeCursor` is not `Debug`); signal
            // the omission with `..` instead of pretending it's the full set.
            .finish_non_exhaustive()
    }
}
impl<'a> Iterator for RopeGraphemes<'a> {
    type Item = RopeSlice<'a>;

    /// Yields the next grapheme cluster as a sub-slice of the underlying text.
    fn next(&mut self) -> Option<Self::Item> {
        // The next cluster will span bytes `a..b` of the whole text.
        let a = self.cursor.cur_cursor();
        let b;
        loop {
            match self
                .cursor
                .next_boundary(self.cur_chunk, self.cur_chunk_start)
            {
                // Reached the end of the text: iteration is done.
                Ok(None) => {
                    return None;
                }
                Ok(Some(n)) => {
                    b = n;
                    break;
                }
                // The cursor ran off the end of the current chunk; advance to
                // the next chunk and retry.
                Err(GraphemeIncomplete::NextChunk) => {
                    self.cur_chunk_start += self.cur_chunk.len();
                    self.cur_chunk = self.chunks.next().unwrap_or("");
                }
                // The cursor needs look-behind text ending at byte `idx`.
                Err(GraphemeIncomplete::PreContext(idx)) => {
                    let (chunk, byte_idx, _, _) = self.text.chunk_at_byte(idx.saturating_sub(1));
                    self.cursor.provide_context(chunk, byte_idx);
                }
                _ => unreachable!(),
            }
        }
        if a < self.cur_chunk_start {
            // The cluster starts before the current chunk (it straddles a
            // chunk boundary), so slice it out of the rope itself.
            Some(self.text.byte_slice(a..b))
        } else {
            // Fast path: the cluster lies entirely inside the current chunk.
            let a2 = a - self.cur_chunk_start;
            let b2 = b - self.cur_chunk_start;
            Some((&self.cur_chunk[a2..b2]).into())
        }
    }
}
/// An iterator over the graphemes of a `RopeSlice` in reverse.
#[derive(Clone)]
pub struct RevRopeGraphemes<'a> {
    // The full text being iterated; used to fetch extra context chunks and to
    // slice out clusters that straddle chunk boundaries.
    text: RopeSlice<'a>,
    // Remaining chunks, iterated in reverse (see `graphemes_rev`).
    chunks: Chunks<'a>,
    // The chunk the cursor is currently scanning.
    cur_chunk: &'a str,
    // Byte offset of `cur_chunk` within `text`.
    cur_chunk_start: usize,
    // Grapheme segmentation state machine, initialized at the end of the text.
    cursor: GraphemeCursor,
}
impl fmt::Debug for RevRopeGraphemes<'_> {
    /// Formats all fields except `cursor`, which has no `Debug` impl.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_struct("RevRopeGraphemes")
            .field("text", &self.text)
            .field("chunks", &self.chunks)
            .field("cur_chunk", &self.cur_chunk)
            .field("cur_chunk_start", &self.cur_chunk_start)
            // `cursor` is skipped (`GraphemeCursor` is not `Debug`); signal
            // the omission with `..` instead of pretending it's the full set.
            .finish_non_exhaustive()
    }
}
impl<'a> Iterator for RevRopeGraphemes<'a> {
    type Item = RopeSlice<'a>;

    /// Yields the previous grapheme cluster as a sub-slice of the text.
    fn next(&mut self) -> Option<Self::Item> {
        // Scanning backwards: `a` is the end of the next cluster, `b` its
        // start, so the cluster spans bytes `b..a`.
        let a = self.cursor.cur_cursor();
        let b;
        loop {
            match self
                .cursor
                .prev_boundary(self.cur_chunk, self.cur_chunk_start)
            {
                // Reached the front of the text: iteration is done.
                Ok(None) => {
                    return None;
                }
                Ok(Some(n)) => {
                    b = n;
                    break;
                }
                // Step back to the previous chunk and retry. `chunks` was
                // reversed at construction, so `next()` walks toward the front.
                Err(GraphemeIncomplete::PrevChunk) => {
                    self.cur_chunk = self.chunks.next().unwrap_or("");
                    self.cur_chunk_start -= self.cur_chunk.len();
                }
                // The cursor needs look-behind text ending at byte `idx`.
                Err(GraphemeIncomplete::PreContext(idx)) => {
                    let (chunk, byte_idx, _, _) = self.text.chunk_at_byte(idx.saturating_sub(1));
                    self.cursor.provide_context(chunk, byte_idx);
                }
                _ => unreachable!(),
            }
        }
        if a >= self.cur_chunk_start + self.cur_chunk.len() {
            // The cluster ends past the current chunk (it straddles a chunk
            // boundary), so slice it out of the rope itself.
            Some(self.text.byte_slice(b..a))
        } else {
            // Fast path: the cluster lies entirely inside the current chunk.
            let a2 = a - self.cur_chunk_start;
            let b2 = b - self.cur_chunk_start;
            Some((&self.cur_chunk[b2..a2]).into())
        }
    }
}
#[cfg(test)]
mod tests {
use ropey::RopeSlice;
@ -166,12 +510,13 @@ mod tests {
}
#[test]
fn floor_ceil_char_boundary() {
fn char_boundaries() {
let ascii = RopeSlice::from("ascii");
// When the given index lies on a character boundary, the index should not change.
for byte_idx in 0..=ascii.len_bytes() {
assert_eq!(ascii.floor_char_boundary(byte_idx), byte_idx);
assert_eq!(ascii.ceil_char_boundary(byte_idx), byte_idx);
assert!(ascii.is_char_boundary(byte_idx));
}
// This is a polyfill of a method of this trait which was replaced by ceil_char_boundary.
@ -198,4 +543,44 @@ mod tests {
}
}
}
#[test]
fn grapheme_boundaries() {
    let ascii = RopeSlice::from("ascii");
    // When the given index lies on a grapheme boundary, the index should not change.
    // Every byte index in pure-ASCII text is a grapheme boundary, so the
    // grapheme floor/ceil functions must be identities here. (The original
    // asserts used floor/ceil_char_boundary, which duplicates the char
    // boundary test and leaves the grapheme variants unexercised.)
    for byte_idx in 0..=ascii.len_bytes() {
        assert_eq!(ascii.floor_grapheme_boundary(byte_idx), byte_idx);
        assert_eq!(ascii.ceil_grapheme_boundary(byte_idx), byte_idx);
        assert!(ascii.is_grapheme_boundary(byte_idx));
    }

    // 🏴‍☠️: U+1F3F4 U+200D U+2620 U+FE0F
    // 13 bytes, hex: f0 9f 8f b4 + e2 80 8d + e2 98 a0 + ef b8 8f
    let g = RopeSlice::from("🏴‍☠️\r\n");
    let emoji_len = "🏴‍☠️".len();
    let end = g.len_bytes();

    // Floor: indices inside the emoji snap back to its start; indices inside
    // the trailing "\r\n" cluster snap back to the emoji/CRLF boundary.
    for byte_idx in 0..emoji_len {
        assert_eq!(g.floor_grapheme_boundary(byte_idx), 0);
    }
    for byte_idx in emoji_len..end {
        assert_eq!(g.floor_grapheme_boundary(byte_idx), emoji_len);
    }
    assert_eq!(g.floor_grapheme_boundary(end), end);

    // Ceil: indices inside a cluster snap forward to the cluster's end.
    assert_eq!(g.ceil_grapheme_boundary(0), 0);
    for byte_idx in 1..=emoji_len {
        assert_eq!(g.ceil_grapheme_boundary(byte_idx), emoji_len);
    }
    for byte_idx in emoji_len + 1..=end {
        assert_eq!(g.ceil_grapheme_boundary(byte_idx), end);
    }

    // Only the cluster edges are grapheme boundaries.
    assert!(g.is_grapheme_boundary(0));
    assert!(g.is_grapheme_boundary(emoji_len));
    assert!(g.is_grapheme_boundary(end));
    for byte_idx in (1..emoji_len).chain(emoji_len + 1..end) {
        assert!(!g.is_grapheme_boundary(byte_idx));
    }
}
}

View file

@ -61,7 +61,7 @@ tokio-stream = "0.1"
futures-util = { version = "0.3", features = ["std", "async-await"], default-features = false }
arc-swap = { version = "1.7.1" }
termini = "1"
indexmap = "2.5"
indexmap = "2.7"
# Logging
fern = "0.7"

View file

@ -20,7 +20,7 @@ use helix_core::{
comment,
doc_formatter::TextFormat,
encoding, find_workspace,
graphemes::{self, next_grapheme_boundary, RevRopeGraphemes},
graphemes::{self, next_grapheme_boundary},
history::UndoKind,
increment,
indent::{self, IndentStyle},
@ -35,8 +35,8 @@ use helix_core::{
text_annotations::{Overlay, TextAnnotations},
textobject,
unicode::width::UnicodeWidthChar,
visual_offset_from_block, Deletion, LineEnding, Position, Range, Rope, RopeGraphemes,
RopeReader, RopeSlice, Selection, SmallVec, Syntax, Tendril, Transaction,
visual_offset_from_block, Deletion, LineEnding, Position, Range, Rope, RopeReader, RopeSlice,
Selection, SmallVec, Syntax, Tendril, Transaction,
};
use helix_view::{
document::{FormatterError, Mode, SCRATCH_BUFFER_NAME},
@ -633,10 +633,17 @@ impl std::str::FromStr for MappableCommand {
.collect::<Vec<String>>();
typed::TYPABLE_COMMAND_MAP
.get(name)
.map(|cmd| MappableCommand::Typable {
name: cmd.name.to_owned(),
doc: format!(":{} {:?}", cmd.name, args),
args,
.map(|cmd| {
let doc = if args.is_empty() {
cmd.doc.to_string()
} else {
format!(":{} {:?}", cmd.name, args)
};
MappableCommand::Typable {
name: cmd.name.to_owned(),
doc,
args,
}
})
.ok_or_else(|| anyhow!("No TypableCommand named '{}'", s))
} else if let Some(suffix) = s.strip_prefix('@') {
@ -1681,10 +1688,12 @@ fn replace(cx: &mut Context) {
if let Some(ch) = ch {
let transaction = Transaction::change_by_selection(doc.text(), selection, |range| {
if !range.is_empty() {
let text: Tendril =
RopeGraphemes::new(doc.text().slice(range.from()..range.to()))
.map(|_g| ch)
.collect();
let text: Tendril = doc
.text()
.slice(range.from()..range.to())
.graphemes()
.map(|_g| ch)
.collect();
(range.from(), range.to(), Some(text))
} else {
// No change.
@ -3512,6 +3521,10 @@ async fn make_format_callback(
}
}
Err(err) => {
if write.is_none() {
editor.set_error(err.to_string());
return;
}
log::info!("failed to format '{}': {err}", doc.display_name());
}
}
@ -3822,8 +3835,7 @@ fn goto_next_diag(cx: &mut Context) {
let diag = doc
.diagnostics()
.iter()
.find(|diag| diag.range.start > cursor_pos)
.or_else(|| doc.diagnostics().first());
.find(|diag| diag.range.start > cursor_pos);
let selection = match diag {
Some(diag) => Selection::single(diag.range.start, diag.range.end),
@ -3850,8 +3862,7 @@ fn goto_prev_diag(cx: &mut Context) {
.diagnostics()
.iter()
.rev()
.find(|diag| diag.range.start < cursor_pos)
.or_else(|| doc.diagnostics().last());
.find(|diag| diag.range.start < cursor_pos);
let selection = match diag {
// NOTE: the selection is reversed because we're jumping to the
@ -6488,6 +6499,7 @@ fn jump_to_label(cx: &mut Context, labels: Vec<Range>, behaviour: Movement) {
let alphabet = &cx.editor.config().jump_label_alphabet;
let Some(i) = event
.char()
.filter(|_| event.modifiers.is_empty())
.and_then(|ch| alphabet.iter().position(|&it| it == ch))
else {
doc_mut!(cx.editor, &doc).remove_jump_labels(view);
@ -6504,6 +6516,7 @@ fn jump_to_label(cx: &mut Context, labels: Vec<Range>, behaviour: Movement) {
let alphabet = &cx.editor.config().jump_label_alphabet;
let Some(inner) = event
.char()
.filter(|_| event.modifiers.is_empty())
.and_then(|ch| alphabet.iter().position(|&it| it == ch))
else {
return;
@ -6572,7 +6585,9 @@ fn jump_to_word(cx: &mut Context, behaviour: Movement) {
// madeup of word characters. The latter condition is needed because
// move_next_word_end simply treats a sequence of characters from
// the same char class as a word so `=<` would also count as a word.
let add_label = RevRopeGraphemes::new(text.slice(..cursor_fwd.head))
let add_label = text
.slice(..cursor_fwd.head)
.graphemes_rev()
.take(2)
.take_while(|g| g.chars().all(char_is_word))
.count()
@ -6598,7 +6613,9 @@ fn jump_to_word(cx: &mut Context, behaviour: Movement) {
// madeup of word characters. The latter condition is needed because
// move_prev_word_start simply treats a sequence of characters from
// the same char class as a word so `=<` would also count as a word.
let add_label = RopeGraphemes::new(text.slice(cursor_rev.head..))
let add_label = text
.slice(cursor_rev.head..)
.graphemes()
.take(2)
.take_while(|g| g.chars().all(char_is_word))
.count()

View file

@ -61,14 +61,19 @@ macro_rules! language_server_with_feature {
}};
}
/// A wrapper around `lsp::Location` that swaps out the LSP URI for `helix_core::Uri`.
/// A wrapper around `lsp::Location` that swaps out the LSP URI for `helix_core::Uri` and adds
/// the server's offset encoding.
#[derive(Debug, Clone, PartialEq, Eq)]
struct Location {
uri: Uri,
range: lsp::Range,
offset_encoding: OffsetEncoding,
}
fn lsp_location_to_location(location: lsp::Location) -> Option<Location> {
fn lsp_location_to_location(
location: lsp::Location,
offset_encoding: OffsetEncoding,
) -> Option<Location> {
let uri = match location.uri.try_into() {
Ok(uri) => uri,
Err(err) => {
@ -79,13 +84,13 @@ fn lsp_location_to_location(location: lsp::Location) -> Option<Location> {
Some(Location {
uri,
range: location.range,
offset_encoding,
})
}
struct SymbolInformationItem {
location: Location,
symbol: lsp::SymbolInformation,
offset_encoding: OffsetEncoding,
}
struct DiagnosticStyles {
@ -98,7 +103,6 @@ struct DiagnosticStyles {
struct PickerDiagnostic {
location: Location,
diag: lsp::Diagnostic,
offset_encoding: OffsetEncoding,
}
fn location_to_file_location(location: &Location) -> Option<FileLocation> {
@ -110,12 +114,7 @@ fn location_to_file_location(location: &Location) -> Option<FileLocation> {
Some((path.into(), line))
}
fn jump_to_location(
editor: &mut Editor,
location: &Location,
offset_encoding: OffsetEncoding,
action: Action,
) {
fn jump_to_location(editor: &mut Editor, location: &Location, action: Action) {
let (view, doc) = current!(editor);
push_jump(view, doc);
@ -124,7 +123,13 @@ fn jump_to_location(
editor.set_error(err);
return;
};
jump_to_position(editor, path, location.range, offset_encoding, action);
jump_to_position(
editor,
path,
location.range,
location.offset_encoding,
action,
);
}
fn jump_to_position(
@ -220,9 +225,9 @@ fn diag_picker(
location: Location {
uri: uri.clone(),
range: diag.range,
offset_encoding: ls.offset_encoding(),
},
diag,
offset_encoding: ls.offset_encoding(),
});
}
}
@ -286,7 +291,7 @@ fn diag_picker(
flat_diag,
styles,
move |cx, diag, action| {
jump_to_location(cx.editor, &diag.location, diag.offset_encoding, action);
jump_to_location(cx.editor, &diag.location, action);
let (view, doc) = current!(cx.editor);
view.diagnostics_handler
.immediately_show_diagnostic(doc, view.id);
@ -314,10 +319,10 @@ pub fn symbol_picker(cx: &mut Context) {
location: lsp::Location::new(file.uri.clone(), symbol.selection_range),
container_name: None,
},
offset_encoding,
location: Location {
uri: uri.clone(),
range: symbol.selection_range,
offset_encoding,
},
});
for child in symbol.children.into_iter().flatten() {
@ -355,9 +360,9 @@ pub fn symbol_picker(cx: &mut Context) {
location: Location {
uri: doc_uri.clone(),
range: symbol.location.range,
offset_encoding,
},
symbol,
offset_encoding,
})
.collect(),
lsp::DocumentSymbolResponse::Nested(symbols) => {
@ -410,7 +415,7 @@ pub fn symbol_picker(cx: &mut Context) {
symbols,
(),
move |cx, item, action| {
jump_to_location(cx.editor, &item.location, item.offset_encoding, action);
jump_to_location(cx.editor, &item.location, action);
},
)
.with_preview(move |_editor, item| location_to_file_location(&item.location))
@ -467,9 +472,9 @@ pub fn workspace_symbol_picker(cx: &mut Context) {
location: Location {
uri,
range: symbol.location.range,
offset_encoding,
},
symbol,
offset_encoding,
})
})
.collect();
@ -521,7 +526,7 @@ pub fn workspace_symbol_picker(cx: &mut Context) {
[],
(),
move |cx, item, action| {
jump_to_location(cx.editor, &item.location, item.offset_encoding, action);
jump_to_location(cx.editor, &item.location, action);
},
)
.with_preview(|_editor, item| location_to_file_location(&item.location))
@ -853,17 +858,12 @@ impl Display for ApplyEditErrorKind {
}
/// Precondition: `locations` should be non-empty.
fn goto_impl(
editor: &mut Editor,
compositor: &mut Compositor,
locations: Vec<Location>,
offset_encoding: OffsetEncoding,
) {
fn goto_impl(editor: &mut Editor, compositor: &mut Compositor, locations: Vec<Location>) {
let cwdir = helix_stdx::env::current_working_dir();
match locations.as_slice() {
[location] => {
jump_to_location(editor, location, offset_encoding, Action::Replace);
jump_to_location(editor, location, Action::Replace);
}
[] => unreachable!("`locations` should be non-empty for `goto_impl`"),
_locations => {
@ -880,58 +880,76 @@ fn goto_impl(
},
)];
let picker = Picker::new(columns, 0, locations, cwdir, move |cx, location, action| {
jump_to_location(cx.editor, location, offset_encoding, action)
let picker = Picker::new(columns, 0, locations, cwdir, |cx, location, action| {
jump_to_location(cx.editor, location, action)
})
.with_preview(move |_editor, location| location_to_file_location(location));
.with_preview(|_editor, location| location_to_file_location(location));
compositor.push(Box::new(overlaid(picker)));
}
}
}
/// Flattens any `GotoDefinitionResponse` variant into a flat list of
/// `Location`s, dropping entries whose URIs fail to convert.
fn to_locations(definitions: Option<lsp::GotoDefinitionResponse>) -> Vec<Location> {
    let Some(response) = definitions else {
        return Vec::new();
    };
    match response {
        lsp::GotoDefinitionResponse::Scalar(location) => {
            lsp_location_to_location(location).into_iter().collect()
        }
        lsp::GotoDefinitionResponse::Array(locations) => locations
            .into_iter()
            .flat_map(lsp_location_to_location)
            .collect(),
        // Location links carry extra ranges; keep only the target.
        lsp::GotoDefinitionResponse::Link(links) => links
            .into_iter()
            .map(|link| lsp::Location::new(link.target_uri, link.target_range))
            .flat_map(lsp_location_to_location)
            .collect(),
    }
}
fn goto_single_impl<P, F>(cx: &mut Context, feature: LanguageServerFeature, request_provider: P)
where
P: Fn(&Client, lsp::Position, lsp::TextDocumentIdentifier) -> Option<F>,
F: Future<Output = helix_lsp::Result<serde_json::Value>> + 'static + Send,
{
let (view, doc) = current!(cx.editor);
let (view, doc) = current_ref!(cx.editor);
let mut futures: FuturesOrdered<_> = doc
.language_servers_with_feature(feature)
.map(|language_server| {
let offset_encoding = language_server.offset_encoding();
let pos = doc.position(view.id, offset_encoding);
let future = request_provider(language_server, pos, doc.identifier()).unwrap();
async move {
let json = future.await?;
let response: Option<lsp::GotoDefinitionResponse> = serde_json::from_value(json)?;
anyhow::Ok((response, offset_encoding))
}
})
.collect();
let language_server = language_server_with_feature!(cx.editor, doc, feature);
let offset_encoding = language_server.offset_encoding();
let pos = doc.position(view.id, offset_encoding);
let future = request_provider(language_server, pos, doc.identifier()).unwrap();
cx.callback(
future,
move |editor, compositor, response: Option<lsp::GotoDefinitionResponse>| {
let items = to_locations(response);
if items.is_empty() {
cx.jobs.callback(async move {
let mut locations = Vec::new();
while let Some((response, offset_encoding)) = futures.try_next().await? {
match response {
Some(lsp::GotoDefinitionResponse::Scalar(lsp_location)) => {
locations.extend(lsp_location_to_location(lsp_location, offset_encoding));
}
Some(lsp::GotoDefinitionResponse::Array(lsp_locations)) => {
locations.extend(
lsp_locations.into_iter().flat_map(|location| {
lsp_location_to_location(location, offset_encoding)
}),
);
}
Some(lsp::GotoDefinitionResponse::Link(lsp_locations)) => {
locations.extend(
lsp_locations
.into_iter()
.map(|location_link| {
lsp::Location::new(
location_link.target_uri,
location_link.target_range,
)
})
.flat_map(|location| {
lsp_location_to_location(location, offset_encoding)
}),
);
}
None => (),
}
}
let call = move |editor: &mut Editor, compositor: &mut Compositor| {
if locations.is_empty() {
editor.set_error("No definition found.");
} else {
goto_impl(editor, compositor, items, offset_encoding);
goto_impl(editor, compositor, locations);
}
},
);
};
Ok(Callback::EditorCompositor(Box::new(call)))
});
}
pub fn goto_declaration(cx: &mut Context) {
@ -968,38 +986,48 @@ pub fn goto_implementation(cx: &mut Context) {
pub fn goto_reference(cx: &mut Context) {
let config = cx.editor.config();
let (view, doc) = current!(cx.editor);
let (view, doc) = current_ref!(cx.editor);
// TODO could probably support multiple language servers,
// not sure if there's a real practical use case for this though
let language_server =
language_server_with_feature!(cx.editor, doc, LanguageServerFeature::GotoReference);
let offset_encoding = language_server.offset_encoding();
let pos = doc.position(view.id, offset_encoding);
let future = language_server
.goto_reference(
doc.identifier(),
pos,
config.lsp.goto_reference_include_declaration,
None,
)
.unwrap();
let mut futures: FuturesOrdered<_> = doc
.language_servers_with_feature(LanguageServerFeature::GotoReference)
.map(|language_server| {
let offset_encoding = language_server.offset_encoding();
let pos = doc.position(view.id, offset_encoding);
let future = language_server
.goto_reference(
doc.identifier(),
pos,
config.lsp.goto_reference_include_declaration,
None,
)
.unwrap();
async move {
let json = future.await?;
let locations: Option<Vec<lsp::Location>> = serde_json::from_value(json)?;
anyhow::Ok((locations, offset_encoding))
}
})
.collect();
cx.callback(
future,
move |editor, compositor, response: Option<Vec<lsp::Location>>| {
let items: Vec<Location> = response
.into_iter()
.flatten()
.flat_map(lsp_location_to_location)
.collect();
if items.is_empty() {
cx.jobs.callback(async move {
let mut locations = Vec::new();
while let Some((lsp_locations, offset_encoding)) = futures.try_next().await? {
locations.extend(
lsp_locations
.into_iter()
.flatten()
.flat_map(|location| lsp_location_to_location(location, offset_encoding)),
);
}
let call = move |editor: &mut Editor, compositor: &mut Compositor| {
if locations.is_empty() {
editor.set_error("No references found.");
} else {
goto_impl(editor, compositor, items, offset_encoding);
goto_impl(editor, compositor, locations);
}
},
);
};
Ok(Callback::EditorCompositor(Box::new(call)))
});
}
pub fn signature_help(cx: &mut Context) {

View file

@ -137,9 +137,12 @@ impl Compositor {
}
pub fn handle_event(&mut self, event: &Event, cx: &mut Context) -> bool {
// If it is a key event and a macro is being recorded, push the key event to the recording.
// If it is a key event, a macro is being recorded, and a macro isn't being replayed,
// push the key event to the recording.
if let (Event::Key(key), Some((_, keys))) = (event, &mut cx.editor.macro_recording) {
keys.push(*key);
if cx.editor.macro_replaying.is_empty() {
keys.push(*key);
}
}
let mut callbacks = Vec::new();

View file

@ -72,6 +72,11 @@ pub(crate) fn path_completion(
return Vec::new();
};
let edit_diff = typed_file_name
.as_ref()
.map(|s| s.chars().count())
.unwrap_or_default();
read_dir
.filter_map(Result::ok)
.filter_map(|dir_entry| {
@ -88,11 +93,6 @@ pub(crate) fn path_completion(
let kind = path_kind(&md);
let documentation = path_documentation(&md, &dir_path.join(&file_name), kind);
let edit_diff = typed_file_name
.as_ref()
.map(|f| f.len())
.unwrap_or_default();
let transaction = Transaction::change_by_selection(&text, &selection, |range| {
let cursor = range.cursor(text.slice(..));
(cursor - edit_diff, cursor, Some((&file_name).into()))

View file

@ -5,7 +5,6 @@ use helix_core::syntax;
use helix_lsp::lsp;
use helix_view::graphics::{Margin, Rect, Style};
use helix_view::input::Event;
use once_cell::sync::OnceCell;
use tui::buffer::Buffer;
use tui::widgets::{BorderType, Paragraph, Widget, Wrap};
@ -15,11 +14,8 @@ use crate::alt;
use crate::ui::Markdown;
pub struct Hover {
hovers: Vec<(String, lsp::Hover)>,
active_index: usize,
config_loader: Arc<ArcSwap<syntax::Loader>>,
content: OnceCell<(Option<Markdown>, Markdown)>,
contents: Vec<(Option<Markdown>, Markdown)>,
}
impl Hover {
@ -29,42 +25,42 @@ impl Hover {
hovers: Vec<(String, lsp::Hover)>,
config_loader: Arc<ArcSwap<syntax::Loader>>,
) -> Self {
let n_hovers = hovers.len();
let contents = hovers
.into_iter()
.enumerate()
.map(|(idx, (server_name, hover))| {
let header = (n_hovers > 1).then(|| {
Markdown::new(
format!("**[{}/{}] {}**", idx + 1, n_hovers, server_name),
config_loader.clone(),
)
});
let body = Markdown::new(
hover_contents_to_string(hover.contents),
config_loader.clone(),
);
(header, body)
})
.collect();
Self {
hovers,
active_index: usize::default(),
config_loader,
content: OnceCell::new(),
contents,
}
}
fn has_header(&self) -> bool {
self.contents.len() > 1
}
fn content(&self) -> &(Option<Markdown>, Markdown) {
self.content.get_or_init(|| {
let (server_name, hover) = &self.hovers[self.active_index];
// Only render the header when there is more than one hover response.
let header = (self.hovers.len() > 1).then(|| {
Markdown::new(
format!(
"**[{}/{}] {}**",
self.active_index + 1,
self.hovers.len(),
server_name
),
self.config_loader.clone(),
)
});
let body = Markdown::new(
hover_contents_to_string(&hover.contents),
self.config_loader.clone(),
);
(header, body)
})
&self.contents[self.active_index]
}
fn set_index(&mut self, index: usize) {
assert!((0..self.hovers.len()).contains(&index));
assert!((0..self.contents.len()).contains(&index));
self.active_index = index;
// Reset the cached markdown:
self.content.take();
}
}
@ -100,13 +96,11 @@ impl Component for Hover {
// hover content
let contents = contents.parse(Some(&cx.editor.theme));
let contents_area = area
.clip_top(if self.hovers.len() > 1 {
HEADER_HEIGHT + SEPARATOR_HEIGHT
} else {
0
})
.clip_bottom(u16::from(cx.editor.popup_border()));
let contents_area = area.clip_top(if self.has_header() {
HEADER_HEIGHT + SEPARATOR_HEIGHT
} else {
0
});
let contents_para = Paragraph::new(&contents)
.wrap(Wrap { trim: false })
.scroll((cx.scroll.unwrap_or_default() as u16, 0));
@ -132,7 +126,7 @@ impl Component for Hover {
crate::ui::text::required_size(&contents, max_text_width);
let width = PADDING_HORIZONTAL + header_width.max(content_width);
let height = if self.hovers.len() > 1 {
let height = if self.has_header() {
PADDING_TOP + HEADER_HEIGHT + SEPARATOR_HEIGHT + content_height + PADDING_BOTTOM
} else {
PADDING_TOP + content_height + PADDING_BOTTOM
@ -151,12 +145,12 @@ impl Component for Hover {
let index = self
.active_index
.checked_sub(1)
.unwrap_or(self.hovers.len() - 1);
.unwrap_or(self.contents.len() - 1);
self.set_index(index);
EventResult::Consumed(None)
}
alt!('n') => {
self.set_index((self.active_index + 1) % self.hovers.len());
self.set_index((self.active_index + 1) % self.contents.len());
EventResult::Consumed(None)
}
_ => EventResult::Ignored(None),
@ -164,13 +158,13 @@ impl Component for Hover {
}
}
fn hover_contents_to_string(contents: &lsp::HoverContents) -> String {
fn marked_string_to_markdown(contents: &lsp::MarkedString) -> String {
fn hover_contents_to_string(contents: lsp::HoverContents) -> String {
fn marked_string_to_markdown(contents: lsp::MarkedString) -> String {
match contents {
lsp::MarkedString::String(contents) => contents.clone(),
lsp::MarkedString::String(contents) => contents,
lsp::MarkedString::LanguageString(string) => {
if string.language == "markdown" {
string.value.clone()
string.value
} else {
format!("```{}\n{}\n```", string.language, string.value)
}
@ -180,10 +174,10 @@ fn hover_contents_to_string(contents: &lsp::HoverContents) -> String {
match contents {
lsp::HoverContents::Scalar(contents) => marked_string_to_markdown(contents),
lsp::HoverContents::Array(contents) => contents
.iter()
.into_iter()
.map(marked_string_to_markdown)
.collect::<Vec<_>>()
.join("\n\n"),
lsp::HoverContents::Markup(contents) => contents.value.clone(),
lsp::HoverContents::Markup(contents) => contents.value,
}
}

View file

@ -793,3 +793,30 @@ fn foo() {
Ok(())
}
#[tokio::test(flavor = "multi_thread")]
async fn macro_play_within_macro_record() -> anyhow::Result<()> {
    // Regression test: replaying a macro while recording another must not
    // re-record the inner macro's keystrokes into the outer recording.
    // <https://github.com/helix-editor/helix/issues/12697>
    //
    // * `"aQihello<esc>Q` record a macro to register 'a' which inserts "hello"
    // * `Q"aq<space>world<esc>Q` record a macro to the default macro register which plays the
    // macro in register 'a' and then inserts " world"
    // * `%d` clear the buffer
    // * `q` replay the macro in the default macro register
    // * `i<ret>` add a newline at the end
    //
    // The inner macro in register 'a' should replay within the outer macro exactly once to insert
    // "hello world".
    // `test` takes (initial document, key sequence, expected document);
    // `#[|]#` marks the selection in the document text.
    test((
        indoc! {"\
            #[|]#
        "},
        r#""aQihello<esc>QQ"aqi<space>world<esc>Q%dqi<ret>"#,
        indoc! {"\
            hello world
            #[|]#"},
    ))
    .await?;
    Ok(())
}

View file

@ -20,7 +20,7 @@ helix-core = { path = "../helix-core" }
bitflags.workspace = true
cassowary = "0.3"
unicode-segmentation = "1.12"
unicode-segmentation.workspace = true
crossterm = { version = "0.28", optional = true }
termini = "1.0"
serde = { version = "1", "optional" = true, features = ["derive"]}

View file

@ -122,10 +122,11 @@ mod external {
Self::Tmux
} else if binary_exists("pbcopy") && binary_exists("pbpaste") {
Self::Pasteboard
} else if cfg!(feature = "term") {
Self::Termcode
} else {
Self::None
#[cfg(feature = "term")]
return Self::Termcode;
#[cfg(not(feature = "term"))]
return Self::None;
}
}

View file

@ -363,7 +363,7 @@ pub struct Config {
}
#[derive(Debug, Clone, PartialEq, Deserialize, Serialize, Eq, PartialOrd, Ord)]
#[serde(rename_all = "kebab-case", default)]
#[serde(default, rename_all = "kebab-case", deny_unknown_fields)]
pub struct SmartTabConfig {
pub enable: bool,
pub supersede_menu: bool,

View file

@ -342,6 +342,7 @@ impl FromStr for UnderlineStyle {
}
}
#[cfg(feature = "term")]
impl From<UnderlineStyle> for crossterm::style::Attribute {
fn from(style: UnderlineStyle) -> Self {
match style {

View file

@ -5,6 +5,7 @@ use helix_core::Selection;
use helix_dap::{self as dap, Client, ConnectionType, Payload, Request, ThreadId};
use helix_lsp::block_on;
use log::warn;
use serde_json::json;
use std::fmt::Write;
use std::path::PathBuf;
@ -141,7 +142,6 @@ pub fn breakpoints_changed(
impl Editor {
pub async fn handle_debugger_message(&mut self, payload: helix_dap::Payload) -> bool {
use dap::requests::RunInTerminal;
use helix_dap::{events, Event};
let debugger = match self.debugger.as_mut() {
@ -149,250 +149,260 @@ impl Editor {
None => return false,
};
match payload {
Payload::Event(ev) => match *ev {
Event::Stopped(events::Stopped {
thread_id,
description,
text,
reason,
all_threads_stopped,
..
}) => {
let all_threads_stopped = all_threads_stopped.unwrap_or_default();
if all_threads_stopped {
if let Ok(response) = debugger.request::<dap::requests::Threads>(()).await {
for thread in response.threads {
fetch_stack_trace(debugger, thread.id).await;
}
select_thread_id(self, thread_id.unwrap_or_default(), false).await;
}
} else if let Some(thread_id) = thread_id {
debugger.thread_states.insert(thread_id, reason.clone()); // TODO: dap uses "type" || "reason" here
// whichever thread stops is made "current" (if no previously selected thread).
select_thread_id(self, thread_id, false).await;
}
let scope = match thread_id {
Some(id) => format!("Thread {}", id),
None => "Target".to_owned(),
};
let mut status = format!("{} stopped because of {}", scope, reason);
if let Some(desc) = description {
write!(status, " {}", desc).unwrap();
}
if let Some(text) = text {
write!(status, " {}", text).unwrap();
}
if all_threads_stopped {
status.push_str(" (all threads stopped)");
}
self.set_status(status);
}
Event::Continued(events::Continued { thread_id, .. }) => {
debugger
.thread_states
.insert(thread_id, "running".to_owned());
if debugger.thread_id == Some(thread_id) {
debugger.resume_application();
}
}
Event::Thread(_) => {
// TODO: update thread_states, make threads request
}
Event::Breakpoint(events::Breakpoint { reason, breakpoint }) => {
match &reason[..] {
"new" => {
if let Some(source) = breakpoint.source {
self.breakpoints
.entry(source.path.unwrap()) // TODO: no unwraps
.or_default()
.push(Breakpoint {
id: breakpoint.id,
verified: breakpoint.verified,
message: breakpoint.message,
line: breakpoint.line.unwrap().saturating_sub(1), // TODO: no unwrap
column: breakpoint.column,
..Default::default()
});
}
}
"changed" => {
for breakpoints in self.breakpoints.values_mut() {
if let Some(i) =
breakpoints.iter().position(|b| b.id == breakpoint.id)
{
breakpoints[i].verified = breakpoint.verified;
breakpoints[i].message = breakpoint
.message
.clone()
.or_else(|| breakpoints[i].message.take());
breakpoints[i].line = breakpoint
.line
.map_or(breakpoints[i].line, |line| line.saturating_sub(1));
breakpoints[i].column =
breakpoint.column.or(breakpoints[i].column);
}
}
}
"removed" => {
for breakpoints in self.breakpoints.values_mut() {
if let Some(i) =
breakpoints.iter().position(|b| b.id == breakpoint.id)
{
breakpoints.remove(i);
}
}
}
reason => {
warn!("Unknown breakpoint event: {}", reason);
}
}
}
Event::Output(events::Output {
category, output, ..
}) => {
let prefix = match category {
Some(category) => {
if &category == "telemetry" {
return false;
}
format!("Debug ({}):", category)
}
None => "Debug:".to_owned(),
};
log::info!("{}", output);
self.set_status(format!("{} {}", prefix, output));
}
Event::Initialized(_) => {
// send existing breakpoints
for (path, breakpoints) in &mut self.breakpoints {
// TODO: call futures in parallel, await all
let _ = breakpoints_changed(debugger, path.clone(), breakpoints);
}
// TODO: fetch breakpoints (in case we're attaching)
if debugger.configuration_done().await.is_ok() {
self.set_status("Debugged application started");
}; // TODO: do we need to handle error?
}
Event::Terminated(terminated) => {
let restart_args = if let Some(terminated) = terminated {
terminated.restart
} else {
None
};
let disconnect_args = Some(DisconnectArguments {
restart: Some(restart_args.is_some()),
terminate_debuggee: None,
suspend_debuggee: None,
});
if let Err(err) = debugger.disconnect(disconnect_args).await {
self.set_error(format!(
"Cannot disconnect debugger upon terminated event receival {:?}",
err
));
Payload::Event(event) => {
let event = match Event::parse(&event.event, event.body) {
Ok(event) => event,
Err(dap::Error::Unhandled) => {
log::info!("Discarding unknown DAP event '{}'", event.event);
return false;
}
Err(err) => {
log::warn!("Discarding invalid DAP event '{}': {err}", event.event);
return false;
}
};
match event {
Event::Stopped(events::StoppedBody {
thread_id,
description,
text,
reason,
all_threads_stopped,
..
}) => {
let all_threads_stopped = all_threads_stopped.unwrap_or_default();
match restart_args {
Some(restart_args) => {
log::info!("Attempting to restart debug session.");
let connection_type = match debugger.connection_type() {
Some(connection_type) => connection_type,
None => {
self.set_error("No starting request found, to be used in restarting the debugging session.");
return false;
if all_threads_stopped {
if let Ok(response) =
debugger.request::<dap::requests::Threads>(()).await
{
for thread in response.threads {
fetch_stack_trace(debugger, thread.id).await;
}
};
select_thread_id(self, thread_id.unwrap_or_default(), false).await;
}
} else if let Some(thread_id) = thread_id {
debugger.thread_states.insert(thread_id, reason.clone()); // TODO: dap uses "type" || "reason" here
let relaunch_resp = if let ConnectionType::Launch = connection_type {
debugger.launch(restart_args).await
} else {
debugger.attach(restart_args).await
};
// whichever thread stops is made "current" (if no previously selected thread).
select_thread_id(self, thread_id, false).await;
}
if let Err(err) = relaunch_resp {
self.set_error(format!(
"Failed to restart debugging session: {:?}",
err
));
let scope = match thread_id {
Some(id) => format!("Thread {}", id),
None => "Target".to_owned(),
};
let mut status = format!("{} stopped because of {}", scope, reason);
if let Some(desc) = description {
write!(status, " {}", desc).unwrap();
}
if let Some(text) = text {
write!(status, " {}", text).unwrap();
}
if all_threads_stopped {
status.push_str(" (all threads stopped)");
}
self.set_status(status);
}
Event::Continued(events::ContinuedBody { thread_id, .. }) => {
debugger
.thread_states
.insert(thread_id, "running".to_owned());
if debugger.thread_id == Some(thread_id) {
debugger.resume_application();
}
}
Event::Thread(_) => {
// TODO: update thread_states, make threads request
}
Event::Breakpoint(events::BreakpointBody { reason, breakpoint }) => {
match &reason[..] {
"new" => {
if let Some(source) = breakpoint.source {
self.breakpoints
.entry(source.path.unwrap()) // TODO: no unwraps
.or_default()
.push(Breakpoint {
id: breakpoint.id,
verified: breakpoint.verified,
message: breakpoint.message,
line: breakpoint.line.unwrap().saturating_sub(1), // TODO: no unwrap
column: breakpoint.column,
..Default::default()
});
}
}
"changed" => {
for breakpoints in self.breakpoints.values_mut() {
if let Some(i) =
breakpoints.iter().position(|b| b.id == breakpoint.id)
{
breakpoints[i].verified = breakpoint.verified;
breakpoints[i].message = breakpoint
.message
.clone()
.or_else(|| breakpoints[i].message.take());
breakpoints[i].line =
breakpoint.line.map_or(breakpoints[i].line, |line| {
line.saturating_sub(1)
});
breakpoints[i].column =
breakpoint.column.or(breakpoints[i].column);
}
}
}
"removed" => {
for breakpoints in self.breakpoints.values_mut() {
if let Some(i) =
breakpoints.iter().position(|b| b.id == breakpoint.id)
{
breakpoints.remove(i);
}
}
}
reason => {
warn!("Unknown breakpoint event: {}", reason);
}
}
None => {
self.debugger = None;
self.set_status(
"Terminated debugging session and disconnected debugger.",
);
}
Event::Output(events::OutputBody {
category, output, ..
}) => {
let prefix = match category {
Some(category) => {
if &category == "telemetry" {
return false;
}
format!("Debug ({}):", category)
}
None => "Debug:".to_owned(),
};
log::info!("{}", output);
self.set_status(format!("{} {}", prefix, output));
}
Event::Initialized(_) => {
// send existing breakpoints
for (path, breakpoints) in &mut self.breakpoints {
// TODO: call futures in parallel, await all
let _ = breakpoints_changed(debugger, path.clone(), breakpoints);
}
// TODO: fetch breakpoints (in case we're attaching)
if debugger.configuration_done().await.is_ok() {
self.set_status("Debugged application started");
}; // TODO: do we need to handle error?
}
Event::Terminated(terminated) => {
let restart_args = if let Some(terminated) = terminated {
terminated.restart
} else {
None
};
let disconnect_args = Some(DisconnectArguments {
restart: Some(restart_args.is_some()),
terminate_debuggee: None,
suspend_debuggee: None,
});
if let Err(err) = debugger.disconnect(disconnect_args).await {
self.set_error(format!(
"Cannot disconnect debugger upon terminated event receival {:?}",
err
));
return false;
}
match restart_args {
Some(restart_args) => {
log::info!("Attempting to restart debug session.");
let connection_type = match debugger.connection_type() {
Some(connection_type) => connection_type,
None => {
self.set_error("No starting request found, to be used in restarting the debugging session.");
return false;
}
};
let relaunch_resp = if let ConnectionType::Launch = connection_type
{
debugger.launch(restart_args).await
} else {
debugger.attach(restart_args).await
};
if let Err(err) = relaunch_resp {
self.set_error(format!(
"Failed to restart debugging session: {:?}",
err
));
}
}
None => {
self.debugger = None;
self.set_status(
"Terminated debugging session and disconnected debugger.",
);
}
}
}
}
Event::Exited(resp) => {
let exit_code = resp.exit_code;
if exit_code != 0 {
self.set_error(format!(
"Debuggee failed to exit successfully (exit code: {exit_code})."
));
Event::Exited(resp) => {
let exit_code = resp.exit_code;
if exit_code != 0 {
self.set_error(format!(
"Debuggee failed to exit successfully (exit code: {exit_code})."
));
}
}
ev => {
log::warn!("Unhandled event {:?}", ev);
return false; // return early to skip render
}
}
ev => {
log::warn!("Unhandled event {:?}", ev);
return false; // return early to skip render
}
},
}
Payload::Response(_) => unreachable!(),
Payload::Request(request) => match request.command.as_str() {
RunInTerminal::COMMAND => {
let arguments: dap::requests::RunInTerminalArguments =
serde_json::from_value(request.arguments.unwrap_or_default()).unwrap();
// TODO: no unwrap
let config = match self.config().terminal.clone() {
Some(config) => config,
None => {
Payload::Request(request) => {
let reply = match Request::parse(&request.command, request.arguments) {
Ok(Request::RunInTerminal(arguments)) => {
let config = self.config();
let Some(config) = config.terminal.as_ref() else {
self.set_error("No external terminal defined");
return true;
}
};
};
// Re-borrowing debugger to avoid issues when loading config
let debugger = match self.debugger.as_mut() {
Some(debugger) => debugger,
None => return false,
};
let process = match std::process::Command::new(&config.command)
.args(&config.args)
.arg(arguments.args.join(" "))
.spawn()
{
Ok(process) => process,
Err(err) => {
self.set_error(format!(
"Error starting external terminal: {}",
err
));
return true;
}
};
let process = match std::process::Command::new(config.command)
.args(config.args)
.arg(arguments.args.join(" "))
.spawn()
{
Ok(process) => process,
Err(err) => {
self.set_error(format!("Error starting external terminal: {}", err));
return true;
}
};
Ok(json!(dap::requests::RunInTerminalResponse {
process_id: Some(process.id()),
shell_process_id: None,
}))
}
Err(err) => Err(err),
};
let _ = debugger
.reply(
request.seq,
dap::requests::RunInTerminal::COMMAND,
serde_json::to_value(dap::requests::RunInTerminalResponse {
process_id: Some(process.id()),
shell_process_id: None,
})
.map_err(|e| e.into()),
)
.await;
if let Some(debugger) = self.debugger.as_mut() {
debugger
.reply(request.seq, &request.command, reply)
.await
.ok();
}
_ => log::error!("DAP reverse request not implemented: {:?}", request),
},
}
}
true
}

View file

@ -1334,6 +1334,8 @@ scope = "source.svelte"
injection-regex = "svelte"
file-types = ["svelte"]
indent = { tab-width = 2, unit = " " }
comment-token = "//"
block-comment-tokens = { start = "/*", end = "*/" }
language-servers = [ "svelteserver" ]
[[grammar]]
@ -2199,7 +2201,7 @@ language-servers = [ "cairo-language-server" ]
[[grammar]]
name = "cairo"
source = { git = "https://github.com/starkware-libs/tree-sitter-cairo", rev = "e3a0212261c125cb38248458cd856c0ffee2b398" }
source = { git = "https://github.com/starkware-libs/tree-sitter-cairo", rev = "4c6a25680546761b80a710ead1dd34e76c203125" }
[[language]]
name = "cpon"
@ -2829,7 +2831,6 @@ file-types = [
"network",
{ glob = ".editorconfig" },
{ glob = ".npmrc" },
{ glob = "ghostty/config" },
{ glob = "hgrc" },
{ glob = "npmrc" },
{ glob = "rclone.conf" },
@ -4087,3 +4088,14 @@ file-types = ["multicursor"]
[[grammar]]
name = "multicursor"
source = { git = "https://github.com/nik-rev/tree-sitter-multicursor", rev = "f6f868ecfe3de2fb2cd815cb40be55493f699a49" }
[[language]]
name = "ghostty"
scope = "source.ghostty"
file-types = [{ glob = "ghostty/config" }]
comment-tokens = "#"
indent = { tab-width = 2, unit = " " }
[[grammar]]
name = "ghostty"
source = { git = "https://github.com/bezhermoso/tree-sitter-ghostty" , rev = "8438a93b44367e962b2ea3a3b6511885bebd196a" }

View file

@ -71,6 +71,8 @@
"<"
">"
] @punctuation.bracket)
(closure_parameters
"|" @punctuation.bracket)
; ---
; Variables
@ -92,6 +94,8 @@
(parameter
pattern: (identifier) @variable.parameter)
(closure_parameters
(identifier) @variable.parameter)
; -------
; Keywords
; -------

View file

@ -115,6 +115,16 @@
(#not-same-line? @expr-start @pattern-guard)
) @indent
; Align closure parameters if they span more than one line
(closure_parameters
"|"
.
(_) @anchor
(_) @expr-end
.
(#not-same-line? @anchor @expr-end)
) @align
(for_expression
"in" @in
.

View file

@ -7,6 +7,7 @@
(type_item)
(trait_item)
(impl_item)
(closure_expression)
(block)
] @local.scope
@ -20,6 +21,8 @@
(constrained_type_parameter
left: (type_identifier) @local.definition)
(closure_parameters (identifier) @local.definition)
; References
(identifier) @local.reference
(type_identifier) @local.reference

View file

@ -1,6 +1,9 @@
(function_item
body: (_) @function.inside) @function.around
(closure_expression
body: (_) @function.inside) @function.around
(struct_item
body: (_) @class.inside) @class.around
@ -16,6 +19,9 @@
(parameters
((_) @parameter.inside . ","? @parameter.around) @parameter.around)
(closure_parameters
((_) @parameter.inside . ","? @parameter.around) @parameter.around)
(type_parameters
((_) @parameter.inside . ","? @parameter.around) @parameter.around)

View file

@ -0,0 +1,68 @@
; extends
; Comments
(comment) @comment
; Keys
(property) @variable
; Values
(boolean) @constant.builtin.boolean
[
(number)
(adjustment)
] @constant.numeric
[
"+"
"="
(keybind_trigger ">")
] @operator
(":") @punctuation.delimiter
; (color) are hex values
(color "#" @punctuation.special
(#eq? @punctuation.special "#"))
(path_value "?" @keyword.control.conditional
(#eq? @keyword.control.conditional "?"))
; `palette`
(palette_index) @variable.other.member
; `path_directive`
(path_directive (property) @keyword.import)
(path_directive (path_value (string) @string.special.path ))
(action_name) @function.builtin
(action_argument (string) @variable.parameter )
; (tuple)
(tuple "," @punctuation.delimiter.special
(#eq? @punctuation.delimiter.special ","))
[
(string)
(color)
] @string
; clear is a special keyword that clear all existing keybind up to that point
((keybind_value) @keyword
(#eq? @keyword "clear"))
; `keybind`
(keybind_value) @string.special
; NOTE: The order here matters!
[
(key_qualifier)
(keybind_modifier)
] @attribute
[
(modifier_key)
(key)
] @constant.builtin

View file

@ -203,6 +203,13 @@
(builtin_identifier) @keyword.control.import
(#any-of? @keyword.control.import "@import" "@cImport")))
(variable_declaration
(identifier) @variable ; TODO: module
(field_expression
object: (builtin_function
(builtin_identifier) @keyword.control.import
(#any-of? @keyword.control.import "@import" "@cImport"))))
; Functions
(call_expression
@ -223,6 +230,10 @@
.
(identifier) @variable.other.member)
(field_expression
(_)
member: (identifier) @type (#match? @type "^[A-Z_][a-zA-Z0-9_]*"))
(field_expression
(_)
member: (identifier) @variable.other.member)
@ -266,7 +277,7 @@
type: (identifier) @type)
((identifier) @type
(#lua-match? @type "^[A-Z_][a-zA-Z0-9_]*"))
(#match? @type "^[A-Z_][a-zA-Z0-9_]*"))
(variable_declaration
(identifier) @type