Support @schema url
#952
base: main
```diff
@@ -1,7 +1,38 @@
-use crate::FileId;
+use url::Url;
+
+use crate::LuaTypeDeclId;
 
 #[derive(Debug, Clone)]
 pub enum JsonSchemaFile {
     NeedResolve,
-    Resolved(FileId),
+    BadUrl,
+    Resolved(LuaTypeDeclId),
 }
+
+pub fn get_schema_short_name(url: &Url) -> String {
+    const MAX_LEN: usize = 64;
+
+    let url_str = url.as_str();
+    let mut new_name = String::new();
+    for c in url_str.chars().rev() {
+        if new_name.len() >= MAX_LEN {
+            break;
+        }
+
+        if c.is_alphanumeric() || c == '-' || c == '_' || c == '.' {
+            new_name.push(c);
+        } else if !c.is_control() && c != ' ' {
+            new_name.push('_');
+        }
+    }
+
+    let mut result: String = new_name.chars().rev().collect();
+
+    result = result.trim_matches(|c| c == '_' || c == '.').to_string();
+
+    if result.is_empty() {
+        return "schema".to_string();
+    }
+
+    result
+}
```
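
The sanitisation above keeps alphanumerics and `-`, `_`, `.`, maps every other printable non-space character to `_`, stops once 64 bytes have been collected, and trims leading and trailing `_`/`.` from the reversed result. A minimal test sketch of that behaviour, assuming `get_schema_short_name` and the `url` crate are in scope (the test is illustrative, not part of the PR):

```rust
// Illustrative test only; assumes `get_schema_short_name` and the `url` crate
// are available in the module under test.
#[test]
fn schema_short_name_sanitises_url() {
    let url = url::Url::parse("https://json.schemastore.org/package.json").unwrap();
    // ':' and '/' are neither alphanumeric nor one of "-_.", so they become '_';
    // the input is shorter than 64 bytes, so nothing is truncated.
    assert_eq!(
        get_schema_short_name(&url),
        "https___json.schemastore.org_package.json"
    );
}
```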
```diff
@@ -30,7 +30,9 @@ pub use profile::Profile;
 pub use resources::get_best_resources_dir;
 pub use resources::load_resource_from_include_dir;
 use resources::load_resource_std;
+use schema_to_emmylua::SchemaConverter;
 pub use semantic::*;
+use std::collections::HashMap;
 use std::{collections::HashSet, path::PathBuf, sync::Arc};
 pub use test_lib::VirtualWorkspace;
 use tokio_util::sync::CancellationToken;
@@ -260,6 +262,86 @@ impl EmmyLuaAnalysis {
             self.remove_file_by_uri(&uri);
         }
     }
+
+    pub fn check_schema_update(&self) -> bool {
+        self.compilation
+            .get_db()
+            .get_json_schema_index()
+            .has_need_resolve_schemas()
+    }
+
+    pub async fn update_schema(&mut self) {
+        let urls = self
+            .compilation
+            .get_db()
+            .get_json_schema_index()
+            .get_need_resolve_schemas();
+        let mut url_contents = HashMap::new();
+        for url in urls {
+            if url.scheme() == "file" {
+                if let Ok(path) = url.to_file_path() {
+                    if path.exists() {
+                        let result = read_file_with_encoding(&path, "utf-8");
+                        if let Some(content) = result {
+                            url_contents.insert(url.clone(), content);
+                        } else {
+                            log::error!("Failed to read schema file: {:?}", url);
+                        }
+                    }
+                }
+            } else {
+                let result = reqwest::get(url.as_str()).await;
+                if let Ok(response) = result {
+                    if let Ok(content) = response.text().await {
+                        url_contents.insert(url.clone(), content);
+                    } else {
+                        log::error!("Failed to read schema content from URL: {:?}", url);
+                    }
+                } else {
+                    log::error!("Failed to fetch schema from URL: {:?}", url);
+                }
+            }
+        }
+
+        if url_contents.is_empty() {
+            return;
+        }
+
+        let work_dir = std::env::current_dir().unwrap_or_else(|_| PathBuf::from("."));
+        let converter = SchemaConverter::new(true);
+        for (url, json_content) in url_contents {
+            let short_name = get_schema_short_name(&url);
+            match converter.convert_from_str(&json_content) {
+                Ok(convert_result) => {
+                    let path = work_dir.join(short_name);
+                    let Some(file_id) =
+                        self.update_file_by_path(&path, Some(convert_result.annotation_text))
+                    else {
+                        continue;
+                    };
```
|
Comment on lines +310 to +321:

Using `std::env::current_dir()` to place the generated annotation files ties the output to the process working directory; the suggested change below registers the converted annotations under a virtual `emmylua-schema:` URI via `update_file_by_uri` instead:

```rust
let converter = SchemaConverter::new(true);
for (url, json_content) in url_contents {
    match converter.convert_from_str(&json_content) {
        Ok(convert_result) => {
            let Ok(uri) = Uri::parse(&format!("emmylua-schema:{}", url.as_str())) else {
                log::error!("Failed to create schema URI for {}", url);
                continue;
            };
            let Some(file_id) =
                self.update_file_by_uri(&uri, Some(convert_result.annotation_text))
            else {
                continue;
            };
```
```diff
+                    if let Some(f) = self
+                        .compilation
+                        .get_db_mut()
+                        .get_json_schema_index_mut()
+                        .get_schema_file_mut(&url)
+                    {
+                        *f = JsonSchemaFile::Resolved(LuaTypeDeclId::local(
+                            file_id,
+                            &convert_result.root_type_name,
+                        ));
+                    }
+                }
+                Err(e) => {
+                    log::error!("Failed to convert schema from URL {:?}: {}", url, e);
+                }
+            }
+        }
+
+        self.compilation
+            .get_db_mut()
+            .get_json_schema_index_mut()
+            .reset_rest_schemas();
+    }
 }
 
 impl Default for EmmyLuaAnalysis {
```
```diff
@@ -261,6 +261,15 @@ impl WorkspaceManager {
 
         is_workspace_file
     }
+
+    pub async fn check_schema_update(&self) {
+        let read_analysis = self.analysis.read().await;
+        if read_analysis.check_schema_update() {
+            drop(read_analysis);
+            let mut write_analysis = self.analysis.write().await;
+            write_analysis.update_schema().await;
+        }
```
Comment on lines +266 to +271:

The current pattern of dropping a read lock to acquire a write lock can introduce a race condition. Another thread could perform the schema update in the small window between the read lock being released and the write lock being acquired, leading to redundant work. To prevent this, it's a good practice to re-check the condition after acquiring the write lock.

```rust
if self.analysis.read().await.check_schema_update() {
    let mut write_analysis = self.analysis.write().await;
    // Re-check after acquiring write lock to avoid race condition.
    if write_analysis.check_schema_update() {
        write_analysis.update_schema().await;
    }
}
```
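
A minimal self-contained sketch of the same re-check-under-the-write-lock idea using tokio's `RwLock`; the `refresh_if_needed` helper and its `Vec<String>` state are illustrative assumptions, not types from this PR:

```rust
use std::sync::Arc;
use tokio::sync::RwLock;

// Illustrative state only; in the PR the guarded value is the analysis itself.
async fn refresh_if_needed(state: Arc<RwLock<Vec<String>>>) {
    // Cheap check under the read lock; the guard is a temporary that is
    // dropped as soon as the condition has been evaluated.
    if state.read().await.is_empty() {
        let mut guard = state.write().await;
        // Re-check under the write lock: another task may have done the work
        // in the gap between releasing the read lock and acquiring this one.
        if guard.is_empty() {
            guard.push("resolved".to_string());
        }
    }
}
```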
```diff
+    }
 }
 
 pub fn load_emmy_config(config_root: Option<PathBuf>, client_config: ClientConfig) -> Arc<Emmyrc> {
```
The current implementation doesn't check the HTTP status code of the response from reqwest. This means it might try to parse an error page (e.g., for a 404 Not Found) as a JSON schema, which will fail later during conversion. It's important to check that the request was successful (i.e., has a 2xx status code) before processing the response body.
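
A minimal sketch of how the remote branch of `update_schema` could apply that check, using reqwest's `error_for_status()` to reject non-2xx responses before the body is read; the `fetch_remote_schema` helper is a hypothetical wrapper, not code from this PR:

```rust
use std::collections::HashMap;
use url::Url;

// Hypothetical helper mirroring the `else` branch of `update_schema`,
// with a status check added before the body is read.
async fn fetch_remote_schema(url: &Url, url_contents: &mut HashMap<Url, String>) {
    match reqwest::get(url.as_str()).await {
        Ok(response) => {
            // `error_for_status()` turns 4xx/5xx responses into an Err, so an
            // HTML error page is never handed to the schema converter.
            match response.error_for_status() {
                Ok(ok_response) => match ok_response.text().await {
                    Ok(content) => {
                        url_contents.insert(url.clone(), content);
                    }
                    Err(e) => {
                        log::error!("Failed to read schema content from URL {:?}: {}", url, e)
                    }
                },
                Err(e) => log::error!("Schema URL {:?} returned an error status: {}", url, e),
            }
        }
        Err(e) => log::error!("Failed to fetch schema from URL {:?}: {}", url, e),
    }
}
```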