author     Daniel Baumann <daniel.baumann@progress-linux.org>  2024-04-14 19:55:48 +0000
committer  Daniel Baumann <daniel.baumann@progress-linux.org>  2024-04-14 19:55:48 +0000
commit     8be448d3881909fb0ce4b033cad71aa7575de0aa (patch)
tree       da33caff06645347a08c3c9c56dd703e4acb5aa3 /generator/plugins
parent     Initial commit. (diff)
Adding upstream version 2023.0.0. (upstream/2023.0.0, upstream)
Signed-off-by: Daniel Baumann <daniel.baumann@progress-linux.org>
Diffstat (limited to 'generator/plugins')
-rw-r--r--  generator/plugins/__init__.py  2
-rw-r--r--  generator/plugins/dotnet/__init__.py  4
-rw-r--r--  generator/plugins/dotnet/custom/CustomArrayConverter.cs  42
-rw-r--r--  generator/plugins/dotnet/custom/CustomObjectConverter.cs  40
-rw-r--r--  generator/plugins/dotnet/custom/CustomStringConverter.cs  40
-rw-r--r--  generator/plugins/dotnet/custom/Direction.cs  12
-rw-r--r--  generator/plugins/dotnet/custom/DocumentSelectorConverter.cs  34
-rw-r--r--  generator/plugins/dotnet/custom/IMessage.cs  4
-rw-r--r--  generator/plugins/dotnet/custom/INotification.cs  6
-rw-r--r--  generator/plugins/dotnet/custom/IOrType.cs  4
-rw-r--r--  generator/plugins/dotnet/custom/IPartialResultParams.cs  15
-rw-r--r--  generator/plugins/dotnet/custom/IRequest.cs  9
-rw-r--r--  generator/plugins/dotnet/custom/IResponse.cs  9
-rw-r--r--  generator/plugins/dotnet/custom/LSPAnyConverter.cs  61
-rw-r--r--  generator/plugins/dotnet/custom/LSPRequest.cs  22
-rw-r--r--  generator/plugins/dotnet/custom/LSPResponse.cs  13
-rw-r--r--  generator/plugins/dotnet/custom/MessageDirection.cs  8
-rw-r--r--  generator/plugins/dotnet/custom/OrType.cs  138
-rw-r--r--  generator/plugins/dotnet/custom/OrTypeArrayConverter.cs  145
-rw-r--r--  generator/plugins/dotnet/custom/OrTypeConverter.cs  595
-rw-r--r--  generator/plugins/dotnet/custom/Proposed.cs  17
-rw-r--r--  generator/plugins/dotnet/custom/ResponseError.cs  27
-rw-r--r--  generator/plugins/dotnet/custom/Since.cs  17
-rw-r--r--  generator/plugins/dotnet/custom/Validators.cs  20
-rw-r--r--  generator/plugins/dotnet/dotnet_classes.py  1024
-rw-r--r--  generator/plugins/dotnet/dotnet_commons.py  60
-rw-r--r--  generator/plugins/dotnet/dotnet_constants.py  5
-rw-r--r--  generator/plugins/dotnet/dotnet_enums.py  51
-rw-r--r--  generator/plugins/dotnet/dotnet_helpers.py  215
-rw-r--r--  generator/plugins/dotnet/dotnet_special_classes.py  158
-rw-r--r--  generator/plugins/dotnet/dotnet_utils.py  58
-rw-r--r--  generator/plugins/python/__init__.py  4
-rw-r--r--  generator/plugins/python/utils.py  1122
-rw-r--r--  generator/plugins/rust/__init__.py  4
-rw-r--r--  generator/plugins/rust/rust_commons.py  681
-rw-r--r--  generator/plugins/rust/rust_constants.py  2
-rw-r--r--  generator/plugins/rust/rust_enum.py  51
-rw-r--r--  generator/plugins/rust/rust_file_header.py  15
-rw-r--r--  generator/plugins/rust/rust_lang_utils.py  73
-rw-r--r--  generator/plugins/rust/rust_structs.py  460
-rw-r--r--  generator/plugins/rust/rust_utils.py  68
-rw-r--r--  generator/plugins/testdata/__init__.py  4
-rw-r--r--  generator/plugins/testdata/testdata_generator.py  498
-rw-r--r--  generator/plugins/testdata/testdata_utils.py  34
44 files changed, 5871 insertions, 0 deletions
diff --git a/generator/plugins/__init__.py b/generator/plugins/__init__.py
new file mode 100644
index 0000000..5b7f7a9
--- /dev/null
+++ b/generator/plugins/__init__.py
@@ -0,0 +1,2 @@
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License.
diff --git a/generator/plugins/dotnet/__init__.py b/generator/plugins/dotnet/__init__.py
new file mode 100644
index 0000000..8c6065b
--- /dev/null
+++ b/generator/plugins/dotnet/__init__.py
@@ -0,0 +1,4 @@
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License.
+
+from .dotnet_utils import generate_from_spec as generate
diff --git a/generator/plugins/dotnet/custom/CustomArrayConverter.cs b/generator/plugins/dotnet/custom/CustomArrayConverter.cs
new file mode 100644
index 0000000..d34eb62
--- /dev/null
+++ b/generator/plugins/dotnet/custom/CustomArrayConverter.cs
@@ -0,0 +1,42 @@
+using Newtonsoft.Json;
+using Newtonsoft.Json.Linq;
+using System.Collections.Immutable;
+
+public class CustomArrayConverter<T> : JsonConverter<ImmutableArray<T>>
+{
+ public override ImmutableArray<T> ReadJson(JsonReader reader, Type objectType, ImmutableArray<T> existingValue, bool hasExistingValue, JsonSerializer serializer)
+ {
+ if (reader.TokenType == JsonToken.Null)
+ {
+ return default(ImmutableArray<T>);
+ }
+
+ JArray array = JArray.Load(reader);
+ ImmutableArray<T>.Builder builder = ImmutableArray.CreateBuilder<T>();
+
+ for (int i = 0; i < array.Count; i++)
+ {
+ builder.Add((T)array[i].ToObject(typeof(T))!);
+ }
+
+ return builder.ToImmutable();
+
+ }
+
+ public override void WriteJson(JsonWriter writer, ImmutableArray<T> value, JsonSerializer serializer)
+ {
+ if (value.IsDefault)
+ {
+ writer.WriteNull();
+ }
+ else
+ {
+ writer.WriteStartArray();
+ foreach (var item in value)
+ {
+ serializer.Serialize(writer, item);
+ }
+ writer.WriteEndArray();
+ }
+ }
+}
\ No newline at end of file
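A minimal usage sketch of the converter above (editorial, not part of the diff; assumes Newtonsoft.Json and the generated types live in the same project — the Example type and Tags property are hypothetical):

using Newtonsoft.Json;
using System.Collections.Immutable;

public class Example
{
    [JsonConverter(typeof(CustomArrayConverter<string>))]
    public ImmutableArray<string> Tags { get; init; }
}

public static class CustomArrayConverterUsage
{
    public static void Run()
    {
        // Serializes to {"Tags":["a","b"]}; a default (uninitialized) ImmutableArray is written as null.
        string json = JsonConvert.SerializeObject(new Example { Tags = ImmutableArray.Create("a", "b") });
        Example roundTripped = JsonConvert.DeserializeObject<Example>(json)!;
    }
}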
diff --git a/generator/plugins/dotnet/custom/CustomObjectConverter.cs b/generator/plugins/dotnet/custom/CustomObjectConverter.cs
new file mode 100644
index 0000000..e03b588
--- /dev/null
+++ b/generator/plugins/dotnet/custom/CustomObjectConverter.cs
@@ -0,0 +1,40 @@
+using Newtonsoft.Json;
+using Newtonsoft.Json.Linq;
+using System;
+
+
+class CustomObjectConverter<T> : JsonConverter<T> where T : Dictionary<string, object?>
+{
+ public override T ReadJson(JsonReader reader, Type objectType, T? existingValue, bool hasExistingValue, JsonSerializer serializer)
+ {
+ if (reader.TokenType == JsonToken.Null)
+ {
+ return default(T)!;
+ }
+
+ Dictionary<string, object?>? o = serializer.Deserialize<Dictionary<string, object?>>(reader);
+ if (o == null)
+ {
+ return default(T)!;
+ }
+ return (T)Activator.CreateInstance(typeof(T), o)! ?? default(T)!;
+ }
+
+ public override void WriteJson(JsonWriter writer, T? value, JsonSerializer serializer)
+ {
+ if (value is null)
+ {
+ writer.WriteNull();
+ }
+ else
+ {
+ writer.WriteStartObject();
+ foreach (var kvp in value)
+ {
+ writer.WritePropertyName(kvp.Key);
+ serializer.Serialize(writer, kvp.Value);
+ }
+ writer.WriteEndObject();
+ }
+ }
+}
\ No newline at end of file
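A hedged sketch of how this converter could be exercised (not part of the diff): the generic constraint requires a Dictionary<string, object?>-derived type, and ReadJson builds it through a constructor that accepts the deserialized dictionary. StringMap is a hypothetical stand-in for such a type.

using Newtonsoft.Json;
using System.Collections.Generic;

// Hypothetical dictionary-backed type; the converter's Activator.CreateInstance call
// needs a constructor taking the deserialized Dictionary<string, object?>.
public class StringMap : Dictionary<string, object?>
{
    public StringMap(Dictionary<string, object?> values) : base(values) { }
}

public static class CustomObjectConverterUsage
{
    public static void Run()
    {
        var settings = new JsonSerializerSettings();
        settings.Converters.Add(new CustomObjectConverter<StringMap>());

        StringMap map = JsonConvert.DeserializeObject<StringMap>(@"{""a"": 1, ""b"": null}", settings)!;
        string json = JsonConvert.SerializeObject(map, settings); // {"a":1,"b":null}
    }
}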
diff --git a/generator/plugins/dotnet/custom/CustomStringConverter.cs b/generator/plugins/dotnet/custom/CustomStringConverter.cs
new file mode 100644
index 0000000..33a04df
--- /dev/null
+++ b/generator/plugins/dotnet/custom/CustomStringConverter.cs
@@ -0,0 +1,40 @@
+using Newtonsoft.Json;
+using Newtonsoft.Json.Linq;
+using System;
+
+public class CustomStringConverter<T> : JsonConverter<T> where T : class
+{
+ public override T? ReadJson(JsonReader reader, Type objectType, T? existingValue, bool hasExistingValue, JsonSerializer serializer)
+ {
+ if (reader.TokenType == JsonToken.String && reader.Value is string str)
+ {
+ return Activator.CreateInstance(typeof(T), str) as T;
+ }
+ else if (reader.TokenType == JsonToken.Null)
+ {
+ return null;
+ }
+
+ throw new JsonSerializationException($"Unexpected token type '{reader.TokenType}' while deserializing '{objectType.Name}'.");
+ }
+
+ public override void WriteJson(JsonWriter writer, T? value, JsonSerializer serializer)
+ {
+ if (value is null)
+ {
+ writer.WriteNull();
+ }
+ else if (value is Uri u)
+ {
+ writer.WriteValue(u.AbsoluteUri);
+ }
+ else if (value is T t)
+ {
+ writer.WriteValue(t.ToString());
+ }
+ else
+ {
+ throw new ArgumentException($"{nameof(value)} must be of type {nameof(T)}.");
+ }
+ }
+}
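A minimal sketch (not part of the diff) of the Uri case this converter is generated for — get_converter in dotnet_classes.py below emits CustomStringConverter<Uri> for DocumentUri/URI fields; the Located type here is hypothetical:

using Newtonsoft.Json;
using System;

public class Located
{
    [JsonConverter(typeof(CustomStringConverter<Uri>))]
    public Uri? Uri { get; init; }
}

public static class CustomStringConverterUsage
{
    public static void Run()
    {
        // ReadJson builds the Uri from the JSON string via its string constructor.
        var doc = JsonConvert.DeserializeObject<Located>(@"{""Uri"": ""file:///tmp/a.txt""}")!;
        // WriteJson takes the Uri branch and emits AbsoluteUri.
        string json = JsonConvert.SerializeObject(doc); // {"Uri":"file:///tmp/a.txt"}
    }
}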
diff --git a/generator/plugins/dotnet/custom/Direction.cs b/generator/plugins/dotnet/custom/Direction.cs
new file mode 100644
index 0000000..757acd3
--- /dev/null
+++ b/generator/plugins/dotnet/custom/Direction.cs
@@ -0,0 +1,12 @@
+using System;
+
+[AttributeUsage(AttributeTargets.Class | AttributeTargets.Property | AttributeTargets.Enum)]
+public class DirectionAttribute : Attribute
+{
+ public DirectionAttribute(MessageDirection direction)
+ {
+ Direction = direction;
+ }
+
+ public MessageDirection Direction { get; }
+}
\ No newline at end of file
diff --git a/generator/plugins/dotnet/custom/DocumentSelectorConverter.cs b/generator/plugins/dotnet/custom/DocumentSelectorConverter.cs
new file mode 100644
index 0000000..3124416
--- /dev/null
+++ b/generator/plugins/dotnet/custom/DocumentSelectorConverter.cs
@@ -0,0 +1,34 @@
+using Newtonsoft.Json;
+using Newtonsoft.Json.Linq;
+using System;
+
+public class DocumentSelectorConverter : JsonConverter<DocumentSelector>
+{
+ public override void WriteJson(JsonWriter writer, DocumentSelector? value, JsonSerializer serializer)
+ {
+ if (value is null)
+ {
+ writer.WriteNull();
+ }
+ else
+ {
+ serializer.Serialize(writer, (DocumentFilter[])value);
+ }
+ }
+
+ public override DocumentSelector ReadJson(JsonReader reader, Type objectType, DocumentSelector? existingValue, bool hasExistingValue, JsonSerializer serializer)
+ {
+ if (reader.TokenType == JsonToken.Null)
+ {
+ return null!;
+ }
+ var token = JToken.Load(reader);
+ if (token.Type == JTokenType.Array)
+ {
+ var filters = token.ToObject<DocumentFilter[]>(serializer);
+ return new DocumentSelector(filters ?? Array.Empty<DocumentFilter>());
+ }
+
+ throw new JsonSerializationException("Invalid JSON for DocumentSelector");
+ }
+}
diff --git a/generator/plugins/dotnet/custom/IMessage.cs b/generator/plugins/dotnet/custom/IMessage.cs
new file mode 100644
index 0000000..175c258
--- /dev/null
+++ b/generator/plugins/dotnet/custom/IMessage.cs
@@ -0,0 +1,4 @@
+public interface IMessage
+{
+ string JsonRPC { get; }
+}
\ No newline at end of file
diff --git a/generator/plugins/dotnet/custom/INotification.cs b/generator/plugins/dotnet/custom/INotification.cs
new file mode 100644
index 0000000..b287767
--- /dev/null
+++ b/generator/plugins/dotnet/custom/INotification.cs
@@ -0,0 +1,6 @@
+public interface INotification<TParams> : IMessage
+{
+ string Method { get; }
+
+ TParams? Params { get; }
+}
\ No newline at end of file
diff --git a/generator/plugins/dotnet/custom/IOrType.cs b/generator/plugins/dotnet/custom/IOrType.cs
new file mode 100644
index 0000000..217b64a
--- /dev/null
+++ b/generator/plugins/dotnet/custom/IOrType.cs
@@ -0,0 +1,4 @@
+public interface IOrType
+{
+ public object? Value { get; }
+}
\ No newline at end of file
diff --git a/generator/plugins/dotnet/custom/IPartialResultParams.cs b/generator/plugins/dotnet/custom/IPartialResultParams.cs
new file mode 100644
index 0000000..ef90c3b
--- /dev/null
+++ b/generator/plugins/dotnet/custom/IPartialResultParams.cs
@@ -0,0 +1,15 @@
+using System;
+
+/// <summary>
+/// Interface to describe parameters for requests that support streaming results.
+///
+/// See the <see href="https://microsoft.github.io/language-server-protocol/specifications/specification-current/#partialResultParams">Language Server Protocol specification</see> for additional information.
+/// </summary>
+/// <typeparam name="T">The type to be reported by <see cref="PartialResultToken"/>.</typeparam>
+public interface IPartialResultParams
+{
+ /// <summary>
+ /// An optional token that a server can use to report partial results (e.g. streaming) to the client.
+ /// </summary>
+ public ProgressToken? PartialResultToken { get; set; }
+}
\ No newline at end of file
diff --git a/generator/plugins/dotnet/custom/IRequest.cs b/generator/plugins/dotnet/custom/IRequest.cs
new file mode 100644
index 0000000..55fcbd8
--- /dev/null
+++ b/generator/plugins/dotnet/custom/IRequest.cs
@@ -0,0 +1,9 @@
+public interface IRequest<TParams> : IMessage
+{
+
+ OrType<string, int> Id { get; }
+
+ string Method { get; }
+
+ TParams Params { get; }
+}
\ No newline at end of file
diff --git a/generator/plugins/dotnet/custom/IResponse.cs b/generator/plugins/dotnet/custom/IResponse.cs
new file mode 100644
index 0000000..01ecf3e
--- /dev/null
+++ b/generator/plugins/dotnet/custom/IResponse.cs
@@ -0,0 +1,9 @@
+public interface IResponse<TResponse> : IMessage
+{
+
+ OrType<string, int> Id { get; }
+
+ TResponse? Result { get; }
+
+ ResponseError? Error { get; }
+}
\ No newline at end of file
diff --git a/generator/plugins/dotnet/custom/LSPAnyConverter.cs b/generator/plugins/dotnet/custom/LSPAnyConverter.cs
new file mode 100644
index 0000000..28781cb
--- /dev/null
+++ b/generator/plugins/dotnet/custom/LSPAnyConverter.cs
@@ -0,0 +1,61 @@
+using Newtonsoft.Json;
+
+public class LSPAnyConverter : JsonConverter
+{
+ public override bool CanConvert(Type objectType)
+ {
+ return objectType == typeof(LSPAny);
+ }
+
+ public override object? ReadJson(JsonReader reader, Type objectType, object? existingValue, JsonSerializer serializer)
+ {
+ reader = reader ?? throw new ArgumentNullException(nameof(reader));
+ switch (reader.TokenType)
+ {
+ case JsonToken.Null:
+ return null;
+
+ case JsonToken.Integer:
+ return new LSPAny(serializer.Deserialize<long>(reader));
+
+ case JsonToken.Float:
+ return new LSPAny(serializer.Deserialize<float>(reader));
+
+ case JsonToken.Boolean:
+ return new LSPAny(serializer.Deserialize<bool>(reader));
+
+ case JsonToken.String:
+ return new LSPAny(serializer.Deserialize<string>(reader));
+
+ case JsonToken.StartArray:
+ List<object>? l = serializer.Deserialize<List<object>>(reader);
+ if (l == null)
+ {
+ return null;
+ }
+ return new LSPAny(new LSPArray(l));
+
+ case JsonToken.StartObject:
+ Dictionary<string, object?>? o = serializer.Deserialize<Dictionary<string, object?>>(reader);
+ if (o == null)
+ {
+ return null;
+ }
+ return new LSPAny(new LSPObject(o));
+ }
+
+ throw new JsonSerializationException($"Unexpected token type '{reader.TokenType}' while deserializing '{objectType.Name}'.");
+ }
+
+ public override void WriteJson(JsonWriter writer, object? value, JsonSerializer serializer)
+ {
+ if (value is null)
+ {
+ writer.WriteNull();
+ }
+ else
+ {
+ serializer.Serialize(writer, ((LSPAny)value).Value);
+ }
+ }
+}
\ No newline at end of file
diff --git a/generator/plugins/dotnet/custom/LSPRequest.cs b/generator/plugins/dotnet/custom/LSPRequest.cs
new file mode 100644
index 0000000..69e3f0d
--- /dev/null
+++ b/generator/plugins/dotnet/custom/LSPRequest.cs
@@ -0,0 +1,22 @@
+using System;
+
+[AttributeUsage(AttributeTargets.Class)]
+public class LSPRequestAttribute : Attribute
+{
+ public LSPRequestAttribute(string method, Type response)
+ {
+ Method = method;
+ Response = response;
+ }
+
+ public LSPRequestAttribute(string method, Type response, Type partialResponse)
+ {
+ Method = method;
+ Response = response;
+ PartialResponse = partialResponse;
+ }
+
+ public string Method { get; }
+ public Type Response { get; }
+ public Type? PartialResponse { get; }
+}
\ No newline at end of file
diff --git a/generator/plugins/dotnet/custom/LSPResponse.cs b/generator/plugins/dotnet/custom/LSPResponse.cs
new file mode 100644
index 0000000..4d2ca46
--- /dev/null
+++ b/generator/plugins/dotnet/custom/LSPResponse.cs
@@ -0,0 +1,13 @@
+using System;
+
+[AttributeUsage(AttributeTargets.Class)]
+public class LSPResponseAttribute : Attribute
+{
+ public LSPResponseAttribute(Type request)
+ {
+ Request = request;
+ }
+
+
+ public Type Request { get; }
+}
\ No newline at end of file
diff --git a/generator/plugins/dotnet/custom/MessageDirection.cs b/generator/plugins/dotnet/custom/MessageDirection.cs
new file mode 100644
index 0000000..a2792ff
--- /dev/null
+++ b/generator/plugins/dotnet/custom/MessageDirection.cs
@@ -0,0 +1,8 @@
+using System.Runtime.Serialization;
+
+public enum MessageDirection
+{
+ [EnumMember(Value = "serverToClient")] ServerToClient,
+ [EnumMember(Value = "clientToServer")] ClientToServer,
+ [EnumMember(Value = "both")] Both,
+}
\ No newline at end of file
diff --git a/generator/plugins/dotnet/custom/OrType.cs b/generator/plugins/dotnet/custom/OrType.cs
new file mode 100644
index 0000000..2ddf320
--- /dev/null
+++ b/generator/plugins/dotnet/custom/OrType.cs
@@ -0,0 +1,138 @@
+using System;
+
+public record OrType<T, U> : IOrType
+{
+ public object? Value { get; }
+ public OrType(T t)
+ {
+ Value = t ?? throw new ArgumentNullException(nameof(t));
+ }
+
+ public OrType(U u)
+ {
+ Value = u ?? throw new ArgumentNullException(nameof(u));
+ }
+
+ public static explicit operator U?(OrType<T, U> obj)
+ {
+ return obj.Value is U x ? x : default;
+ }
+
+ public static explicit operator T?(OrType<T, U> obj)
+ {
+ return obj.Value is T x ? x : default;
+ }
+
+ public static explicit operator OrType<T, U>(U obj) => obj is null ? null! : new OrType<T, U>(obj);
+ public static explicit operator OrType<T, U>(T obj) => obj is null ? null! : new OrType<T, U>(obj);
+
+ public override string ToString()
+ {
+ return Value?.ToString()!;
+ }
+}
+
+public record OrType<T, U, V> : IOrType
+{
+ public object? Value { get; }
+
+ public OrType(T t)
+ {
+ Value = t ?? throw new ArgumentNullException(nameof(t));
+ }
+
+ public OrType(U u)
+ {
+ Value = u ?? throw new ArgumentNullException(nameof(u));
+ }
+
+ public OrType(V v)
+ {
+ Value = v ?? throw new ArgumentNullException(nameof(v));
+ }
+
+ public static explicit operator U?(OrType<T, U, V> obj)
+ {
+ return obj.Value is U x ? x : default;
+ }
+
+ public static explicit operator T?(OrType<T, U, V> obj)
+ {
+ return obj.Value is T x ? x : default;
+ }
+
+ public static explicit operator V?(OrType<T, U, V> obj)
+ {
+ return obj.Value is V x ? x : default;
+ }
+
+ public static explicit operator OrType<T, U, V>(U obj) => obj is null ? null! : new OrType<T, U, V>(obj);
+
+ public static explicit operator OrType<T, U, V>(T obj) => obj is null ? null! : new OrType<T, U, V>(obj);
+
+ public static explicit operator OrType<T, U, V>(V obj) => obj is null ? null! : new OrType<T, U, V>(obj);
+
+ public override string ToString()
+ {
+ return Value?.ToString()!;
+ }
+}
+
+
+public record OrType<T, U, V, W> : IOrType
+{
+ public object? Value { get; }
+
+ public OrType(T t)
+ {
+ Value = t ?? throw new ArgumentNullException(nameof(t));
+ }
+
+ public OrType(U u)
+ {
+ Value = u ?? throw new ArgumentNullException(nameof(u));
+ }
+
+ public OrType(V v)
+ {
+ Value = v ?? throw new ArgumentNullException(nameof(v));
+ }
+
+ public OrType(W w)
+ {
+ Value = w ?? throw new ArgumentNullException(nameof(w));
+ }
+
+ public static explicit operator U?(OrType<T, U, V, W> obj)
+ {
+ return obj.Value is U x ? x : default;
+ }
+
+ public static explicit operator T?(OrType<T, U, V, W> obj)
+ {
+ return obj.Value is T x ? x : default;
+ }
+
+ public static explicit operator V?(OrType<T, U, V, W> obj)
+ {
+ return obj.Value is V x ? x : default;
+ }
+
+ public static explicit operator W?(OrType<T, U, V, W> obj)
+ {
+ return obj.Value is W x ? x : default;
+ }
+
+ public static explicit operator OrType<T, U, V, W>(U obj) => obj is null ? null! : new OrType<T, U, V, W>(obj);
+
+ public static explicit operator OrType<T, U, V, W>(T obj) => obj is null ? null! : new OrType<T, U, V, W>(obj);
+
+ public static explicit operator OrType<T, U, V, W>(V obj) => obj is null ? null! : new OrType<T, U, V, W>(obj);
+
+ public static explicit operator OrType<T, U, V, W>(W obj) => obj is null ? null! : new OrType<T, U, V, W>(obj);
+
+ public override string ToString()
+ {
+ return Value?.ToString()!;
+ }
+}
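A short sketch (not part of the diff) of how the two-arm union above behaves; the LSP request id, which the protocol defines as string | integer, is used as the motivating shape:

using System;

public static class OrTypeUsage
{
    public static void Run()
    {
        // Stores whichever alternative was supplied; the other explicit cast yields default.
        var id = new OrType<string, int>(42);

        object? raw = id.Value;           // boxed 42
        string? asString = (string?)id;   // null: the stored value is not a string
        Console.WriteLine(id.ToString()); // "42"

        // The explicit operators also convert from either alternative to the union.
        var fromString = (OrType<string, int>)"request-7";
    }
}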
diff --git a/generator/plugins/dotnet/custom/OrTypeArrayConverter.cs b/generator/plugins/dotnet/custom/OrTypeArrayConverter.cs
new file mode 100644
index 0000000..f2dbf23
--- /dev/null
+++ b/generator/plugins/dotnet/custom/OrTypeArrayConverter.cs
@@ -0,0 +1,145 @@
+using Newtonsoft.Json;
+using Newtonsoft.Json.Linq;
+using System;
+using System.Collections.Immutable;
+
+public class OrTypeArrayConverter<T, U> : JsonConverter<ImmutableArray<OrType<T, U>>>
+{
+ private OrTypeConverter<T, U> _converter;
+
+ public OrTypeArrayConverter()
+ {
+ _converter = new OrTypeConverter<T, U>();
+ }
+
+ public override ImmutableArray<OrType<T, U>> ReadJson(JsonReader reader, Type objectType, ImmutableArray<OrType<T, U>> existingValue, bool hasExistingValue, JsonSerializer serializer)
+ {
+ if (reader.TokenType == JsonToken.Null)
+ {
+ return default(ImmutableArray<OrType<T, U>>);
+ }
+
+ JArray array = JArray.Load(reader);
+ ImmutableArray<OrType<T, U>>.Builder builder = ImmutableArray.CreateBuilder<OrType<T, U>>();
+
+ for (int i = 0; i < array.Count; i++)
+ {
+ builder.Add((OrType<T, U>)_converter.ReadJson(array[i].CreateReader(), typeof(OrType<T, U>), null, serializer)!);
+ }
+
+ return builder.ToImmutable();
+ }
+
+ public override void WriteJson(JsonWriter writer, ImmutableArray<OrType<T, U>> value, JsonSerializer serializer)
+ {
+ if (value.IsDefault)
+ {
+ writer.WriteNull();
+ }
+ else
+ {
+ writer.WriteStartArray();
+
+ foreach (var item in value)
+ {
+ _converter.WriteJson(writer, item, serializer);
+ }
+
+ writer.WriteEndArray();
+ }
+ }
+}
+public class OrTypeArrayConverter<T, U, V> : JsonConverter<ImmutableArray<OrType<T, U, V>>>
+{
+ private OrTypeConverter<T, U, V> _converter;
+
+ public OrTypeArrayConverter()
+ {
+ _converter = new OrTypeConverter<T, U, V>();
+ }
+
+ public override ImmutableArray<OrType<T, U, V>> ReadJson(JsonReader reader, Type objectType, ImmutableArray<OrType<T, U, V>> existingValue, bool hasExistingValue, JsonSerializer serializer)
+ {
+ if (reader.TokenType == JsonToken.Null)
+ {
+ return default(ImmutableArray<OrType<T, U, V>>);
+ }
+
+ JArray array = JArray.Load(reader);
+ ImmutableArray<OrType<T, U, V>>.Builder builder = ImmutableArray.CreateBuilder<OrType<T, U, V>>();
+
+ for (int i = 0; i < array.Count; i++)
+ {
+ builder.Add((OrType<T, U, V>)_converter.ReadJson(array[i].CreateReader(), typeof(OrType<T, U, V>), null, serializer)!);
+ }
+
+ return builder.ToImmutable();
+ }
+
+ public override void WriteJson(JsonWriter writer, ImmutableArray<OrType<T, U, V>> value, JsonSerializer serializer)
+ {
+ if (value.IsDefault)
+ {
+ writer.WriteNull();
+ }
+ else
+ {
+ writer.WriteStartArray();
+
+ foreach (var item in value)
+ {
+ _converter.WriteJson(writer, item, serializer);
+ }
+
+ writer.WriteEndArray();
+ }
+ }
+}
+
+
+public class OrTypeArrayConverter<T, U, V, W> : JsonConverter<ImmutableArray<OrType<T, U, V, W>>>
+{
+ private OrTypeConverter<T, U, V, W> _converter;
+
+ public OrTypeArrayConverter()
+ {
+ _converter = new OrTypeConverter<T, U, V, W>();
+ }
+
+ public override ImmutableArray<OrType<T, U, V, W>> ReadJson(JsonReader reader, Type objectType, ImmutableArray<OrType<T, U, V, W>> existingValue, bool hasExistingValue, JsonSerializer serializer)
+ {
+ if (reader.TokenType == JsonToken.Null)
+ {
+ return default(ImmutableArray<OrType<T, U, V, W>>);
+ }
+
+ JArray array = JArray.Load(reader);
+ ImmutableArray<OrType<T, U, V, W>>.Builder builder = ImmutableArray.CreateBuilder<OrType<T, U, V, W>>();
+
+ for (int i = 0; i < array.Count; i++)
+ {
+ builder.Add((OrType<T, U, V, W>)_converter.ReadJson(array[i].CreateReader(), typeof(OrType<T, U, V, W>), null, serializer)!);
+ }
+
+ return builder.ToImmutable();
+ }
+
+ public override void WriteJson(JsonWriter writer, ImmutableArray<OrType<T, U, V, W>> value, JsonSerializer serializer)
+ {
+ if (value.IsDefault)
+ {
+ writer.WriteNull();
+ }
+ else
+ {
+ writer.WriteStartArray();
+
+ foreach (var item in value)
+ {
+ _converter.WriteJson(writer, item, serializer);
+ }
+
+ writer.WriteEndArray();
+ }
+ }
+}
\ No newline at end of file
diff --git a/generator/plugins/dotnet/custom/OrTypeConverter.cs b/generator/plugins/dotnet/custom/OrTypeConverter.cs
new file mode 100644
index 0000000..f2aadbf
--- /dev/null
+++ b/generator/plugins/dotnet/custom/OrTypeConverter.cs
@@ -0,0 +1,595 @@
+using Newtonsoft.Json;
+using Newtonsoft.Json.Linq;
+using System;
+using System.Collections.Generic;
+using System.Linq;
+
+internal class OrTypeConverterHelpers
+{
+ public static Type[] SortTypesByHeuristic(Type[] types, JToken jToken)
+ {
+ var typePropertyScores = new Dictionary<Type, int>();
+
+ string[] jTokenPropertyNames = jToken.Children<JProperty>().Select(p => p.Name.ToUpper()).ToArray();
+
+ foreach (Type type in types)
+ {
+ string[] typePropertyNames = type.GetProperties().Select(p => p.Name.ToUpper()).ToArray();
+
+ int score = jTokenPropertyNames.Count(propertyName => typePropertyNames.Contains(propertyName));
+ typePropertyScores[type] = score;
+ }
+
+ return types.OrderByDescending(type => typePropertyScores[type]).ToArray();
+ }
+}
+
+public class OrTypeConverter<T, U> : JsonConverter<OrType<T, U>>
+{
+ public override OrType<T, U>? ReadJson(JsonReader reader, Type objectType, OrType<T, U>? existingValue, bool hasExistingValue, JsonSerializer serializer)
+ {
+ reader = reader ?? throw new ArgumentNullException(nameof(reader));
+
+ if (reader.TokenType == JsonToken.Null)
+ {
+ return null;
+ }
+
+ Type[] types = new Type[] { typeof(T), typeof(U) };
+
+ if (reader.TokenType == JsonToken.Integer && (Validators.HasType(types, typeof(uint)) || Validators.HasType(types, typeof(int))))
+ {
+ return ReadIntegerToken(reader, serializer, types);
+ }
+ if (reader.TokenType == JsonToken.Float && Validators.HasType(types, typeof(float)))
+ {
+ return ReadFloatToken(reader, serializer, types);
+ }
+ if (reader.TokenType == JsonToken.Boolean && Validators.HasType(types, typeof(bool)))
+ {
+ return ReadBooleanToken(reader, serializer, types);
+ }
+ if (reader.TokenType == JsonToken.String && Validators.HasType(types, typeof(string)))
+ {
+ return ReadStringToken(reader, serializer, types);
+ }
+
+ var token = JToken.Load(reader);
+ return OrTypeConverter<T, U>.ReadObjectToken(token, serializer, OrTypeConverterHelpers.SortTypesByHeuristic(types, token));
+ }
+
+ private static OrType<T, U> ReadIntegerToken(JsonReader reader, JsonSerializer serializer, Type[] types)
+ {
+ long integer = serializer.Deserialize<long>(reader);
+ if (Validators.InUIntegerRange(integer) && Validators.HasType(types, typeof(uint)))
+ {
+ if (typeof(T) == typeof(uint))
+ {
+ return new OrType<T, U>((T)(object)(uint)integer);
+ }
+ if (typeof(U) == typeof(uint))
+ {
+ return new OrType<T, U>((U)(object)(uint)integer);
+ }
+ }
+ if (Validators.InIntegerRange(integer) && Validators.HasType(types, typeof(int)))
+ {
+ if (typeof(T) == typeof(int))
+ {
+ return new OrType<T, U>((T)(object)(int)integer);
+ }
+ if (typeof(U) == typeof(int))
+ {
+ return new OrType<T, U>((U)(object)(int)integer);
+ }
+ }
+ throw new ArgumentOutOfRangeException($"Integer out-of-range of LSP Signed Integer[{int.MinValue}:{int.MaxValue}] and out-of-range of LSP Unsigned Integer [{uint.MinValue}:{uint.MaxValue}] => {integer}");
+ }
+
+ private static OrType<T, U> ReadFloatToken(JsonReader reader, JsonSerializer serializer, Type[] types)
+ {
+ float real = serializer.Deserialize<float>(reader);
+ if (typeof(T) == typeof(float))
+ {
+ return new OrType<T, U>((T)(object)real);
+ }
+ if (typeof(U) == typeof(float))
+ {
+ return new OrType<T, U>((U)(object)real);
+ }
+ throw new InvalidOperationException("Invalid token type for float");
+ }
+
+ private static OrType<T, U> ReadBooleanToken(JsonReader reader, JsonSerializer serializer, Type[] types)
+ {
+ bool boolean = serializer.Deserialize<bool>(reader);
+ if (typeof(T) == typeof(bool))
+ {
+ return new OrType<T, U>((T)(object)boolean);
+ }
+ if (typeof(U) == typeof(bool))
+ {
+ return new OrType<T, U>((U)(object)boolean);
+ }
+ throw new InvalidOperationException("Invalid token type for boolean");
+ }
+
+ private static OrType<T, U> ReadStringToken(JsonReader reader, JsonSerializer serializer, Type[] types)
+ {
+ string str = serializer.Deserialize<string>(reader)!;
+ if (typeof(T) == typeof(string))
+ {
+ return new OrType<T, U>((T)(object)str);
+ }
+ if (typeof(U) == typeof(string))
+ {
+ return new OrType<T, U>((U)(object)str);
+ }
+ throw new InvalidOperationException("Invalid token type for string");
+ }
+
+ private static OrType<T, U> ReadObjectToken(JToken token, JsonSerializer serializer, Type[] types)
+ {
+ var exceptions = new List<Exception>();
+ foreach (Type type in types)
+ {
+ try
+ {
+ object? value = null;
+ if (token.Type == JTokenType.Array && type == typeof((uint, uint)))
+ {
+ uint[]? o = token.ToObject<uint[]>(serializer);
+ if (o != null)
+ {
+ value = (o[0], o[1]);
+ }
+ }
+ else
+ {
+ value = token.ToObject(type, serializer);
+ }
+
+ if (value != null)
+ {
+ if (value is T t)
+ {
+ return new OrType<T, U>(t);
+ }
+ if (value is U u)
+ {
+ return new OrType<T, U>(u);
+ }
+ }
+ }
+ catch (Exception ex)
+ {
+ exceptions.Add(ex);
+ continue;
+ }
+ }
+
+ throw new JsonSerializationException("Unable to deserialize object", new AggregateException(exceptions));
+ }
+
+
+
+ public override void WriteJson(JsonWriter writer, OrType<T, U>? value, JsonSerializer serializer)
+ {
+ if (value is null)
+ {
+ writer.WriteNull();
+ }
+ else if (value?.Value?.GetType() == typeof((uint, uint)))
+ {
+ ValueTuple<uint, uint> o = (ValueTuple<uint, uint>)(value.Value);
+ serializer.Serialize(writer, new uint[] { o.Item1, o.Item2 });
+ }
+ else
+ {
+ serializer.Serialize(writer, value?.Value);
+ }
+ }
+}
+
+public class OrTypeConverter<T, U, V> : JsonConverter<OrType<T, U, V>>
+{
+ public override OrType<T, U, V>? ReadJson(JsonReader reader, Type objectType, OrType<T, U, V>? existingValue, bool hasExistingValue, JsonSerializer serializer)
+ {
+ reader = reader ?? throw new ArgumentNullException(nameof(reader));
+
+ if (reader.TokenType == JsonToken.Null)
+ {
+ return null;
+ }
+
+ Type[] types = new Type[] { typeof(T), typeof(U), typeof(V) };
+
+ if (reader.TokenType == JsonToken.Integer && (Validators.HasType(types, typeof(uint)) || Validators.HasType(types, typeof(int))))
+ {
+ return ReadIntegerToken(reader, serializer, types);
+ }
+ if (reader.TokenType == JsonToken.Float && Validators.HasType(types, typeof(float)))
+ {
+ return ReadFloatToken(reader, serializer, types);
+ }
+ if (reader.TokenType == JsonToken.Boolean && Validators.HasType(types, typeof(bool)))
+ {
+ return ReadBooleanToken(reader, serializer, types);
+ }
+ if (reader.TokenType == JsonToken.String && Validators.HasType(types, typeof(string)))
+ {
+ return ReadStringToken(reader, serializer, types);
+ }
+
+ var token = JToken.Load(reader);
+ return OrTypeConverter<T, U, V>.ReadObjectToken(token, serializer, OrTypeConverterHelpers.SortTypesByHeuristic(types, token));
+ }
+
+ private static OrType<T, U, V> ReadIntegerToken(JsonReader reader, JsonSerializer serializer, Type[] types)
+ {
+ long integer = serializer.Deserialize<long>(reader);
+ if (Validators.InUIntegerRange(integer) && Validators.HasType(types, typeof(uint)))
+ {
+ if (typeof(T) == typeof(uint))
+ {
+ return new OrType<T, U, V>((T)(object)(uint)integer);
+ }
+ if (typeof(U) == typeof(uint))
+ {
+ return new OrType<T, U, V>((U)(object)(uint)integer);
+ }
+ if (typeof(V) == typeof(uint))
+ {
+ return new OrType<T, U, V>((V)(object)(uint)integer);
+ }
+ }
+ if (Validators.InIntegerRange(integer) && Validators.HasType(types, typeof(int)))
+ {
+ if (typeof(T) == typeof(int))
+ {
+ return new OrType<T, U, V>((T)(object)(int)integer);
+ }
+ if (typeof(U) == typeof(int))
+ {
+ return new OrType<T, U, V>((U)(object)(int)integer);
+ }
+ if (typeof(V) == typeof(int))
+ {
+ return new OrType<T, U, V>((V)(object)(int)integer);
+ }
+ }
+ throw new ArgumentOutOfRangeException($"Integer out-of-range of LSP Signed Integer[{int.MinValue}:{int.MaxValue}] and out-of-range of LSP Unsigned Integer [{uint.MinValue}:{uint.MaxValue}] => {integer}");
+ }
+
+ private static OrType<T, U, V> ReadFloatToken(JsonReader reader, JsonSerializer serializer, Type[] types)
+ {
+ float real = serializer.Deserialize<float>(reader);
+ if (typeof(T) == typeof(float))
+ {
+ return new OrType<T, U, V>((T)(object)real);
+ }
+ if (typeof(U) == typeof(float))
+ {
+ return new OrType<T, U, V>((U)(object)real);
+ }
+ if (typeof(V) == typeof(float))
+ {
+ return new OrType<T, U, V>((V)(object)real);
+ }
+ throw new InvalidOperationException("Invalid token type for float");
+ }
+
+ private static OrType<T, U, V> ReadBooleanToken(JsonReader reader, JsonSerializer serializer, Type[] types)
+ {
+ bool boolean = serializer.Deserialize<bool>(reader);
+ if (typeof(T) == typeof(bool))
+ {
+ return new OrType<T, U, V>((T)(object)boolean);
+ }
+ if (typeof(U) == typeof(bool))
+ {
+ return new OrType<T, U, V>((U)(object)boolean);
+ }
+ if (typeof(V) == typeof(bool))
+ {
+ return new OrType<T, U, V>((V)(object)boolean);
+ }
+ throw new InvalidOperationException("Invalid token type for boolean");
+ }
+
+ private static OrType<T, U, V> ReadStringToken(JsonReader reader, JsonSerializer serializer, Type[] types)
+ {
+ string str = serializer.Deserialize<string>(reader)!;
+ if (typeof(T) == typeof(string))
+ {
+ return new OrType<T, U, V>((T)(object)str);
+ }
+ if (typeof(U) == typeof(string))
+ {
+ return new OrType<T, U, V>((U)(object)str);
+ }
+ if (typeof(V) == typeof(string))
+ {
+ return new OrType<T, U, V>((V)(object)str);
+ }
+ throw new InvalidOperationException("Invalid token type for string");
+ }
+
+ private static OrType<T, U, V> ReadObjectToken(JToken token, JsonSerializer serializer, Type[] types)
+ {
+ var exceptions = new List<Exception>();
+ foreach (Type type in types)
+ {
+ try
+ {
+ object? value = null;
+ if (token.Type == JTokenType.Array && type == typeof((uint, uint)))
+ {
+ uint[]? o = token.ToObject<uint[]>(serializer);
+ if (o != null)
+ {
+ value = (o[0], o[1]);
+ }
+ }
+ else
+ {
+ value = token.ToObject(type, serializer);
+ }
+
+ if (value != null)
+ {
+ if (value is T t)
+ {
+ return new OrType<T, U, V>(t);
+ }
+ if (value is U u)
+ {
+ return new OrType<T, U, V>(u);
+ }
+ if (value is V v)
+ {
+ return new OrType<T, U, V>(v);
+ }
+ }
+ }
+ catch (Exception ex)
+ {
+ exceptions.Add(ex);
+ continue;
+ }
+ }
+
+ throw new JsonSerializationException("Unable to deserialize object", new AggregateException(exceptions));
+ }
+
+ public override void WriteJson(JsonWriter writer, OrType<T, U, V>? value, JsonSerializer serializer)
+ {
+ if (value is null)
+ {
+ writer.WriteNull();
+ }
+ else if (value?.Value?.GetType() == typeof((uint, uint)))
+ {
+ ValueTuple<uint, uint> o = (ValueTuple<uint, uint>)(value.Value);
+ serializer.Serialize(writer, new uint[] { o.Item1, o.Item2 });
+ }
+ else
+ {
+ serializer.Serialize(writer, value?.Value);
+ }
+ }
+}
+
+public class OrTypeConverter<T, U, V, W> : JsonConverter<OrType<T, U, V, W>>
+{
+ public override OrType<T, U, V, W>? ReadJson(JsonReader reader, Type objectType, OrType<T, U, V, W>? existingValue, bool hasExistingValue, JsonSerializer serializer)
+ {
+ reader = reader ?? throw new ArgumentNullException(nameof(reader));
+
+ if (reader.TokenType == JsonToken.Null)
+ {
+ return null;
+ }
+
+ Type[] types = new Type[] { typeof(T), typeof(U), typeof(V), typeof(W) };
+
+ if (reader.TokenType == JsonToken.Integer && (Validators.HasType(types, typeof(uint)) || Validators.HasType(types, typeof(int))))
+ {
+ return ReadIntegerToken(reader, serializer, types);
+ }
+ if (reader.TokenType == JsonToken.Float && Validators.HasType(types, typeof(float)))
+ {
+ return ReadFloatToken(reader, serializer, types);
+ }
+ if (reader.TokenType == JsonToken.Boolean && Validators.HasType(types, typeof(bool)))
+ {
+ return ReadBooleanToken(reader, serializer, types);
+ }
+ if (reader.TokenType == JsonToken.String && Validators.HasType(types, typeof(string)))
+ {
+ return ReadStringToken(reader, serializer, types);
+ }
+
+ var token = JToken.Load(reader);
+ return OrTypeConverter<T, U, V, W>.ReadObjectToken(token, serializer, OrTypeConverterHelpers.SortTypesByHeuristic(types, token));
+ }
+
+ private static OrType<T, U, V, W> ReadIntegerToken(JsonReader reader, JsonSerializer serializer, Type[] types)
+ {
+ long integer = serializer.Deserialize<long>(reader);
+ if (Validators.InUIntegerRange(integer) && Validators.HasType(types, typeof(uint)))
+ {
+ if (typeof(T) == typeof(uint))
+ {
+ return new OrType<T, U, V, W>((T)(object)(uint)integer);
+ }
+ if (typeof(U) == typeof(uint))
+ {
+ return new OrType<T, U, V, W>((U)(object)(uint)integer);
+ }
+ if (typeof(V) == typeof(uint))
+ {
+ return new OrType<T, U, V, W>((V)(object)(uint)integer);
+ }
+ if (typeof(W) == typeof(uint))
+ {
+ return new OrType<T, U, V, W>((W)(object)(uint)integer);
+ }
+ }
+ if (Validators.InIntegerRange(integer) && Validators.HasType(types, typeof(int)))
+ {
+ if (typeof(T) == typeof(int))
+ {
+ return new OrType<T, U, V, W>((T)(object)(int)integer);
+ }
+ if (typeof(U) == typeof(int))
+ {
+ return new OrType<T, U, V, W>((U)(object)(int)integer);
+ }
+ if (typeof(V) == typeof(int))
+ {
+ return new OrType<T, U, V, W>((V)(object)(int)integer);
+ }
+ if (typeof(W) == typeof(int))
+ {
+ return new OrType<T, U, V, W>((W)(object)(int)integer);
+ }
+ }
+ throw new ArgumentOutOfRangeException($"Integer out-of-range of LSP Signed Integer[{int.MinValue}:{int.MaxValue}] and out-of-range of LSP Unsigned Integer [{uint.MinValue}:{uint.MaxValue}] => {integer}");
+ }
+
+ private static OrType<T, U, V, W> ReadFloatToken(JsonReader reader, JsonSerializer serializer, Type[] types)
+ {
+ float real = serializer.Deserialize<float>(reader);
+ if (typeof(T) == typeof(float))
+ {
+ return new OrType<T, U, V, W>((T)(object)real);
+ }
+ if (typeof(U) == typeof(float))
+ {
+ return new OrType<T, U, V, W>((U)(object)real);
+ }
+ if (typeof(V) == typeof(float))
+ {
+ return new OrType<T, U, V, W>((V)(object)real);
+ }
+ if (typeof(W) == typeof(float))
+ {
+ return new OrType<T, U, V, W>((W)(object)real);
+ }
+ throw new InvalidOperationException("Invalid token type for float");
+ }
+
+ private static OrType<T, U, V, W> ReadBooleanToken(JsonReader reader, JsonSerializer serializer, Type[] types)
+ {
+ bool boolean = serializer.Deserialize<bool>(reader);
+ if (typeof(T) == typeof(bool))
+ {
+ return new OrType<T, U, V, W>((T)(object)boolean);
+ }
+ if (typeof(U) == typeof(bool))
+ {
+ return new OrType<T, U, V, W>((U)(object)boolean);
+ }
+ if (typeof(V) == typeof(bool))
+ {
+ return new OrType<T, U, V, W>((V)(object)boolean);
+ }
+ if (typeof(W) == typeof(bool))
+ {
+ return new OrType<T, U, V, W>((W)(object)boolean);
+ }
+ throw new InvalidOperationException("Invalid token type for boolean");
+ }
+
+ private static OrType<T, U, V, W> ReadStringToken(JsonReader reader, JsonSerializer serializer, Type[] types)
+ {
+ string str = serializer.Deserialize<string>(reader)!;
+ if (typeof(T) == typeof(string))
+ {
+ return new OrType<T, U, V, W>((T)(object)str);
+ }
+ if (typeof(U) == typeof(string))
+ {
+ return new OrType<T, U, V, W>((U)(object)str);
+ }
+ if (typeof(V) == typeof(string))
+ {
+ return new OrType<T, U, V, W>((V)(object)str);
+ }
+ if (typeof(W) == typeof(string))
+ {
+ return new OrType<T, U, V, W>((W)(object)str);
+ }
+ throw new InvalidOperationException("Invalid token type for string");
+ }
+
+ private static OrType<T, U, V, W> ReadObjectToken(JToken token, JsonSerializer serializer, Type[] types)
+ {
+ var exceptions = new List<Exception>();
+ foreach (Type type in types)
+ {
+ try
+ {
+ object? value = null;
+ if (token.Type == JTokenType.Array && type == typeof((uint, uint)))
+ {
+ uint[]? o = token.ToObject<uint[]>(serializer);
+ if (o != null)
+ {
+ value = (o[0], o[1]);
+ }
+ }
+ else
+ {
+ value = token.ToObject(type, serializer);
+ }
+
+ if (value != null)
+ {
+ if (value is T t)
+ {
+ return new OrType<T, U, V, W>(t);
+ }
+ if (value is U u)
+ {
+ return new OrType<T, U, V, W>(u);
+ }
+ if (value is V v)
+ {
+ return new OrType<T, U, V, W>(v);
+ }
+ if (value is W w)
+ {
+ return new OrType<T, U, V, W>(w);
+ }
+ }
+
+ }
+ catch (Exception ex)
+ {
+ exceptions.Add(ex);
+ continue;
+ }
+ }
+
+ throw new JsonSerializationException("Unable to deserialize object", new AggregateException(exceptions));
+ }
+
+ public override void WriteJson(JsonWriter writer, OrType<T, U, V, W>? value, JsonSerializer serializer)
+ {
+ if (value is null)
+ {
+ writer.WriteNull();
+ }
+ else if (value?.Value?.GetType() == typeof((uint, uint)))
+ {
+ ValueTuple<uint, uint> o = (ValueTuple<uint, uint>)(value.Value);
+ serializer.Serialize(writer, new uint[] { o.Item1, o.Item2 });
+ }
+ else
+ {
+ serializer.Serialize(writer, value?.Value);
+ }
+ }
+}
\ No newline at end of file
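A hedged sketch (not part of the diff) showing the converter attached to a union-typed property, mirroring the [JsonConverter(typeof(OrTypeConverter<...>))] attributes that get_converter in dotnet_classes.py emits; Item and Id are hypothetical names:

using Newtonsoft.Json;

public class Item
{
    [JsonConverter(typeof(OrTypeConverter<string, int>))]
    public OrType<string, int>? Id { get; init; }
}

public static class OrTypeConverterUsage
{
    public static void Run()
    {
        var a = JsonConvert.DeserializeObject<Item>(@"{""Id"": 7}")!;      // a.Id holds the int 7
        var b = JsonConvert.DeserializeObject<Item>(@"{""Id"": ""x7""}")!; // b.Id holds the string "x7"
        string json = JsonConvert.SerializeObject(a);                      // {"Id":7}
    }
}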
diff --git a/generator/plugins/dotnet/custom/Proposed.cs b/generator/plugins/dotnet/custom/Proposed.cs
new file mode 100644
index 0000000..eb0ed75
--- /dev/null
+++ b/generator/plugins/dotnet/custom/Proposed.cs
@@ -0,0 +1,17 @@
+using System;
+
+[AttributeUsage(AttributeTargets.Class | AttributeTargets.Property | AttributeTargets.Enum)]
+public class ProposedAttribute : Attribute
+{
+ public ProposedAttribute()
+ {
+ Version = null;
+ }
+
+ public ProposedAttribute(string version)
+ {
+ Version = version;
+ }
+
+ public string? Version { get; }
+}
\ No newline at end of file
diff --git a/generator/plugins/dotnet/custom/ResponseError.cs b/generator/plugins/dotnet/custom/ResponseError.cs
new file mode 100644
index 0000000..5151783
--- /dev/null
+++ b/generator/plugins/dotnet/custom/ResponseError.cs
@@ -0,0 +1,27 @@
+using Newtonsoft.Json;
+using System.Runtime.Serialization;
+
+[DataContract]
+public class ResponseError
+{
+ [JsonConstructor]
+ public ResponseError(
+ int code,
+ string message,
+ LSPObject? data = null
+ )
+ {
+ Code = code;
+ Message = message;
+ Data = data;
+ }
+
+ [DataMember(Name = "code")]
+ int Code { get; }
+
+ [DataMember(Name = "message")]
+ string Message { get; }
+
+ [DataMember(Name = "data")]
+ LSPObject? Data { get; }
+}
\ No newline at end of file
diff --git a/generator/plugins/dotnet/custom/Since.cs b/generator/plugins/dotnet/custom/Since.cs
new file mode 100644
index 0000000..b13da74
--- /dev/null
+++ b/generator/plugins/dotnet/custom/Since.cs
@@ -0,0 +1,17 @@
+using System;
+
+[AttributeUsage(AttributeTargets.Class | AttributeTargets.Property | AttributeTargets.Enum | AttributeTargets.Interface)]
+public class SinceAttribute : Attribute
+{
+ public SinceAttribute()
+ {
+ Version = null;
+ }
+
+ public SinceAttribute(string version)
+ {
+ Version = version;
+ }
+
+ public string? Version { get; }
+}
\ No newline at end of file
diff --git a/generator/plugins/dotnet/custom/Validators.cs b/generator/plugins/dotnet/custom/Validators.cs
new file mode 100644
index 0000000..9edc18f
--- /dev/null
+++ b/generator/plugins/dotnet/custom/Validators.cs
@@ -0,0 +1,20 @@
+
+using System;
+
+public static class Validators
+{
+ public static bool HasType(Type[] types, Type type)
+ {
+ return types.Contains(type);
+ }
+
+ public static bool InIntegerRange(long value)
+ {
+ return value >= int.MinValue && value <= int.MaxValue;
+ }
+
+ public static bool InUIntegerRange(long value)
+ {
+ return value >= uint.MinValue && value <= uint.MaxValue;
+ }
+}
\ No newline at end of file
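A small sketch (not part of the diff) of the range helpers: values between int.MaxValue and uint.MaxValue pass only the unsigned check, which is why the OrType converters try the uint alternative before int.

using System;

public static class ValidatorsUsage
{
    public static void Run()
    {
        Console.WriteLine(Validators.InIntegerRange(3_000_000_000));  // False: above int.MaxValue
        Console.WriteLine(Validators.InUIntegerRange(3_000_000_000)); // True: within uint range
        Console.WriteLine(Validators.HasType(new[] { typeof(int), typeof(string) }, typeof(string))); // True
    }
}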
diff --git a/generator/plugins/dotnet/dotnet_classes.py b/generator/plugins/dotnet/dotnet_classes.py
new file mode 100644
index 0000000..0a705cd
--- /dev/null
+++ b/generator/plugins/dotnet/dotnet_classes.py
@@ -0,0 +1,1024 @@
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License.
+
+import re
+from typing import Any, Dict, List, Optional, Tuple, Union
+
+import cattrs
+
+from generator import model
+
+from .dotnet_commons import TypeData
+from .dotnet_constants import NAMESPACE
+from .dotnet_helpers import (
+ class_wrapper,
+ generate_extras,
+ get_doc,
+ get_special_case_class_name,
+ get_special_case_property_name,
+ get_usings,
+ indent_lines,
+ lsp_method_to_name,
+ namespace_wrapper,
+ to_camel_case,
+ to_upper_camel_case,
+)
+
+ORTYPE_CONVERTER_RE = re.compile(r"OrType<(?P<parts>.*)>")
+IMMUTABLE_ARRAY_CONVERTER_RE = re.compile(r"ImmutableArray<(?P<elements>.*)>")
+
+
+def _get_enum(name: str, spec: model.LSPModel) -> Optional[model.Enum]:
+ for enum in spec.enumerations:
+ if enum.name == name:
+ return enum
+ return None
+
+
+def _get_struct(name: str, spec: model.LSPModel) -> Optional[model.Structure]:
+ for struct in spec.structures:
+ if struct.name == name:
+ return struct
+ return None
+
+
+def _is_str_enum(enum_def: model.Enum) -> bool:
+ return all(isinstance(item.value, str) for item in enum_def.values)
+
+
+def _is_int_enum(enum_def: model.Enum) -> bool:
+ return all(isinstance(item.value, int) for item in enum_def.values)
+
+
+def lsp_to_base_types(lsp_type: model.BaseType):
+ if lsp_type.name in ["string", "RegExp"]:
+ return "string"
+ elif lsp_type.name in ["DocumentUri", "URI"]:
+ return "Uri"
+ elif lsp_type.name in ["decimal"]:
+ return "float"
+ elif lsp_type.name in ["integer"]:
+ return "int"
+ elif lsp_type.name in ["uinteger"]:
+ return "uint"
+ elif lsp_type.name in ["boolean"]:
+ return "bool"
+ elif lsp_type.name in ["null"]:
+ return "object"
+
+ # null should be handled by the caller as an Option<> type
+ raise ValueError(f"Unknown base type: {lsp_type.name}")
+
+
+def get_types_for_usings(code: List[str]) -> List[str]:
+ immutable = []
+ for line in code:
+ if "ImmutableArray<" in line:
+ immutable.append("ImmutableArray")
+ if "ImmutableDictionary<" in line:
+ immutable.append("ImmutableDictionary")
+ return list(set(immutable))
+
+
+def has_null_base_type(items: List[model.LSP_TYPE_SPEC]) -> bool:
+ return any(item.kind == "base" and item.name == "null" for item in items)
+
+
+def filter_null_base_type(
+ items: List[model.LSP_TYPE_SPEC],
+) -> List[model.LSP_TYPE_SPEC]:
+ return [item for item in items if not (item.kind == "base" and item.name == "null")]
+
+
+def get_type_name(
+ type_def: model.LSP_TYPE_SPEC,
+ types: TypeData,
+ spec: model.LSPModel,
+ name_context: Optional[str] = None,
+) -> str:
+ name = None
+ if type_def.kind == "reference":
+ enum_def = _get_enum(type_def.name, spec)
+ if enum_def and enum_def.supportsCustomValues:
+ if _is_str_enum(enum_def):
+ name = f"string"
+ elif _is_int_enum(enum_def):
+ name = f"int"
+ else:
+ name = get_special_case_class_name(type_def.name)
+ elif type_def.kind == "array":
+ name = f"ImmutableArray<{get_type_name(type_def.element, types, spec, name_context)}>"
+ elif type_def.kind == "map":
+ name = generate_map_type(type_def, types, spec, name_context)
+ elif type_def.kind == "base":
+ name = lsp_to_base_types(type_def)
+ elif type_def.kind == "literal":
+ name = generate_literal_type(type_def, types, spec, name_context)
+ elif type_def.kind == "stringLiteral":
+ name = "string"
+ elif type_def.kind == "tuple":
+ subset = filter_null_base_type(type_def.items)
+ subset_types = [
+ get_type_name(item, types, spec, name_context) for item in subset
+ ]
+ name = f"({', '.join(subset_types)})"
+ elif type_def.kind == "or":
+ subset = filter_null_base_type(type_def.items)
+ if len(subset) == 1:
+ name = get_type_name(subset[0], types, spec, name_context)
+ elif len(subset) >= 2:
+ if are_variant_literals(subset):
+ name = generate_class_from_variant_literals(
+ subset, spec, types, name_context
+ )
+ else:
+ subset_types = [
+ get_type_name(item, types, spec, name_context) for item in subset
+ ]
+ name = f"OrType<{', '.join(subset_types)}>"
+ else:
+ raise ValueError(f"Unknown type kind: {type_def.kind}")
+ else:
+ raise ValueError(f"Unknown type kind: {type_def.kind}")
+ return name
+
+
+def generate_map_type(
+ type_def: model.LSP_TYPE_SPEC,
+ types: TypeData,
+ spec: model.LSPModel,
+ name_context: Optional[str] = None,
+) -> str:
+ key_type = get_type_name(type_def.key, types, spec, name_context)
+
+ if type_def.value.kind == "or":
+ subset = filter_null_base_type(type_def.value.items)
+ if len(subset) == 1:
+ value_type = get_type_name(type_def.value, types, spec, name_context)
+ else:
+ value_type = to_upper_camel_case(f"{name_context}Value")
+ type_alias = model.TypeAlias(
+ **{
+ "name": value_type,
+ "type": type_def.value,
+ }
+ )
+ generate_class_from_type_alias(type_alias, spec, types)
+
+ else:
+ value_type = get_type_name(type_def.value, types, spec, name_context)
+ return f"ImmutableDictionary<{key_type}, {value_type}>"
+
+
+def get_converter(type_def: model.LSP_TYPE_SPEC, type_name: str) -> Optional[str]:
+ if type_def.kind == "base" and type_def.name in ["DocumentUri", "URI"]:
+ return "[JsonConverter(typeof(CustomStringConverter<Uri>))]"
+ elif type_def.kind == "reference" and type_def.name in [
+ "Pattern",
+ "ChangeAnnotationIdentifier",
+ ]:
+ return f"[JsonConverter(typeof(CustomStringConverter<{type_def.name}>))]"
+ elif type_def.kind == "reference" and type_def.name == "DocumentSelector":
+ return "[JsonConverter(typeof(DocumentSelectorConverter))]"
+ elif type_def.kind == "or":
+ subset = filter_null_base_type(type_def.items)
+ if len(subset) == 1:
+ return get_converter(subset[0], type_name)
+ elif len(subset) >= 2:
+ converter = type_name.replace("OrType<", "OrTypeConverter<")
+ return f"[JsonConverter(typeof({converter}))]"
+ elif type_def.kind == "array" and type_name.startswith("OrType<"):
+ matches = ORTYPE_CONVERTER_RE.match(type_name).groupdict()
+ if "parts" in matches:
+ converter = f"OrTypeArrayConverter<{matches['parts']}>"
+ return f"[JsonConverter(typeof({converter}))]"
+ elif type_def.kind == "array":
+ matches = IMMUTABLE_ARRAY_CONVERTER_RE.match(type_name).groupdict()
+ elements = matches["elements"]
+ if elements.startswith("OrType<"):
+ matches = ORTYPE_CONVERTER_RE.match(elements).groupdict()
+ converter = f"OrTypeArrayConverter<{matches['parts']}>"
+ return f"[JsonConverter(typeof({converter}))]"
+ else:
+ converter = f"CustomArrayConverter<{elements}>"
+ return f"[JsonConverter(typeof({converter}))]"
+ return None
+
+
+def generate_property(
+ prop_def: model.Property,
+ spec: model.LSPModel,
+ types: TypeData,
+ usings: List[str],
+ class_name: str = "",
+) -> Tuple[List[str], str]:
+ if prop_def.name == "jsonrpc":
+ name = "JsonRPC"
+ else:
+ name = to_upper_camel_case(prop_def.name)
+ type_name = get_type_name(
+ prop_def.type, types, spec, f"{class_name}_{prop_def.name}"
+ )
+ converter = get_converter(prop_def.type, type_name)
+ special_optional = prop_def.type.kind == "or" and has_null_base_type(
+ prop_def.type.items
+ )
+ optional = (
+ "?"
+ if (prop_def.optional or special_optional)
+ and not (
+ type_name.startswith("ImmutableArray<")
+ or type_name.startswith("ImmutableDictionary<")
+ )
+ else ""
+ )
+ lines = (
+ get_doc(prop_def.documentation)
+ + generate_extras(prop_def)
+ + ([converter] if converter else [])
+ + (
+ ["[JsonProperty(NullValueHandling = NullValueHandling.Ignore)]"]
+ if optional and not special_optional
+ else []
+ )
+ + [
+ f'[DataMember(Name = "{prop_def.name}")]',
+ ]
+ )
+
+ if prop_def.type.kind == "stringLiteral":
+ lines.append(
+ f'public {type_name}{optional} {name} {{ get; init; }} = "{prop_def.type.value}";'
+ )
+ else:
+ lines.append(f"public {type_name}{optional} {name} {{ get; init; }}")
+
+ usings.append("DataMember")
+ if converter:
+ usings.append("JsonConverter")
+ if optional and not special_optional:
+ usings.append("JsonProperty")
+
+ return lines, type_name
+
+
+def generate_name(name_context: str, types: TypeData) -> str:
+ # If name context has a '_' it is likely a property.
+ # Try name generation using just the property name
+ parts = [to_upper_camel_case(p) for p in name_context.split("_") if len(p) > 3]
+
+ # Try the last part of the name context
+ name = parts[-1]
+ if not types.get_by_name(name) and "info" in name_context.lower():
+ return name
+
+ # Combine all parts and try again
+ name = "".join(parts)
+ if not types.get_by_name(name):
+ return name
+
+ raise ValueError(f"Unable to generate name for {name_context}")
+
+
+def generate_literal_type(
+ literal: model.LiteralType,
+ types: TypeData,
+ spec: model.LSPModel,
+ name_context: Optional[str] = None,
+) -> str:
+ if len(literal.value.properties) == 0:
+ return "LSPObject"
+
+ if types.get_by_name(literal.name) and not _get_struct(literal.name, spec):
+ return literal.name
+
+ if name_context is None:
+ raise ValueError("name_context must be provided for literal types")
+
+ if name_context.startswith("I") and name_context[1].isupper():
+ # This is a interface name ISomething => Something
+ name_context = name_context[1:]
+
+ if "_" not in name_context:
+ name_context = f"{name_context}_{get_context_from_literal(literal)}"
+
+ literal.name = generate_name(name_context, types)
+
+ usings = ["DataContract"]
+ inner = []
+ for prop in literal.value.properties:
+ prop_code, _ = generate_property(prop, spec, types, usings, literal.name)
+ inner += prop_code
+
+ lines = namespace_wrapper(
+ NAMESPACE,
+ get_usings(usings + get_types_for_usings(inner)),
+ class_wrapper(literal, inner),
+ )
+ types.add_type_info(literal, literal.name, lines)
+ return literal.name
+
+
+def generate_constructor(
+ struct: model.Structure,
+ types: TypeData,
+ properties: List[Tuple[model.Property, str]],
+) -> List[str]:
+ class_name = get_special_case_class_name(struct.name)
+ constructor = [
+ "[JsonConstructor]",
+ f"public {class_name}(",
+ ]
+
+ arguments = []
+ optional_args = []
+ assignments = []
+ ctor_data = []
+ for prop, prop_type in properties:
+ name = get_special_case_property_name(to_camel_case(prop.name))
+ special_optional = prop.type.kind == "or" and has_null_base_type(
+ prop.type.items
+ )
+ if prop.optional or special_optional:
+ if prop_type.startswith("ImmutableArray<") or prop_type.startswith(
+ "ImmutableDictionary<"
+ ):
+ optional_args += [f"{prop_type} {name} = default!"]
+ else:
+ optional_args += [f"{prop_type}? {name} = null"]
+ ctor_data += [(prop_type, name, True)]
+ elif prop.name == "jsonrpc":
+ optional_args += [f'{prop_type} {name} = "2.0"']
+ ctor_data += [(prop_type, name, True)]
+ else:
+ arguments += [f"{prop_type} {name}"]
+ ctor_data += [(prop_type, name, False)]
+
+ if prop.name == "jsonrpc":
+ assignments += [f"JsonRPC = {name};"]
+ else:
+ assignments += [f"{to_upper_camel_case(prop.name)} = {name};"]
+
+ # combine args with a '\n' to get comma with indent
+ all_args = (",\n".join(indent_lines(arguments + optional_args))).splitlines()
+ types.add_ctor(struct.name, ctor_data)
+
+ # re-split args to get the right coma placement and indent
+ constructor += all_args
+ constructor += [")", "{"]
+ constructor += indent_lines(assignments)
+ constructor += ["}"]
+ return constructor
+
+
+def generate_class_from_struct(
+ struct: model.Structure,
+ spec: model.LSPModel,
+ types: TypeData,
+ derived: Optional[str] = None,
+ attributes: Optional[List[str]] = None,
+):
+ if types.get_by_name(struct.name) or struct.name.startswith("_"):
+ return
+
+ if attributes is None:
+ attributes = []
+
+ inner = []
+ usings = ["DataContract", "JsonConstructor"]
+
+ properties = get_all_properties(struct, spec)
+ prop_types = []
+ for prop in properties:
+ prop_code, prop_type = generate_property(prop, spec, types, usings, struct.name)
+ inner += prop_code
+ prop_types += [prop_type]
+
+ ctor = generate_constructor(struct, types, zip(properties, prop_types))
+ inner = ctor + inner
+
+ lines = namespace_wrapper(
+ NAMESPACE,
+ get_usings(usings + get_types_for_usings(inner + attributes)),
+ class_wrapper(struct, inner, derived, attributes),
+ )
+ types.add_type_info(struct, struct.name, lines)
+
+
+def get_context_from_literal(literal: model.LiteralType) -> str:
+ if len(literal.value.properties) == 0:
+ return "LSPObject"
+
+ skipped = 0
+ skip = [
+ "range",
+ "rangeLength",
+ "position",
+ "position",
+ "location",
+ "locationLink",
+ "text",
+ ]
+ for prop in literal.value.properties:
+ if prop.name in skip:
+ skipped += 1
+ continue
+ return prop.name
+
+ if skipped == len(literal.value.properties):
+ # pick property with longest name
+ names = sorted([p.name for p in literal.value.properties])
+ return sorted(names, key=lambda n: len(n))[-1]
+
+ return ""
+
+
+def generate_type_alias_constructor(
+ type_def: model.TypeAlias, spec: model.LSPModel, types: TypeData
+) -> List[str]:
+ constructor = []
+
+ if type_def.type.kind == "or":
+ subset = filter_null_base_type(type_def.type.items)
+ if len(subset) == 1:
+ raise ValueError("Unable to generate constructor for single item union")
+ elif len(subset) >= 2:
+ type_name = to_upper_camel_case(type_def.name)
+ for t in subset:
+ sub_type = get_type_name(t, types, spec, type_def.name)
+ arg = get_special_case_property_name(to_camel_case(sub_type))
+ matches = re.match(r"ImmutableArray<(?P<arg>\w+)>", arg)
+ if matches:
+ arg = f"{matches['arg']}s"
+
+ constructor += [
+ f"public {type_name}({sub_type} {arg}): base({arg}) {{}}",
+ ]
+ else:
+ raise ValueError("Unable to generate constructor for empty union")
+ elif type_def.type.kind == "reference":
+ type_name = to_upper_camel_case(type_def.name)
+ ctor_data = types.get_ctor(type_def.type.name)
+ required = [
+ (prop_type, prop_name)
+ for prop_type, prop_name, optional in ctor_data
+ if not optional
+ ]
+ optional = [
+ (prop_type, prop_name)
+ for prop_type, prop_name, optional in ctor_data
+ if optional
+ ]
+
+ ctor_args = [f"{prop_type} {prop_name}" for prop_type, prop_name in required]
+ ctor_args += [
+ f"{prop_type}? {prop_name} = null" for prop_type, prop_name in optional
+ ]
+
+ base_args = [f"{prop_name}" for _, prop_name in required + optional]
+ constructor += [
+ f"public {type_name}({','.join(ctor_args)}): base({','.join(base_args)}) {{}}",
+ ]
+
+ return constructor
+
+
+def generate_type_alias_converter(
+ type_def: model.TypeAlias, spec: model.LSPModel, types: TypeData
+) -> str:
+ assert type_def.type.kind == "or"
+ subset_types = [
+ get_type_name(i, types, spec, type_def.name)
+ for i in filter_null_base_type(type_def.type.items)
+ ]
+ converter = f"{type_def.name}Converter"
+ or_type_converter = f"OrTypeConverter<{','.join(subset_types)}>"
+ or_type = f"OrType<{','.join(subset_types)}>"
+ code = [
+ f"public class {converter} : JsonConverter<{type_def.name}>",
+ "{",
+ f"private {or_type_converter} _orType;",
+ f"public {converter}()",
+ "{",
+ f"_orType = new {or_type_converter}();",
+ "}",
+ f"public override {type_def.name}? ReadJson(JsonReader reader, Type objectType, {type_def.name}? existingValue, bool hasExistingValue, JsonSerializer serializer)",
+ "{",
+ "reader = reader ?? throw new ArgumentNullException(nameof(reader));",
+ "if (reader.TokenType == JsonToken.Null) { return null; }",
+ f"var o = _orType.ReadJson(reader, objectType, existingValue, serializer);",
+ f"if (o is {or_type} orType)",
+ "{",
+ ]
+ for t in subset_types:
+ code += [
+ f"if (orType.Value?.GetType() == typeof({t}))",
+ "{",
+ f"return new {type_def.name}(({t})orType.Value);",
+ "}",
+ ]
+ code += [
+ "}",
+ 'throw new JsonSerializationException($"Unexpected token type.");',
+ "}",
+ f"public override void WriteJson(JsonWriter writer, {type_def.name}? value, JsonSerializer serializer)",
+ "{",
+ "_orType.WriteJson(writer, value, serializer);",
+ "}",
+ "}",
+ ]
+
+ code = namespace_wrapper(
+ NAMESPACE, get_usings(["JsonConverter"] + get_types_for_usings(code)), code
+ )
+
+ ref = model.Structure(**{"name": converter, "properties": []})
+ types.add_type_info(ref, converter, code)
+ return converter
+
+
+def generate_class_from_type_alias(
+ type_def: model.TypeAlias, spec: model.LSPModel, types: TypeData
+) -> None:
+ if types.get_by_name(type_def.name):
+ return
+
+ usings = ["DataContract"]
+ type_name = get_type_name(type_def.type, types, spec, type_def.name)
+ class_attributes = []
+ if type_def.type.kind == "or":
+ converter = generate_type_alias_converter(type_def, spec, types)
+ class_attributes += [f"[JsonConverter(typeof({converter}))]"]
+ usings.append("JsonConverter")
+
+ inner = generate_type_alias_constructor(type_def, spec, types)
+ lines = namespace_wrapper(
+ NAMESPACE,
+ get_usings(usings + get_types_for_usings(inner)),
+ class_wrapper(type_def, inner, type_name, class_attributes),
+ )
+ types.add_type_info(type_def, type_def.name, lines)
+
+
+def generate_class_from_variant_literals(
+ literals: List[model.LiteralType],
+ spec: model.LSPModel,
+ types: TypeData,
+ name_context: Optional[str] = None,
+) -> str:
+ name = generate_name(name_context, types)
+ if types.get_by_name(name):
+ raise ValueError(f"Name {name} already exists")
+
+ struct = model.Structure(
+ **{
+ "name": name,
+ "properties": get_properties_from_literals(literals),
+ }
+ )
+
+ lines = generate_code_for_variant_struct(struct, spec, types)
+ types.add_type_info(struct, struct.name, lines)
+ return struct.name
+
+
+def get_properties_from_literals(literals: List[model.LiteralType]) -> List[Dict[str, Any]]:
+ properties = []
+ for literal in literals:
+ assert literal.kind == "literal"
+ for prop in literal.value.properties:
+ if prop.name not in [p["name"] for p in properties]:
+ properties.append(
+ {
+ "name": prop.name,
+ "type": cattrs.unstructure(prop.type),
+ "optional": has_optional_variant(literals, prop.name), #
+ }
+ )
+ return properties
+
+
+def generate_code_for_variant_struct(
+ struct: model.Structure,
+ spec: model.LSPModel,
+ types: TypeData,
+) -> List[str]:
+ prop_types = []
+ inner = []
+ usings = ["DataContract", "JsonConstructor"]
+ for prop in struct.properties:
+ prop_code, prop_type = generate_property(prop, spec, types, usings, struct.name)
+ inner += prop_code
+ prop_types += [prop_type]
+
+ ctor_data = []
+ constructor_args = []
+ conditions = []
+ for prop, prop_type in zip(struct.properties, prop_types):
+ name = get_special_case_property_name(to_camel_case(prop.name))
+ immutable = prop_type.startswith("ImmutableArray<") or prop_type.startswith(
+ "ImmutableDictionary<"
+ )
+ constructor_args += [
+ f"{prop_type} {name}" if immutable else f"{prop_type}? {name}"
+ ]
+ ctor_data += [(prop_type, name, True)]
+ if immutable:
+ conditions += [f"({name}.IsDefault)"]
+ else:
+ conditions += [f"({name} is null)"]
+
+ sig = ", ".join(constructor_args)
+ types.add_ctor(struct.name, ctor_data)
+ ctor = [
+ f"[JsonConstructor]",
+ f"public {struct.name}({sig})",
+ "{",
+ *indent_lines(
+ [
+ f"if ({'&&'.join(conditions)})",
+ "{",
+ *indent_lines(
+ [
+ f'throw new ArgumentException("At least one of the arguments must be non-null");'
+ ]
+ ),
+ "}",
+ ]
+ ),
+ *indent_lines(
+ [
+ f"{to_upper_camel_case(prop.name)} = {get_special_case_property_name(to_camel_case(prop.name))};"
+ for prop in struct.properties
+ ]
+ ),
+ "}",
+ ]
+
+ inner = ctor + inner
+
+ return namespace_wrapper(
+ NAMESPACE,
+ get_usings(usings + get_types_for_usings(inner)),
+ class_wrapper(struct, inner, None),
+ )
+
+
+def generate_class_from_variant_type_alias(
+ type_def: model.TypeAlias,
+ spec: model.LSPModel,
+ types: TypeData,
+ name_context: Optional[str] = None,
+) -> None:
+ struct = model.Structure(
+ **{
+ "name": type_def.name,
+ "properties": get_properties_from_literals(type_def.type.items),
+ "documentation": type_def.documentation,
+ "since": type_def.since,
+ "deprecated": type_def.deprecated,
+ "proposed": type_def.proposed,
+ }
+ )
+
+ lines = generate_code_for_variant_struct(struct, spec, types)
+ types.add_type_info(type_def, type_def.name, lines)
+
+
+def has_optional_variant(literals: List[model.LiteralType], property_name: str) -> bool:
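+ # True only when `property_name` appears in every literal and is marked
+ # optional in at least one of them.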
+ count = 0
+ optional = False
+ for literal in literals:
+ for prop in literal.value.properties:
+ if prop.name == property_name:
+ count += 1
+ optional = optional or prop.optional
+ return optional and count == len(literals)
+
+
+def are_variant_literals(literals: List[model.LiteralType]) -> bool:
+ if all(i.kind == "literal" for i in literals):
+ return all(
+ has_optional_variant(literals, prop.name)
+ for prop in literals[0].value.properties
+ )
+ return False
+
+
+def is_variant_type_alias(type_def: model.TypeAlias) -> bool:
+ if type_def.type.kind == "or" and all(
+ i.kind == "literal" for i in type_def.type.items
+ ):
+ literals = type_def.type.items
+ return all(
+ has_optional_variant(literals, prop.name)
+ for prop in literals[0].value.properties
+ )
+ return False
+
+
+def copy_struct(struct_def: model.Structure, new_name: str):
+ converter = cattrs.GenConverter()
+ obj = converter.unstructure(struct_def, model.Structure)
+ obj["name"] = new_name
+ return model.Structure(**obj)
+
+
+def copy_property(prop_def: model.Property):
+ converter = cattrs.GenConverter()
+ obj = converter.unstructure(prop_def, model.Property)
+ return model.Property(**obj)
+
+
+def get_all_extends(struct_def: model.Structure, spec) -> List[model.Structure]:
+ extends = []
+ for extend in struct_def.extends:
+ extends.append(_get_struct(extend.name, spec))
+ for struct in get_all_extends(_get_struct(extend.name, spec), spec):
+ if not any(struct.name == e.name for e in extends):
+ extends.append(struct)
+ return extends
+
+
+def get_all_properties(struct: model.Structure, spec) -> List[model.Property]:
+ properties = []
+ for prop in struct.properties:
+ properties.append(copy_property(prop))
+
+ for extend in get_all_extends(struct, spec):
+ for prop in get_all_properties(extend, spec):
+ if not any(prop.name == p.name for p in properties):
+ properties.append(copy_property(prop))
+
+ if not all(mixin.kind == "reference" for mixin in struct.mixins):
+ raise ValueError(f"Struct {struct.name} has non-reference mixins")
+ for mixin in [_get_struct(mixin.name, spec) for mixin in struct.mixins]:
+ for prop in get_all_properties(mixin, spec):
+ if not any(prop.name == p.name for p in properties):
+ properties.append(copy_property(prop))
+
+ return properties
+
+
+def generate_code_for_request(request: model.Request):
+ lines = get_doc(request.documentation) + generate_extras(request)
+ lines.append(
+ f'public static string {lsp_method_to_name(request.method)} {{ get; }} = "{request.method}";'
+ )
+ return lines
+
+
+def generate_code_for_notification(notify: model.Notification):
+ lines = get_doc(notify.documentation) + generate_extras(notify)
+ lines.append(
+ f'public static string {lsp_method_to_name(notify.method)} {{ get; }} = "{notify.method}";'
+ )
+ return lines
+
+
+def generate_request_notification_methods(spec: model.LSPModel, types: TypeData):
+ inner_lines = []
+ for request in spec.requests:
+ inner_lines += generate_code_for_request(request)
+
+ for notification in spec.notifications:
+ inner_lines += generate_code_for_notification(notification)
+
+ lines = namespace_wrapper(
+ NAMESPACE,
+ get_usings(["System"] + get_types_for_usings(inner_lines)),
+ ["public static class LSPMethods", "{", *indent_lines(inner_lines), "}"],
+ )
+ enum_type = model.Enum(
+ **{
+ "name": "LSPMethods",
+ "type": {"kind": "base", "name": "string"},
+ "values": [],
+ "documentation": "LSP methods as defined in the LSP spec",
+ }
+ )
+ types.add_type_info(enum_type, "LSPMethods", lines)
+
+
+def get_message_template(
+ obj: Union[model.Request, model.Notification],
+ is_request: bool,
+) -> model.Structure:
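+ # Builds the JSON-RPC envelope structure for a request or notification, e.g.
+ # "InitializeRequest" or "TextDocumentDidOpenNotification", with jsonrpc,
+ # id (requests only), method, and params properties.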
+ text = "Request" if is_request else "Notification"
+ properties = [
+ {
+ "name": "jsonrpc",
+ "type": {"kind": "stringLiteral", "value": "2.0"},
+ "documentation": "The jsonrpc version.",
+ }
+ ]
+ if is_request:
+ properties += [
+ {
+ "name": "id",
+ "type": {
+ "kind": "or",
+ "items": [
+ {"kind": "base", "name": "string"},
+ {"kind": "base", "name": "integer"},
+ ],
+ },
+ "documentation": f"The {text} id.",
+ }
+ ]
+ properties += [
+ {
+ "name": "method",
+ "type": {"kind": "base", "name": "string"},
+ "documentation": f"The {text} method.",
+ },
+ ]
+ if obj.params:
+ properties.append(
+ {
+ "name": "params",
+ "type": cattrs.unstructure(obj.params),
+ "documentation": f"The {text} parameters.",
+ }
+ )
+ else:
+ properties.append(
+ {
+ "name": "params",
+ "type": {"kind": "reference", "name": "LSPAny"},
+ "documentation": f"The {text} parameters.",
+ "optional": True,
+ }
+ )
+
+ class_template = {
+ "name": f"{lsp_method_to_name(obj.method)}{text}",
+ "properties": properties,
+ "documentation": obj.documentation,
+ "since": obj.since,
+ "deprecated": obj.deprecated,
+ "proposed": obj.proposed,
+ }
+ return model.Structure(**class_template)
+
+
+def get_response_template(
+ obj: model.Request, spec: model.LSPModel, types: TypeData
+) -> model.Structure:
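+ # Builds the JSON-RPC response envelope, e.g. "InitializeResponse", with
+ # jsonrpc, id, optional result, and optional error properties.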
+ properties = [
+ {
+ "name": "jsonrpc",
+ "type": {"kind": "stringLiteral", "value": "2.0"},
+ "documentation": "The jsonrpc version.",
+ },
+ {
+ "name": "id",
+ "type": {
+ "kind": "or",
+ "items": [
+ {"kind": "base", "name": "string"},
+ {"kind": "base", "name": "integer"},
+ ],
+ },
+ "documentation": f"The Request id.",
+ },
+ ]
+ if obj.result:
+ properties.append(
+ {
+ "name": "result",
+ "type": cattrs.unstructure(obj.result),
+ "documentation": f"Results for the request.",
+ "optional": True,
+ }
+ )
+ else:
+ properties.append(
+ {
+ "name": "result",
+ "type": {"kind": "base", "name": "null"},
+ "documentation": f"Results for the request.",
+ "optional": True,
+ }
+ )
+ properties.append(
+ {
+ "name": "error",
+ "type": {"kind": "reference", "name": "ResponseError"},
+ "documentation": f"Error while handling the request.",
+ "optional": True,
+ }
+ )
+ class_template = {
+ "name": f"{lsp_method_to_name(obj.method)}Response",
+ "properties": properties,
+ "documentation": obj.documentation,
+ "since": obj.since,
+ "deprecated": obj.deprecated,
+ "proposed": obj.proposed,
+ }
+ return model.Structure(**class_template)
+
+
+def get_registration_options_template(
+ obj: Union[model.Request, model.Notification],
+ spec: model.LSPModel,
+ types: TypeData,
+) -> Optional[model.Structure]:
+ if obj.registrationOptions and obj.registrationOptions.kind != "reference":
+ if obj.registrationOptions.kind == "and":
+ structs = [_get_struct(s.name, spec) for s in obj.registrationOptions.items]
+ properties = []
+ for struct in structs:
+ properties += get_all_properties(struct, spec)
+
+ class_template = {
+ "name": f"{lsp_method_to_name(obj.method)}RegistrationOptions",
+ "properties": [
+ cattrs.unstructure(p, model.Property) for p in properties
+ ],
+ }
+ return model.Structure(**class_template)
+ else:
+ raise ValueError(
+ f"Unexpected registrationOptions type: {obj.registrationOptions.type.kind}"
+ )
+ return None
+
+
+def generate_all_classes(spec: model.LSPModel, types: TypeData):
+ for struct in spec.structures:
+ generate_class_from_struct(struct, spec, types)
+
+ for type_alias in spec.typeAliases:
+ if is_variant_type_alias(type_alias):
+ generate_class_from_variant_type_alias(type_alias, spec, types)
+ else:
+ generate_class_from_type_alias(type_alias, spec, types)
+
+ generate_request_notification_methods(spec, types)
+
+ for request in spec.requests:
+ partial_result_name = None
+ if request.partialResult:
+ partial_result_name = get_type_name(request.partialResult, types, spec)
+
+ struct = get_message_template(request, is_request=True)
+ generate_class_from_struct(
+ struct,
+ spec,
+ types,
+ (
+ f"IRequest<{get_type_name(request.params, types, spec)}>"
+ if request.params
+ else "IRequest<LSPAny?>"
+ ),
+ [
+ f"[Direction(MessageDirection.{to_upper_camel_case(request.messageDirection)})]",
+ f'[LSPRequest("{request.method}", typeof({lsp_method_to_name(request.method)}Response), typeof({partial_result_name}))]'
+ if partial_result_name
+ else f'[LSPRequest("{request.method}", typeof({lsp_method_to_name(request.method)}Response))]',
+ ],
+ )
+ response = get_response_template(request, spec, types)
+ generate_class_from_struct(
+ response,
+ spec,
+ types,
+ f"IResponse<{get_type_name(request.result, types, spec)}>",
+ [
+ f"[LSPResponse(typeof({lsp_method_to_name(request.method)}Request))]",
+ ],
+ )
+ registration_options = get_registration_options_template(request, spec, types)
+ if registration_options:
+ generate_class_from_struct(
+ registration_options,
+ spec,
+ types,
+ )
+
+ for notification in spec.notifications:
+ struct = get_message_template(notification, is_request=False)
+ generate_class_from_struct(
+ struct,
+ spec,
+ types,
+ (
+ f"INotification<{get_type_name(notification.params, types, spec)}>"
+ if notification.params
+ else "INotification<LSPAny>"
+ ),
+ [
+ f"[Direction(MessageDirection.{to_upper_camel_case(request.messageDirection)})]",
+ ],
+ )
+ registration_options = get_registration_options_template(
+ notification, spec, types
+ )
+ if registration_options:
+ generate_class_from_struct(
+ registration_options,
+ spec,
+ types,
+ )
diff --git a/generator/plugins/dotnet/dotnet_commons.py b/generator/plugins/dotnet/dotnet_commons.py
new file mode 100644
index 0000000..9c21eca
--- /dev/null
+++ b/generator/plugins/dotnet/dotnet_commons.py
@@ -0,0 +1,60 @@
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License.
+
+from typing import Dict, List, Tuple, Union
+
+from generator import model
+
+TypesWithId = Union[
+ model.Request,
+ model.TypeAlias,
+ model.Enum,
+ model.Structure,
+ model.Notification,
+ model.LiteralType,
+ model.ReferenceType,
+ model.ReferenceMapKeyType,
+ model.Property,
+ model.EnumItem,
+]
+
+
+class TypeData:
+ def __init__(self) -> None:
+ self._id_data: Dict[str, Tuple[str, TypesWithId, List[str]]] = {}
+ self._ctor_data: Dict[str, List[Tuple[str, str, bool]]] = {}
+
+ def add_type_info(
+ self,
+ type_def: TypesWithId,
+ type_name: str,
+ impl: List[str],
+ ) -> None:
+ if type_def.id_ in self._id_data:
+ raise Exception(f"Duplicate id {type_def.id_} for type {type_name}")
+ self._id_data[type_def.id_] = (type_name, type_def, impl)
+
+ def has_id(
+ self,
+ type_def: TypesWithId,
+ ) -> bool:
+ return type_def.id_ in self._id_data
+
+ def has_name(self, type_name: str) -> bool:
+ return any(type_name == name for name, _, _ in self._id_data.values())
+
+ def get_by_name(self, type_name: str) -> List[TypesWithId]:
+ return [
+ type_def
+ for name, type_def, _ in self._id_data.values()
+ if name == type_name
+ ]
+
+ def get_all(self) -> List[Tuple[str, List[str]]]:
+ return [(name, lines) for name, _, lines in self._id_data.values()]
+
+ def add_ctor(self, type_name: str, ctor: List[Tuple[str, str, bool]]) -> None:
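+ # `ctor` is a list of (type_name, parameter_name, is_optional) tuples
+ # describing the generated constructor signature for `type_name`.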
+ self._ctor_data[type_name] = ctor
+
+ def get_ctor(self, type_name: str) -> List[Tuple[str, str, bool]]:
+ return self._ctor_data[type_name]
diff --git a/generator/plugins/dotnet/dotnet_constants.py b/generator/plugins/dotnet/dotnet_constants.py
new file mode 100644
index 0000000..f66c96b
--- /dev/null
+++ b/generator/plugins/dotnet/dotnet_constants.py
@@ -0,0 +1,5 @@
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License.
+
+NAMESPACE = "Microsoft.LanguageServer.Protocol"
+PACKAGE_DIR_NAME = "lsprotocol"
diff --git a/generator/plugins/dotnet/dotnet_enums.py b/generator/plugins/dotnet/dotnet_enums.py
new file mode 100644
index 0000000..c5e7a6c
--- /dev/null
+++ b/generator/plugins/dotnet/dotnet_enums.py
@@ -0,0 +1,51 @@
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License.
+
+from typing import Dict, List, Union
+
+from generator import model
+
+from .dotnet_commons import TypeData
+from .dotnet_constants import NAMESPACE
+from .dotnet_helpers import (
+ indent_lines,
+ lines_to_doc_comments,
+ namespace_wrapper,
+ to_upper_camel_case,
+)
+
+
+def generate_enums(spec: model.LSPModel, types: TypeData) -> None:
+ """Generate the code for the given spec."""
+ for enum_def in spec.enumerations:
+ types.add_type_info(enum_def, enum_def.name, generate_enum(enum_def))
+
+
+def _get_enum_doc(enum: Union[model.Enum, model.EnumItem]) -> List[str]:
+ doc = enum.documentation.splitlines(keepends=False) if enum.documentation else []
+ return lines_to_doc_comments(doc)
+
+
+def generate_enum(enum: model.Enum) -> List[str]:
+ use_enum_member = all(isinstance(item.value, str) for item in enum.values)
+ imports = ["using System.Runtime.Serialization;"]
+ if use_enum_member:
+ imports += ["using Newtonsoft.Json;", "using Newtonsoft.Json.Converters;"]
+
+ lines = _get_enum_doc(enum)
+ if use_enum_member:
+ lines += ["[JsonConverter(typeof(StringEnumConverter))]"]
+ lines += [f"public enum {enum.name}", "{"]
+
+ for item in enum.values:
+ name = to_upper_camel_case(item.name)
+ inner = _get_enum_doc(item)
+ if use_enum_member:
+ inner += [f'[EnumMember(Value = "{item.value}")]{name},']
+ else:
+ inner += [f"{name} = {item.value},"]
+ lines += indent_lines(inner) + [""]
+
+ lines += ["}"]
+
+ return namespace_wrapper(NAMESPACE, (imports if use_enum_member else []), lines)
diff --git a/generator/plugins/dotnet/dotnet_helpers.py b/generator/plugins/dotnet/dotnet_helpers.py
new file mode 100644
index 0000000..2eb357a
--- /dev/null
+++ b/generator/plugins/dotnet/dotnet_helpers.py
@@ -0,0 +1,215 @@
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License.
+
+import re
+from typing import List, Optional, Union
+
+from generator import model
+
+BASIC_LINK_RE = re.compile(r"{@link +(\w+) ([\w ]+)}")
+BASIC_LINK_RE2 = re.compile(r"{@link +(\w+)\.(\w+) ([\w \.`]+)}")
+BASIC_LINK_RE3 = re.compile(r"{@link +(\w+)}")
+BASIC_LINK_RE4 = re.compile(r"{@link +(\w+)\.(\w+)}")
+PARTS_RE = re.compile(r"(([a-z0-9])([A-Z]))")
+
+
+def _fix_links(line: str) -> str:
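+ # For example, "{@link Position}" becomes '<see cref="Position" />' and
+ # "{@link TextDocumentEdit.edits edits}" becomes '<see cref="TextDocumentEdit.edits">edits</see>'.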
+ line = BASIC_LINK_RE.sub(r'<see cref="\1">\2</see>', line)
+ line = BASIC_LINK_RE2.sub(r'<see cref="\1.\2">\3</see>', line)
+ line = BASIC_LINK_RE3.sub(r'<see cref="\1" />', line)
+ line = BASIC_LINK_RE4.sub(r'<see cref="\1.\2" />', line)
+ return line
+
+
+def lines_to_doc_comments(lines: List[str]) -> List[str]:
+ if not lines:
+ return []
+
+ return (
+ ["/// <summary>"]
+ + [f"/// {_fix_links(line)}" for line in lines if not line.startswith("@")]
+ + ["/// </summary>"]
+ )
+
+
+def get_parts(name: str) -> List[str]:
+ name = name.replace("_", " ")
+ return PARTS_RE.sub(r"\2 \3", name).split()
+
+
+def to_camel_case(name: str) -> str:
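+ # e.g. "insert_text_format" or "insertTextFormat" -> "insertTextFormat"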
+ parts = get_parts(name)
+ return parts[0] + "".join([p.capitalize() for p in parts[1:]])
+
+
+def to_upper_camel_case(name: str) -> str:
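+ # e.g. "textDocument" -> "TextDocument"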
+ return "".join([c.capitalize() for c in get_parts(name)])
+
+
+def lsp_method_to_name(method: str) -> str:
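+ # e.g. "textDocument/didOpen" -> "TextDocumentDidOpen", "$/cancelRequest" -> "CancelRequest"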
+ if method.startswith("$"):
+ method = method[1:]
+ method = method.replace("/", "_")
+ return to_upper_camel_case(method)
+
+
+def file_header() -> List[str]:
+ return [
+ "// Copyright (c) Microsoft Corporation. All rights reserved.",
+ "// Licensed under the MIT License.",
+ "// ",
+ "// THIS FILE IS AUTOGENERATED, DO NOT MODIFY IT",
+ "",
+ ]
+
+
+def namespace_wrapper(
+ namespace: str, imports: List[str], lines: List[str]
+) -> List[str]:
+ indent = " " * 4
+ return (
+ file_header()
+ + imports
+ + [""]
+ + ["namespace " + namespace + " {"]
+ + [(f"{indent}{line}" if line else line) for line in lines]
+ + ["}", ""]
+ )
+
+
+def get_doc(doc: Optional[str]) -> List[str]:
+ if doc:
+ return lines_to_doc_comments(doc.splitlines(keepends=False))
+ return []
+
+
+def get_special_case_class_name(name: str) -> str:
+ # This is because C# does not allow class name and property name to be the same.
+ # public class Command{ public string Command { get; set; }} is not valid.
+ if name == "Command":
+ return "CommandAction"
+ return name
+
+
+def get_special_case_property_name(name: str) -> str:
+ if name == "string":
+ return "stringValue"
+ if name == "int":
+ return "intValue"
+ if name == "event":
+ return "eventArgs"
+ if name == "params":
+ return "paramsValue"
+ return name
+
+
+def class_wrapper(
+ type_def: Union[model.Structure, model.Notification, model.Request],
+ inner: List[str],
+ derived: Optional[str] = None,
+ class_attributes: Optional[List[str]] = None,
+ is_record=True,
+) -> List[str]:
+ if hasattr(type_def, "name"):
+ name = get_special_case_class_name(type_def.name)
+ else:
+ raise ValueError(f"Unknown type: {type_def}")
+
+ rec_or_cls = "record" if is_record else "class"
+ lines = (
+ get_doc(type_def.documentation)
+ + generate_extras(type_def)
+ + (class_attributes if class_attributes else [])
+ + [
+ "[DataContract]",
+ f"public {rec_or_cls} {name}: {derived}"
+ if derived
+ else f"public {rec_or_cls} {name}",
+ "{",
+ ]
+ )
+ lines += indent_lines(inner)
+ lines += ["}", ""]
+ return lines
+
+
+def property_wrapper(prop_def: model.Property, content: List[str]) -> List[str]:
+ lines = get_doc(prop_def.documentation) + generate_extras(prop_def)
+ lines += indent_lines(content)
+ return lines
+
+
+def indent_lines(lines: List[str], indent: str = " " * 4) -> List[str]:
+ return [(f"{indent}{line}" if line else line) for line in lines]
+
+
+def cleanup_str(text: str) -> str:
+ return text.replace("\r", "").replace("\n", "")
+
+
+def get_deprecated(text: Optional[str]) -> Optional[str]:
+ if not text:
+ return None
+
+ lines = text.splitlines(keepends=False)
+ for line in lines:
+ if line.startswith("@deprecated"):
+ return line.replace("@deprecated", "").strip()
+ return None
+
+
+def generate_extras(
+ type_def: Union[
+ model.Enum,
+ model.EnumItem,
+ model.Property,
+ model.TypeAlias,
+ model.Structure,
+ model.Request,
+ model.Notification,
+ ]
+) -> List[str]:
+ deprecated = get_deprecated(type_def.documentation)
+ extras = []
+ if type_def.deprecated:
+ extras += [f'[Obsolete("{cleanup_str(type_def.deprecated)}")]']
+ elif deprecated:
+ extras += [f'[Obsolete("{cleanup_str(deprecated)}")]']
+ if type_def.proposed:
+ extras += [f"[Proposed]"]
+ if type_def.since:
+ extras += [f'[Since("{cleanup_str(type_def.since)}")]']
+
+ if hasattr(type_def, "messageDirection"):
+ if type_def.since:
+ extras += [
+ f"[Direction(MessageDirection.{to_upper_camel_case(type_def.messageDirection)})]"
+ ]
+
+ return extras
+
+
+def get_usings(types: List[str]) -> List[str]:
+ usings = []
+
+ for t in ["DataMember", "DataContract"]:
+ if t in types:
+ usings.append("using System.Runtime.Serialization;")
+
+ for t in ["JsonConverter", "JsonConstructor", "JsonProperty", "NullValueHandling"]:
+ if t in types:
+ usings.append("using Newtonsoft.Json;")
+
+ for t in ["JToken", "JObject", "JArray"]:
+ if t in types:
+ usings.append("using Newtonsoft.Json.Linq;")
+
+ for t in ["List", "Dictionary"]:
+ if t in types:
+ usings.append("using System.Collections.Generic;")
+
+ for t in ["ImmutableArray", "ImmutableDictionary"]:
+ if t in types:
+ usings.append("using System.Collections.Immutable;")
+
+ return sorted(list(set(usings)))
diff --git a/generator/plugins/dotnet/dotnet_special_classes.py b/generator/plugins/dotnet/dotnet_special_classes.py
new file mode 100644
index 0000000..a656b6d
--- /dev/null
+++ b/generator/plugins/dotnet/dotnet_special_classes.py
@@ -0,0 +1,158 @@
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License.
+
+from typing import Dict, List, Union
+
+from generator import model
+
+from .dotnet_commons import TypeData
+from .dotnet_constants import NAMESPACE
+from .dotnet_helpers import class_wrapper, get_usings, namespace_wrapper
+
+SPECIAL_CLASSES = [
+ "LSPObject",
+ "LSPAny",
+ "LSPArray",
+ "ChangeAnnotationIdentifier",
+ "Pattern",
+ "DocumentSelector",
+ "InitializedParams",
+]
+
+
+def generate_special_classes(spec: model.LSPModel, types: TypeData) -> None:
+ """Generate code for special classes in LSP."""
+ for special_class in SPECIAL_CLASSES:
+ for class_def in spec.structures + spec.typeAliases:
+ if class_def.name == special_class:
+ generate_special_class(class_def, spec, types)
+
+
+def generate_special_class(
+ type_def: Union[model.Structure, model.TypeAlias],
+ spec: model.LSPModel,
+ types: TypeData,
+) -> None:
+ """Generate code for a special class."""
+ lines: List[str] = []
+
+ if type_def.name == "LSPObject":
+ lines = namespace_wrapper(
+ NAMESPACE,
+ get_usings(["Dictionary", "DataContract", "JsonConverter"]),
+ class_wrapper(
+ type_def,
+ ["public LSPObject(Dictionary<string, object?> value):base(value){}"],
+ "Dictionary<string, object?>",
+ ["[JsonConverter(typeof(CustomObjectConverter<LSPObject>))]"],
+ is_record=False,
+ ),
+ )
+ if type_def.name == "InitializedParams":
+ lines = namespace_wrapper(
+ NAMESPACE,
+ get_usings(["Dictionary", "DataContract", "JsonConverter"]),
+ class_wrapper(
+ type_def,
+ [
+ "public InitializedParams(Dictionary<string, object?> value):base(value){}"
+ ],
+ "Dictionary<string, object?>",
+ ["[JsonConverter(typeof(CustomObjectConverter<InitializedParams>))]"],
+ is_record=False,
+ ),
+ )
+ if type_def.name == "LSPAny":
+ lines = namespace_wrapper(
+ NAMESPACE,
+ get_usings(["DataContract", "JsonConverter"]),
+ class_wrapper(
+ type_def,
+ [
+ "public LSPAny(object? value){this.Value = value;}",
+ "public object? Value { get; set; }",
+ ],
+ "object",
+ ["[JsonConverter(typeof(LSPAnyConverter))]"],
+ ),
+ )
+ if type_def.name == "LSPArray":
+ lines = namespace_wrapper(
+ NAMESPACE,
+ get_usings(["DataContract", "List"]),
+ class_wrapper(
+ type_def,
+ ["public LSPArray(List<object> value):base(value){}"],
+ "List<object>",
+ is_record=False,
+ ),
+ )
+
+ if type_def.name == "Pattern":
+ inner = [
+ "private string pattern;",
+ "public Pattern(string value){pattern = value;}",
+ "public static implicit operator Pattern(string value) => new Pattern(value);",
+ "public static implicit operator string(Pattern pattern) => pattern.pattern;",
+ "public override string ToString() => pattern;",
+ ]
+ lines = namespace_wrapper(
+ NAMESPACE,
+ get_usings(["JsonConverter", "DataContract"]),
+ class_wrapper(
+ type_def,
+ inner,
+ None,
+ [f"[JsonConverter(typeof(CustomStringConverter<{type_def.name}>))]"],
+ ),
+ )
+
+ if type_def.name == "ChangeAnnotationIdentifier":
+ inner = [
+ "private string identifier;",
+ "public ChangeAnnotationIdentifier(string value){identifier = value;}",
+ "public static implicit operator ChangeAnnotationIdentifier(string value) => new ChangeAnnotationIdentifier(value);",
+ "public static implicit operator string(ChangeAnnotationIdentifier identifier) => identifier.identifier;",
+ "public override string ToString() => identifier;",
+ ]
+ lines = namespace_wrapper(
+ NAMESPACE,
+ get_usings(["JsonConverter", "DataContract"]),
+ class_wrapper(
+ type_def,
+ inner,
+ None,
+ [f"[JsonConverter(typeof(CustomStringConverter<{type_def.name}>))]"],
+ ),
+ )
+
+ if type_def.name == "DocumentSelector":
+ inner = [
+ "private DocumentFilter[] Filters { get; set; }",
+ "public DocumentSelector(params DocumentFilter[] filters)",
+ "{",
+ " Filters = filters ?? Array.Empty<DocumentFilter>();",
+ "}",
+ "public DocumentFilter this[int index]",
+ "{",
+ " get { return Filters[index]; }",
+ " set { Filters[index] = value; }",
+ "}",
+ "public int Length => Filters.Length;",
+ "public static implicit operator DocumentSelector(DocumentFilter[] filters) => new(filters);",
+ "public static implicit operator DocumentFilter[](DocumentSelector selector) => selector.Filters;",
+ "public IEnumerator<DocumentFilter> GetEnumerator() => ((IEnumerable<DocumentFilter>)Filters).GetEnumerator();",
+ "System.Collections.IEnumerator System.Collections.IEnumerable.GetEnumerator() => Filters.GetEnumerator();",
+ ]
+ lines = namespace_wrapper(
+ NAMESPACE,
+ get_usings(["JsonConverter", "DataContract"]),
+ class_wrapper(
+ type_def,
+ inner,
+ "IEnumerable<DocumentFilter>",
+ [f"[JsonConverter(typeof(DocumentSelectorConverter))]"],
+ ),
+ )
+
+ types.add_type_info(type_def, type_def.name, lines)
diff --git a/generator/plugins/dotnet/dotnet_utils.py b/generator/plugins/dotnet/dotnet_utils.py
new file mode 100644
index 0000000..4510527
--- /dev/null
+++ b/generator/plugins/dotnet/dotnet_utils.py
@@ -0,0 +1,58 @@
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License.
+
+import logging
+import os
+import pathlib
+import subprocess
+from typing import Dict, List
+
+import generator.model as model
+
+from .dotnet_classes import generate_all_classes
+from .dotnet_commons import TypeData
+from .dotnet_constants import NAMESPACE, PACKAGE_DIR_NAME
+from .dotnet_enums import generate_enums
+from .dotnet_helpers import namespace_wrapper
+from .dotnet_special_classes import generate_special_classes
+
+LOGGER = logging.getLogger("dotnet")
+
+
+def generate_from_spec(spec: model.LSPModel, output_dir: str) -> None:
+ """Generate the code for the given spec."""
+ output_path = pathlib.Path(output_dir, PACKAGE_DIR_NAME)
+ if not output_path.exists():
+ output_path.mkdir(parents=True, exist_ok=True)
+
+ cleanup(output_path)
+ copy_custom_classes(output_path)
+
+ LOGGER.info("Generating code in C#")
+ types = TypeData()
+ generate_package_code(spec, types)
+
+ for name, lines in types.get_all():
+ file_name = f"{name}.cs"
+ (output_path / file_name).write_text("\n".join(lines), encoding="utf-8")
+
+
+def generate_package_code(spec: model.LSPModel, types: TypeData) -> None:
+ generate_enums(spec, types)
+ generate_special_classes(spec, types)
+ generate_all_classes(spec, types)
+
+
+def cleanup(output_path: pathlib.Path) -> None:
+ """Cleanup the generated C# files."""
+ for file in output_path.glob("*.cs"):
+ file.unlink()
+
+
+def copy_custom_classes(output_path: pathlib.Path) -> None:
+ """Copy the custom classes to the output directory."""
+ custom = pathlib.Path(__file__).parent / "custom"
+ for file in custom.glob("*.cs"):
+ lines = file.read_text(encoding="utf-8").splitlines()
+ lines = namespace_wrapper(NAMESPACE, [], lines)
+ (output_path / file.name).write_text("\n".join(lines), encoding="utf-8")
diff --git a/generator/plugins/python/__init__.py b/generator/plugins/python/__init__.py
new file mode 100644
index 0000000..c5de3ef
--- /dev/null
+++ b/generator/plugins/python/__init__.py
@@ -0,0 +1,4 @@
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License.
+
+from .utils import generate_from_spec as generate
diff --git a/generator/plugins/python/utils.py b/generator/plugins/python/utils.py
new file mode 100644
index 0000000..1d5f08d
--- /dev/null
+++ b/generator/plugins/python/utils.py
@@ -0,0 +1,1122 @@
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License.
+
+import collections
+import copy
+import itertools
+import keyword
+import pathlib
+import re
+from typing import Dict, List, Optional, OrderedDict, Sequence, Tuple, Union
+
+import generator.model as model
+
+METHOD_NAME_RE_1 = re.compile(r"(.)([A-Z][a-z]+)")
+METHOD_NAME_RE_2 = re.compile(r"([a-z0-9])([A-Z])")
+PACKAGE_NAME = "lsprotocol"
+
+# These are special type aliases to preserve backward compatibility.
+custom_request_params_aliases = ["WorkspaceConfigurationParams"]
+
+
+def generate_from_spec(spec: model.LSPModel, output_dir: str) -> None:
+ code = TypesCodeGenerator(spec).get_code()
+
+ output_path = pathlib.Path(output_dir, PACKAGE_NAME)
+ if not output_path.exists():
+ output_path.mkdir(parents=True, exist_ok=True)
+
+ for file_name in code:
+ (output_path / file_name).write_text(code[file_name], encoding="utf-8")
+
+
+def _generate_field_validator(
+ type_def: model.LSP_TYPE_SPEC, optional: bool = False
+) -> str:
+ """Generates attrs.field validator for a given field base of type."""
+
+ if type_def.kind == "base":
+ if type_def.name == "integer":
+ validator = "validators.integer_validator"
+ elif type_def.name == "uinteger":
+ validator = "validators.uinteger_validator"
+ elif type_def.name in ["string", "DocumentUri", "URI", "Uri"]:
+ validator = "attrs.validators.instance_of(str)"
+ elif type_def.name == "boolean":
+ validator = "attrs.validators.instance_of(bool)"
+ elif type_def.name == "decimal":
+ validator = "attrs.validators.instance_of(float)"
+ else:
+ validator = None
+ elif type_def.kind == "stringLiteral":
+ return f"attrs.field(validator=attrs.validators.in_(['{type_def.value}']), default='{type_def.value}')"
+ else:
+ validator = None
+
+ if optional:
+ if validator:
+ return f"attrs.field(validator=attrs.validators.optional({validator}), default=None)"
+ else:
+ return "attrs.field(default=None)"
+ else:
+ if validator:
+ return f"attrs.field(validator={validator})"
+ else:
+ return "attrs.field()"
+
+
+def _to_class_name(lsp_method_name: str) -> str:
+ """Convert from LSP method name (e.g., textDocument/didSave) to python class name
+ (e.g., TextDocumentDidSave)"""
+ name = lsp_method_name[2:] if lsp_method_name.startswith("$/") else lsp_method_name
+ name = name.replace("/", "_")
+ name = METHOD_NAME_RE_1.sub(r"\1_\2", name)
+ name = METHOD_NAME_RE_2.sub(r"\1_\2", name)
+ return "".join(part.title() for part in name.split("_"))
+
+
+def lines_to_str(lines: Union[Sequence[str], List[str]]) -> str:
+ return "\n".join(lines)
+
+
+def _sanitize_comment(text: str) -> str:
+ """LSP spec comments can contain newlines or characters that should not be used or
+ can cause issues with python code clean them up."""
+ return text.replace("\r", "").replace("\n", "")
+
+
+def _is_special_field(prop: model.Property) -> bool:
+ """Detect if the field requires special handling when serialising."""
+ return prop.type.kind == "stringLiteral" or _has_null_base_type(prop)
+
+
+def _has_null_base_type(prop: model.Property) -> bool:
+ """Detect if the type is indirectly optional."""
+ if prop.type.kind == "or":
+ # If one of the types in the item list is a `null` then that means the
+ # field can be None. So we can treat that field as optional.
+ return any(t.kind == "base" and t.name == "null" for t in prop.type.items)
+ else:
+ return False
+
+
+def _to_snake_case(name: str) -> str:
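+ # e.g. "textDocumentSync" -> "text_document_sync"; names that collide with a
+ # Python keyword get a trailing underscore ("import" -> "import_")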
+ new_name = METHOD_NAME_RE_1.sub(r"\1_\2", name)
+ new_name = METHOD_NAME_RE_2.sub(r"\1_\2", new_name)
+ new_name = new_name.lower()
+ return f"{new_name}_" if keyword.iskeyword(new_name) else new_name
+
+
+def _snake_case_item_name(original: str) -> str:
+ """Generate snake case names from LSP definition names.
+
+ Example:
+ * PlainText -> PLAIN_TEXT
+ * $import -> IMPORT
+ """
+ new_name = original
+ if new_name.startswith("$"):
+ new_name = new_name[1:]
+ if new_name.startswith("/"):
+ new_name = new_name[1:]
+ new_name = new_name.replace("/", "_")
+ new_name = re.sub("(.)([A-Z][a-z]+)", r"\1_\2", new_name)
+ new_name = re.sub("([a-z0-9])([A-Z])", r"\1_\2", new_name)
+ return f"{new_name}_" if keyword.iskeyword(new_name) else new_name
+
+
+def _capitalized_item_name(original: str) -> str:
+ """Generate capitalized names from LSP definition names.
+
+ Example:
+ * someClass -> SomeClass
+ * some_class -> SomeClass
+ """
+ parts = _snake_case_item_name(original).split("_")
+ new_name = "".join(x.title() for x in parts)
+ return f"{new_name}_" if keyword.iskeyword(new_name) else new_name
+
+
+def _get_indented_documentation(
+ documentation: Optional[str], indent: str = ""
+) -> Optional[str]:
+ """Clean up doc string from LSP model and word wrap with correct indent level."""
+ doc = (
+ indent.join(documentation.splitlines(keepends=True)) if documentation else None
+ )
+ if doc:
+ doc = doc.replace("**​/*", "**/*").replace("∕", "/")
+ doc = doc[:-2] if doc.endswith("*/") else doc
+ doc = doc.strip()
+ doc = re.sub(r"\[(?P<class>[A-Za-z]*)\]\(\#(?P=class)\)", r"\1", doc)
+ doc = re.sub(r"\[(?P<class>[\S]*)(\[\])\]\(\#(?P=class)\)", r"\1\2", doc)
+ doc = re.sub(r"\[([\w\ ]+)\]\(\#[\w\.]+\)", r"\1", doc)
+ return doc
+
+
+class TypesCodeGenerator:
+ def __init__(self, lsp_model: model.LSPModel):
+ self._lsp_model = lsp_model
+ self._reset()
+
+ def _reset(self):
+ self._types: OrderedDict[str, List[str]] = collections.OrderedDict()
+ self._imports: List[str] = [
+ "import enum",
+ "import functools",
+ "from typing import Any, Dict, List, Optional, Tuple, Union",
+ "import attrs",
+ "from . import validators",
+ ]
+ self._keyword_classes: List[str] = []
+ self._special_classes: List[str] = []
+ self._special_properties: List[str] = []
+
+ def _add_keyword_class(self, class_name) -> None:
+ if class_name not in self._keyword_classes:
+ self._keyword_classes.append(class_name)
+
+ def _get_imports(self) -> List[str]:
+ return self._imports
+
+ def _get_header(self) -> List[str]:
+ return [
+ "# Copyright (c) Microsoft Corporation. All rights reserved.",
+ "# Licensed under the MIT License.",
+ "",
+ "# ****** THIS IS A GENERATED FILE, DO NOT EDIT. ******",
+ "# Steps to generate:",
+ "# 1. Checkout https://github.com/microsoft/lsprotocol",
+ "# 2. Install nox: `python -m pip install nox`",
+ "# 3. Run command: `python -m nox --session build_lsp`",
+ "",
+ ]
+
+ def get_code(self) -> Dict[str, str]:
+ self._reset()
+ self._generate_code(self._lsp_model)
+
+ code_lines = (
+ self._get_header()
+ + self._get_imports()
+ + self._get_meta_data(self._lsp_model)
+ + self._get_types_code()
+ + self._get_utility_code(self._lsp_model)
+ )
+ return {
+ "types.py": lines_to_str(code_lines),
+ }
+
+ def _get_custom_value_type(self, ref_name: str) -> Optional[str]:
+ """Returns the custom supported type."""
+ try:
+ enum_def = [e for e in self._lsp_model.enumerations if e.name == ref_name][
+ 0
+ ]
+ except IndexError:
+ enum_def = None
+ if enum_def and enum_def.supportsCustomValues:
+ if enum_def.type.name == "string":
+ return "str"
+ if enum_def.type.name in ["integer", "uinteger"]:
+ return "int"
+ return None
+
+ def _generate_type_name(
+ self,
+ type_def: model.LSP_TYPE_SPEC,
+ class_name: Optional[str] = None,
+ prefix: str = "",
+ ) -> str:
+ """Get typing wrapped type name based on LSP type definition."""
+
+ if type_def.kind == "stringLiteral":
+ # These are string constants used in some LSP types.
+ # TODO: Use this with python >= 3.8
+ # return f"Literal['{type_def.value}']"
+ return "str"
+
+ if type_def.kind == "literal":
+ # A general type 'Any' has no properties
+ if (
+ isinstance(type_def.value, model.LiteralValue)
+ and len(type_def.value.properties) == 0
+ ):
+ return "Any"
+
+ # The literal kind is a dynamically generated type and the
+ # name for it is generated as needed. It is expected that the
+ # name is already set by the time this function is called.
+ if type_def.name:
+ return f"'{type_def.name}'"
+
+ # If name is missing, and there are properties then it is a dynamic
+ # type. It should have already been generated.
+ raise ValueError(str(type_def))
+
+ if type_def.kind == "reference":
+ # The reference kind is a named type which is part of LSP.
+ if self._has_type(type_def.name):
+ ref_type = f"{prefix}{type_def.name}"
+ else:
+ # We don't have this type yet. Make it a forward reference.
+ ref_type = f"'{prefix}{type_def.name}'"
+ custom_value_type = self._get_custom_value_type(type_def.name)
+ if custom_value_type:
+ return f"Union[{ref_type}, {custom_value_type}]"
+
+ return ref_type
+
+ if type_def.kind == "array":
+ # This is a linear collection type; LSP does not specify whether
+ # it needs to be ordered. Also, using List here because
+ # cattrs does not work well with Iterable for some reason.
+ return f"List[{self._generate_type_name(type_def.element, class_name, prefix)}]"
+
+ if type_def.kind == "or":
+ # This type means that you can have either of the types under `items`
+ # as the value. So, from typing point of view this is a union. The `or`
+ # type means it is going to be one of the types, never both (see `and`)
+ # Example:
+ # id :Union[str, int]
+ # * This means that id can either be string or integer, cannot be both.
+ types = []
+ for item in type_def.items:
+ types.append(self._generate_type_name(item, class_name, prefix))
+ return f"Union[{','.join(types)}]"
+
+ if type_def.kind == "and":
+ # This type means that the value has properties of all the types under
+ # `items`. This type is equivalent of `class C(A, B)`. Where A and B are
+ # defined in `items`. This type should be generated separately, here we
+ # return the optionally provided class for this.
+ if not class_name:
+ raise ValueError(str(type_def))
+ return class_name
+
+ if type_def.kind == "base":
+ # The `base` kind is used for primitive data types.
+ if type_def.name == "decimal":
+ return "float"
+ elif type_def.name == "boolean":
+ return "bool"
+ elif type_def.name in ["integer", "uinteger"]:
+ return "int"
+ elif type_def.name in ["string", "DocumentUri", "URI"]:
+ return "str"
+ elif type_def.name == "null":
+ return "None"
+ else:
+ # Unknown base kind.
+ raise ValueError(str(type_def))
+
+ if type_def.kind == "map":
+ # This kind defines a dictionary like object.
+ return f"Dict[{self._generate_type_name(type_def.key, class_name, prefix)}, {self._generate_type_name(type_def.value, class_name, prefix)}]"
+
+ if type_def.kind == "tuple":
+ # This kind defines a tuple-like object.
+ types = []
+ for item in type_def.items:
+ types.append(self._generate_type_name(item, class_name, prefix))
+ return f"Tuple[{','.join(types)}]"
+
+ raise ValueError(str(type_def))
+
+ def _add_special(self, class_name: str, properties: List[str]) -> None:
+ if properties:
+ self._special_classes.append(class_name)
+ self._special_properties.extend([f"'{class_name}.{p}'" for p in properties])
+
+ def _get_types_code(self) -> List[str]:
+ code_lines = []
+ for v in self._types.values():
+ code_lines.extend(v)
+ # Add blank lines between types
+ code_lines.extend(["", ""])
+
+ return code_lines
+
+ def _add_import(self, import_line: str) -> None:
+ if import_line not in self._imports:
+ self._imports.append(import_line)
+
+ def _has_type(self, type_name: str) -> bool:
+ if type_name.startswith(('"', "'")):
+ type_name = type_name[1:-1]
+ return type_name in self._types
+
+ def _get_additional_methods(self, class_name: str) -> Optional[List[str]]:
+ indent = " " * 4
+ if class_name == "Position":
+ return [
+ "def __eq__(self, o: object) -> bool:",
+ f"{indent}if not isinstance(o, Position):",
+ f"{indent}{indent}return NotImplemented",
+ f"{indent}return (self.line, self.character) == (o.line, o.character)",
+ "def __gt__(self, o: 'Position') -> bool:",
+ f"{indent}if not isinstance(o, Position):",
+ f"{indent}{indent}return NotImplemented",
+ f"{indent}return (self.line, self.character) > (o.line, o.character)",
+ "def __repr__(self) -> str:",
+ f"{indent}" + "return f'{self.line}:{self.character}'",
+ ]
+ if class_name == "Range":
+ return [
+ "def __eq__(self, o: object) -> bool:",
+ f"{indent}if not isinstance(o, Range):",
+ f"{indent}{indent}return NotImplemented",
+ f"{indent}return (self.start == o.start) and (self.end == o.end)",
+ "def __repr__(self) -> str:",
+ f"{indent}" + "return f'{self.start!r}-{self.end!r}'",
+ ]
+ if class_name == "Location":
+ return [
+ "def __eq__(self, o: object) -> bool:",
+ f"{indent}if not isinstance(o, Location):",
+ f"{indent}{indent}return NotImplemented",
+ f"{indent}return (self.uri == o.uri) and (self.range == o.range)",
+ "def __repr__(self) -> str:",
+ f"{indent}" + "return f'{self.uri}:{self.range!r}'",
+ ]
+ return None
+
+ def _add_type_code(self, type_name: str, code: List[str]) -> None:
+ if not self._has_type(type_name):
+ self._types[type_name] = code
+ self._types.move_to_end(type_name)
+
+ def _add_enum(self, enum_def: model.Enum) -> None:
+ code_lines = [
+ "" if "ErrorCodes" in enum_def.name else "@enum.unique",
+ ]
+ if enum_def.type.name == "string":
+ code_lines += [f"class {enum_def.name}(str, enum.Enum):"]
+ elif enum_def.type.name in ["integer", "uinteger"]:
+ code_lines += [f"class {enum_def.name}(int, enum.Enum):"]
+ else:
+ code_lines += [f"class {enum_def.name}(enum.Enum):"]
+
+ indent = " " * 4
+ doc = _get_indented_documentation(enum_def.documentation, indent)
+ code_lines += [
+ f'{indent}"""{doc}"""' if enum_def.documentation else "",
+ f"{indent}# Since: {_sanitize_comment(enum_def.since)}"
+ if enum_def.since
+ else "",
+ f"{indent}# Proposed" if enum_def.proposed else "",
+ ]
+
+ # Remove unnecessary empty lines
+ code_lines = [code for code in code_lines if len(code) > 0]
+
+ for item in enum_def.values:
+ name = _capitalized_item_name(item.name)
+ value = (
+ f'"{item.value}"' if enum_def.type.name == "string" else f"{item.value}"
+ )
+ doc = _get_indented_documentation(item.documentation, indent)
+ item_lines = [
+ f"{indent}{name} = {value}",
+ f'{indent}"""{doc}"""' if item.documentation else "",
+ f"{indent}# Since: {_sanitize_comment(item.since)}"
+ if item.since
+ else "",
+ f"{indent}# Proposed" if item.proposed else "",
+ ]
+
+ # Remove unnecessary empty lines.
+ code_lines += [code for code in item_lines if len(code) > 0]
+
+ self._add_type_code(enum_def.name, code_lines)
+
+ def _add_enums(self, lsp_model: model.LSPModel) -> None:
+ for enum_def in lsp_model.enumerations:
+ self._add_enum(enum_def)
+
+ def _process_literal_types(
+ self, class_name: str, type_def: model.LSP_TYPE_SPEC
+ ) -> None:
+ if type_def.kind == "literal" and len(type_def.value.properties) > 0:
+ type_def.name = type_def.name or _to_class_name(f"{class_name}_Type")
+ self._add_literal_type(type_def)
+ elif type_def.kind == "or":
+ count = itertools.count(1)
+ for sub_type in type_def.items or []:
+ try:
+ # Anonymous types have no name so generate a name. We append `_Type#`
+ # to generate the name, where `#` is a number.
+ sub_type.name = sub_type.name or _to_class_name(
+ f"{class_name}_Type{next(count)}"
+ )
+ except AttributeError:
+ pass
+ self._process_literal_types(class_name, sub_type)
+ elif type_def.kind == "array":
+ try:
+ type_def.element.name = type_def.element.name or _to_class_name(
+ f"{class_name}_Type"
+ )
+ except AttributeError:
+ pass
+ self._process_literal_types(class_name, type_def.element)
+ elif type_def.kind == "and":
+ raise ValueError(str(type_def))
+ else:
+ pass
+
+ def _generate_properties(
+ self, class_name: str, properties: List[model.Property], indent: str
+ ) -> List[str]:
+ code_lines = []
+
+ # Ensure that we mark any property as optional if it supports None type.
+ # We only need to do this for properties not explicitly marked as optional.
+ for p in properties:
+ if not p.optional:
+ p.optional = _has_null_base_type(p)
+
+ # sort properties so that you have non-optional properties first then optional properties
+ properties = [
+ p for p in properties if not (p.optional or p.type.kind == "stringLiteral")
+ ] + [p for p in properties if p.optional or p.type.kind == "stringLiteral"]
+
+ for property_def in properties:
+ self._process_literal_types(
+ f"{class_name}/{property_def.name}", property_def.type
+ )
+
+ doc = _get_indented_documentation(property_def.documentation, indent)
+ type_validator = _generate_field_validator(
+ property_def.type, property_def.optional
+ )
+
+ type_name = self._generate_type_name(property_def.type)
+ if property_def.optional:
+ type_name = f"Optional[{type_name}]"
+
+ # make sure that property name is not a python keyword and snake cased.
+ name = _to_snake_case(property_def.name)
+
+ prop_lines = [f"{indent}{name}: {type_name} = {type_validator}"]
+ prop_lines += [
+ f'{indent}"""{doc}"""' if property_def.documentation else "",
+ f"{indent}# Since: {_sanitize_comment(property_def.since)}"
+ if property_def.since
+ else "",
+ f"{indent}# Proposed" if property_def.proposed else "",
+ ]
+ # Remove unnecessary empty lines and add a single empty line
+ code_lines += [code for code in prop_lines if len(code) > 0] + [""]
+
+ return code_lines
+
+ def _add_literal_type(self, literal_def: model.LiteralType) -> None:
+ if self._has_type(literal_def.name):
+ return
+
+ # indent level for use with fields, doc string, and comments.
+ indent = " " * 4
+
+ # clean up the docstring for the class itself.
+ doc = _get_indented_documentation(literal_def.documentation, indent)
+
+ # Code here should include class, its doc string, and any comments.
+ code_lines = [
+ "@attrs.define",
+ f"class {literal_def.name}:",
+ f'{indent}"""{doc}"""' if literal_def.documentation else "",
+ f"{indent}# Since: {literal_def.since}" if literal_def.since else "",
+ f"{indent}# Proposed" if literal_def.proposed else "",
+ ]
+
+ # Remove unnecessary empty lines. This can happen if doc string or comments are missing.
+ code_lines = [code for code in code_lines if len(code) > 0]
+
+ code_lines += self._generate_properties(
+ literal_def.name, literal_def.value.properties, indent
+ )
+
+ self._add_type_code(literal_def.name, code_lines)
+
+ if any(keyword.iskeyword(p.name) for p in literal_def.value.properties):
+ self._add_keyword_class(literal_def.name)
+
+ self._add_special(
+ literal_def.name,
+ [
+ _to_snake_case(p.name)
+ for p in literal_def.value.properties
+ if _is_special_field(p)
+ ],
+ )
+
+ def _add_type_alias(self, type_alias: model.TypeAlias) -> None:
+ # TypeAlias definition can contain anonymous types as a part of its
+ # definition. We generate them here first before we get to define the
+ # TypeAlias.
+ indent = " " * 4
+ count = itertools.count(1)
+ if type_alias.type.kind == "or":
+ for sub_type in type_alias.type.items or []:
+ if sub_type.kind == "literal":
+ # Anonymous types have no name so generate a name. We append `_Type#`
+ # to generate the name, where `#` is a number.
+ sub_type.name = (
+ sub_type.name or f"{type_alias.name}_Type{next(count)}"
+ )
+ self._add_literal_type(sub_type)
+
+ if type_alias.name == "LSPAny":
+ type_name = "Union[Any, None]"
+ elif type_alias.name == "LSPObject":
+ type_name = None
+ else:
+ type_name = self._generate_type_name(type_alias.type)
+ if type_alias.type.kind == "reference" and not self._has_type(
+ type_alias.type.name
+ ):
+ # TODO: remove workaround for lack of TypeAlias in 3.7
+ type_name = f"Union[{type_name}, {type_name}]"
+
+ if type_name:
+ # clean up the docstring for the class itself.
+ doc = _get_indented_documentation(type_alias.documentation)
+ code_lines = [
+ f"{type_alias.name} = {type_name}",
+ f'"""{doc}"""' if type_alias.documentation else "",
+ f"# Since: {_sanitize_comment(type_alias.since)}"
+ if type_alias.since
+ else "",
+ f"# Proposed" if type_alias.proposed else "",
+ ]
+ else:
+ doc = _get_indented_documentation(type_alias.documentation, indent)
+ code_lines = [
+ f"class {type_alias.name}:",
+ f'{indent}"""{doc}"""' if type_alias.documentation else "",
+ f"{indent}# Since: {_sanitize_comment(type_alias.since)}"
+ if type_alias.since
+ else "",
+ f"{indent}# Proposed" if type_alias.proposed else "",
+ f"{indent}pass",
+ ]
+ code_lines = [code for code in code_lines if len(code) > 0]
+
+ self._add_type_code(type_alias.name, code_lines)
+
+ def _add_type_aliases(self, lsp_model: model.LSPModel) -> None:
+ for type_def in lsp_model.typeAliases:
+ self._add_type_alias(type_def)
+
+ def _get_dependent_types(
+ self,
+ struct_def: model.Structure,
+ lsp_model: model.LSPModel,
+ ) -> List[model.Structure]:
+ # `extends` and `mixins` both are used as classes from which the
+ # current class to derive from.
+ extends = struct_def.extends or []
+ mixins = struct_def.mixins or []
+
+ definitions: List[model.Structure] = []
+ for s in extends + mixins:
+ for t in lsp_model.structures:
+ if t.name == s.name and s.kind == "reference":
+ definitions.append(t)
+ definitions.extend(self._get_dependent_types(t, lsp_model))
+
+ result: List[model.Structure] = []
+ for d in definitions:
+ if d.name in [r.name for r in result]:
+ pass
+ else:
+ result.append(d)
+ return result
+
+ def _add_structure(
+ self,
+ struct_def: model.Structure,
+ lsp_model: model.LSPModel,
+ ) -> None:
+ if self._has_type(struct_def.name):
+ return
+
+ definitions = self._get_dependent_types(struct_def, lsp_model)
+ for d in definitions:
+ self._add_structure(d, lsp_model)
+
+ indent = "" if struct_def.name == "LSPObject" else " " * 4
+ doc = _get_indented_documentation(struct_def.documentation, indent)
+ class_name = struct_def.name
+
+ class_lines = [
+ "" if class_name == "LSPObject" else "@attrs.define",
+ "@functools.total_ordering" if class_name == "Position" else "",
+ f"{class_name} = object"
+ if class_name == "LSPObject"
+ else f"class {class_name}:",
+ f'{indent}"""{doc}"""' if struct_def.documentation else "",
+ f"{indent}# Since: {_sanitize_comment(struct_def.since)}"
+ if struct_def.since
+ else "",
+ f"{indent}# Proposed" if struct_def.proposed else "",
+ ]
+
+ # Remove unnecessary empty lines and add a single empty line
+ code_lines = [code for code in class_lines if len(code) > 0] + [""]
+
+ # Inheriting from multiple classes can cause problems especially when using
+ # `attrs.define`.
+ properties = copy.deepcopy(struct_def.properties)
+ extra_properties = []
+ for d in definitions:
+ extra_properties += copy.deepcopy(d.properties)
+
+ for p in extra_properties:
+ prop_names = [prop.name for prop in properties]
+ if p.name not in prop_names:
+ properties += [copy.deepcopy(p)]
+
+ code_lines += self._generate_properties(class_name, properties, indent)
+ methods = self._get_additional_methods(class_name)
+
+ # If the class has no properties then add `pass`
+ if len(properties) == 0 and not methods and class_name != "LSPObject":
+ code_lines += [f"{indent}pass"]
+
+ if methods:
+ code_lines += [f"{indent}{l}" for l in methods]
+
+ self._add_type_code(class_name, code_lines)
+
+ # Detect if the class has properties that might be keywords.
+ if any(keyword.iskeyword(p.name) for p in properties):
+ self._add_keyword_class(class_name)
+
+ self._add_special(
+ class_name,
+ [_to_snake_case(p.name) for p in properties if _is_special_field(p)],
+ )
+
+ def _add_structures(self, lsp_model: model.LSPModel) -> None:
+ for struct_def in lsp_model.structures:
+ self._add_structure(struct_def, lsp_model)
+
+ def _add_and_type(
+ self,
+ type_def: model.LSP_TYPE_SPEC,
+ class_name: str,
+ structures: List[model.Structure],
+ ) -> None:
+ if type_def.kind != "and":
+ raise ValueError("Only `and` type code generation is supported.")
+
+ indent = " " * 4
+ code_lines = [
+ "@attrs.define",
+ f"class {class_name}:",
+ ]
+
+ properties = []
+ for item in type_def.items:
+ if item.kind == "reference":
+ for structure in structures:
+ if structure.name == item.name:
+ properties += copy.deepcopy(structure.properties)
+ else:
+ raise ValueError(
+ "Only `reference` types are supported for `and` type generation."
+ )
+
+ code_lines += self._generate_properties(class_name, properties, indent)
+
+ self._add_type_code(class_name, code_lines)
+ if any(keyword.iskeyword(p.name) for p in properties):
+ self._add_keyword_class(class_name)
+
+ self._add_special(
+ class_name,
+ [_to_snake_case(p.name) for p in properties if _is_special_field(p)],
+ )
+
+ def _add_and_types(self, lsp_model: model.LSPModel) -> None:
+ # Collect all and types in the model from known locations
+ and_types = []
+ for request in lsp_model.requests:
+ if request.params:
+ if request.params.kind == "and":
+ class_name = f"{_to_class_name(request.method)}Params"
+ and_types.append((f"{class_name}", request.params))
+
+ if request.registrationOptions:
+ if request.registrationOptions.kind == "and":
+ class_name = f"{_to_class_name(request.method)}Options"
+ and_types.append((f"{class_name}", request.registrationOptions))
+
+ for notification in lsp_model.notifications:
+ if notification.params:
+ if notification.params.kind == "and":
+ class_name = f"{_to_class_name(notification.method)}Params"
+ and_types.append((f"{class_name}", notification.params))
+
+ if notification.registrationOptions:
+ if notification.registrationOptions.kind == "and":
+ class_name = f"{_to_class_name(notification.method)}Options"
+ and_types.append(
+ (f"{class_name}", notification.registrationOptions)
+ )
+
+ for name, type_def in and_types:
+ self._add_and_type(type_def, name, lsp_model.structures)
+
+    def _add_requests(self, lsp_model: model.LSPModel) -> None:
+ indent = " " * 4
+
+ self._add_type_code(
+ "ResponseError",
+ [
+ "@attrs.define",
+ f"class ResponseError:",
+ f"{indent}code: int = attrs.field(validator=validators.integer_validator)",
+ f'{indent}"""A number indicating the error type that occurred."""',
+ f"{indent}message: str = attrs.field(validator=attrs.validators.instance_of(str))",
+ f'{indent}"""A string providing a short description of the error."""',
+ f"{indent}data:Optional[LSPAny] = attrs.field(default=None)",
+ f'{indent}"""A primitive or structured value that contains additional information',
+ f'{indent}about the error. Can be omitted."""',
+ ],
+ )
+
+ self._add_type_code(
+ "ResponseErrorMessage",
+ [
+ "@attrs.define",
+ "class ResponseErrorMessage:",
+ f"{indent}id:Optional[Union[int, str]] = attrs.field(default=None)",
+ f'{indent}"""The request id where the error occurred."""',
+ f"{indent}error:Optional[ResponseError] = attrs.field(default=None)",
+ f'{indent}"""The error object in case a request fails."""',
+ f'{indent}jsonrpc: str = attrs.field(default="2.0")',
+ ],
+ )
+
+ self._add_special("ResponseErrorMessage", ["error", "jsonrpc"])
+
+        for request in lsp_model.requests:
+ class_name = _to_class_name(request.method)
+ doc = _get_indented_documentation(request.documentation, indent)
+
+ if request.params:
+ if (
+ request.params.kind == "reference"
+ and f"{class_name}Params" in custom_request_params_aliases
+ ):
+ params_type = f"{class_name}Params"
+
+ self._add_type_alias(
+ model.TypeAlias(
+ name=params_type,
+ type={"kind": "reference", "name": request.params.name},
+ )
+ )
+ else:
+ params_type = self._generate_type_name(
+ request.params, f"{class_name}Params"
+ )
+ if not self._has_type(params_type):
+ raise ValueError(f"{class_name}Params type definition is missing.")
+ params_field = "attrs.field()"
+ else:
+ params_type = "Optional[None]"
+ params_field = "attrs.field(default=None)"
+
+ result_type = None
+ if request.result:
+ result_type = self._generate_type_name(request.result)
+ result_field = "attrs.field(default=None)"
+ else:
+ result_type = "Optional[None]"
+ result_field = "attrs.field(default=None)"
+
+ self._add_type_code(
+ f"{class_name}Request",
+ [
+ "@attrs.define",
+ f"class {class_name}Request:",
+ f'{indent}"""{doc}"""' if request.documentation else "",
+ f"{indent}id:Union[int, str] = attrs.field()",
+ f'{indent}"""The request id."""',
+ f"{indent}params: {params_type} ={params_field}",
+ f'{indent}method: str = "{request.method}"',
+ f'{indent}"""The method to be invoked."""',
+ f'{indent}jsonrpc: str = attrs.field(default="2.0")',
+ ],
+ )
+ self._add_special(f"{class_name}Request", ["method", "jsonrpc"])
+
+ self._add_type_code(
+ f"{class_name}Response",
+ [
+ "@attrs.define",
+ f"class {class_name}Response:",
+ f"{indent}id:Optional[Union[int, str]] = attrs.field()",
+ f'{indent}"""The request id."""',
+ f"{indent}result: {result_type} = {result_field}",
+ f'{indent}jsonrpc: str = attrs.field(default="2.0")',
+ ],
+ )
+ self._add_special(f"{class_name}Response", ["result", "jsonrpc"])
+
+    def _add_notifications(self, lsp_model: model.LSPModel) -> None:
+ indent = " " * 4
+
+        for notification in lsp_model.notifications:
+ class_name = _to_class_name(notification.method)
+ doc = _get_indented_documentation(notification.documentation, indent)
+
+ if notification.params:
+ params_type = self._generate_type_name(
+ notification.params, f"{class_name}Params"
+ )
+ if not self._has_type(params_type):
+ raise ValueError(f"{class_name}Params type definition is missing.")
+ params_field = "attrs.field()"
+ else:
+ params_type = "Optional[None]"
+ params_field = "attrs.field(default=None)"
+
+ self._add_type_code(
+ f"{class_name}Notification",
+ [
+ "@attrs.define",
+ f"class {class_name}Notification:",
+ f'{indent}"""{doc}"""' if notification.documentation else "",
+ f"{indent}params: {params_type} = {params_field}",
+ f"{indent}method:str = attrs.field(",
+ f'validator=attrs.validators.in_(["{notification.method}"]),',
+ f'default="{notification.method}",',
+ ")",
+ f'{indent}"""The method to be invoked."""',
+ f'{indent}jsonrpc: str = attrs.field(default="2.0")',
+ ],
+ )
+ self._add_special(f"{class_name}Notification", ["method", "jsonrpc"])
+
+ def _add_lsp_method_type(self, lsp_model: model.LSPModel) -> None:
+ indent = " " * 4
+ directions = set(
+ [x.messageDirection for x in (lsp_model.requests + lsp_model.notifications)]
+ )
+ code_lines = [
+ "@enum.unique",
+ "class MessageDirection(enum.Enum):",
+ ]
+ code_lines += sorted(
+ [f"{indent}{_capitalized_item_name(m)} = '{m}'" for m in directions]
+ )
+ self._add_type_code("MessageDirection", code_lines)
+
+ def _add_special_types(self, lsp_model: model.LSPModel) -> None:
+ # Ensure LSPObject gets added first.
+        # Try to find it in the type aliases or structures.
+ lsp_object = list(
+ filter(
+ lambda s: s.name == "LSPObject",
+ [*lsp_model.typeAliases, *lsp_model.structures],
+ )
+ )
+
+ if len(lsp_object) == 0:
+ raise ValueError("LSPObject type definition is missing.")
+ elif len(lsp_object) > 1:
+ raise ValueError("LSPObject type definition is duplicated.")
+ else:
+ if isinstance(lsp_object[0], model.TypeAlias):
+ self._add_type_alias(lsp_object[0])
+ elif isinstance(lsp_object[0], model.Structure):
+ self._add_structure(lsp_object[0], lsp_model)
+ else:
+ raise ValueError("LSPObject type definition is invalid.")
+
+ def _generate_code(self, lsp_model: model.LSPModel) -> None:
+ self._add_enums(lsp_model)
+ self._add_special_types(lsp_model)
+ self._add_type_aliases(lsp_model)
+ self._add_structures(lsp_model)
+ self._add_and_types(lsp_model)
+ self._add_requests(lsp_model)
+ self._add_notifications(lsp_model)
+ self._add_lsp_method_type(lsp_model)
+
+ def _get_utility_code(self, lsp_model: model.LSPModel) -> List[str]:
+ request_classes = []
+ response_classes = []
+ notification_classes = []
+
+ methods = set(
+ [x.method for x in (lsp_model.requests + lsp_model.notifications)]
+ )
+ code_lines = (
+ [""]
+ + sorted([f"{_snake_case_item_name(m).upper()} = '{m}'" for m in methods])
+ + [""]
+ )
+
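+        # METHOD_TO_TYPES maps each method constant to (request or notification class, response class or None, params type, registration options type).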
+ code_lines += ["METHOD_TO_TYPES = {", " # Requests"]
+
+ request_types = []
+ for request in lsp_model.requests:
+ class_name = _to_class_name(request.method)
+ request_class = f"{class_name}Request"
+ response_class = f"{class_name}Response"
+
+ request_classes.append(request_class)
+ response_classes.append(response_class)
+
+ params_type = None
+ if request.params:
+ params_type = self._generate_type_name(
+ request.params, f"{class_name}Params"
+ ).strip("\"'")
+
+ registration_type = None
+ if request.registrationOptions:
+ registration_type = self._generate_type_name(
+ request.registrationOptions, f"{class_name}Options"
+ ).strip("\"'")
+
+ key = f"{_snake_case_item_name(request.method).upper()}"
+ request_types += [
+ f"{key}: ({request_class}, {response_class}, {params_type}, {registration_type}),"
+ ]
+
+ code_lines += sorted(request_types)
+ code_lines += [" # Notifications"]
+
+ notify_types = []
+ for notification in lsp_model.notifications:
+ class_name = _to_class_name(notification.method)
+ notification_class = f"{class_name}Notification"
+ notification_classes.append(notification_class)
+
+ params_type = None
+ if notification.params:
+ params_type = self._generate_type_name(
+ notification.params, f"{class_name}Params"
+ ).strip("\"'")
+
+ registration_type = None
+ if notification.registrationOptions:
+ registration_type = self._generate_type_name(
+ notification.registrationOptions, f"{class_name}Options"
+ ).strip("\"'")
+
+ key = f"{_snake_case_item_name(notification.method).upper()}"
+ notify_types += [
+ f"{key}: ({notification_class}, None, {params_type}, {registration_type}),"
+ ]
+
+ code_lines += sorted(notify_types)
+ code_lines += ["}"]
+
+ code_lines += [
+ f"REQUESTS = Union[{', '.join(sorted(request_classes))}]",
+ f"RESPONSES = Union[{', '.join(sorted(response_classes))}]",
+ f"NOTIFICATIONS = Union[{', '.join(sorted(notification_classes))}]",
+ "MESSAGE_TYPES = Union[REQUESTS, RESPONSES, NOTIFICATIONS, ResponseErrorMessage]",
+ "",
+ ]
+
+ # These classes have properties that may be python keywords.
+ code_lines += [
+ f"_KEYWORD_CLASSES = [{', '.join(sorted(set(self._keyword_classes)))}]"
+ ]
+ code_lines += [
+ "def is_keyword_class(cls: type) -> bool:",
+ ' """Returns true if the class has a property that may be python keyword."""',
+ " return any(cls is c for c in _KEYWORD_CLASSES)",
+ "",
+ ]
+
+        # These are classes whose properties need special handling during
+        # serialization, as required by the LSP spec.
+ # See: https://github.com/microsoft/vscode-languageserver-node/issues/967
+ code_lines += [
+ f"_SPECIAL_CLASSES = [{', '.join(sorted(set(self._special_classes)))}]"
+ ]
+ code_lines += [
+ "def is_special_class(cls: type) -> bool:",
+ ' """Returns true if the class or its properties require special handling."""',
+ " return any(cls is c for c in _SPECIAL_CLASSES)",
+ "",
+ ]
+
+        # This is a collection of `class_name.property` strings. These properties
+        # need special handling as described by the LSP spec.
+ # See: https://github.com/microsoft/vscode-languageserver-node/issues/967
+ #
+ # Example:
+ # Consider RenameRegistrationOptions
+ # * document_selector property:
+ # When you set `document_selector` to None in python it has to be preserved when
+ # serializing it. Since the serialized JSON value `{"document_selector": null}`
+        #    means to use the Client's document selector. Omitting it might throw an error.
+ # * prepare_provider property
+ # This property does NOT need special handling, since omitting it or using
+ # `{"prepare_provider": null}` has the same meaning.
+ code_lines += [
+ f"_SPECIAL_PROPERTIES = [{', '.join(sorted(set(self._special_properties)))}]"
+ ]
+ code_lines += [
+ "def is_special_property(cls: type, property_name:str) -> bool:",
+ ' """Returns true if the class or its properties require special handling.',
+ " Example:",
+ " Consider RenameRegistrationOptions",
+ " * document_selector property:",
+ " When you set `document_selector` to None in python it has to be preserved when",
+ ' serializing it. Since the serialized JSON value `{"document_selector": null}`',
+ " means use the Clients document selector. Omitting it might throw error. ",
+ " * prepare_provider property",
+ " This property does NOT need special handling, since omitting it or using",
+ ' `{"prepare_provider": null}` in JSON has the same meaning.',
+ ' """',
+ ' qualified_name = f"{cls.__name__}.{property_name}"',
+ " return qualified_name in _SPECIAL_PROPERTIES",
+ "",
+ ]
+
+ code_lines += ["", "ALL_TYPES_MAP: Dict[str, Union[type, object]] = {"]
+ code_lines += sorted([f"'{name}': {name}," for name in set(self._types.keys())])
+ code_lines += ["}", ""]
+
+ code_lines += ["_MESSAGE_DIRECTION: Dict[str, str] = {"]
+
+ code_lines += ["# Request methods"]
+ code_lines += sorted(
+ [
+ f'{_snake_case_item_name(r.method).upper()}:"{r.messageDirection}",'
+ for r in lsp_model.requests
+ ]
+ )
+ code_lines += ["# Notification methods"]
+ code_lines += sorted(
+ [
+ f'{_snake_case_item_name(n.method).upper()}:"{n.messageDirection}",'
+ for n in lsp_model.notifications
+ ]
+ )
+
+ code_lines += ["}", ""]
+
+ code_lines += [
+ "def message_direction(method:str) -> str:",
+ ' """Returns message direction clientToServer, serverToClient or both."""',
+ " return _MESSAGE_DIRECTION[method]",
+ "",
+ ]
+
+ return code_lines
+
+ def _get_meta_data(self, lsp_model: model.LSPModel) -> List[str]:
+ return [f"__lsp_version__ = '{lsp_model.metaData.version}'"]
diff --git a/generator/plugins/rust/__init__.py b/generator/plugins/rust/__init__.py
new file mode 100644
index 0000000..cbc2c00
--- /dev/null
+++ b/generator/plugins/rust/__init__.py
@@ -0,0 +1,4 @@
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License.
+
+from .rust_utils import generate_from_spec as generate
diff --git a/generator/plugins/rust/rust_commons.py b/generator/plugins/rust/rust_commons.py
new file mode 100644
index 0000000..4022e27
--- /dev/null
+++ b/generator/plugins/rust/rust_commons.py
@@ -0,0 +1,681 @@
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License.
+
+from typing import Dict, List, Optional, Tuple, Union
+
+from generator import model
+
+from .rust_lang_utils import (
+ get_parts,
+ indent_lines,
+ lines_to_doc_comments,
+ to_snake_case,
+ to_upper_camel_case,
+)
+
+TypesWithId = Union[
+ model.Request,
+ model.TypeAlias,
+ model.Enum,
+ model.Structure,
+ model.Notification,
+ model.LiteralType,
+ model.ReferenceType,
+ model.ReferenceMapKeyType,
+ model.Property,
+ model.EnumItem,
+]
+
+
+class TypeData:
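+    """Collects the generated Rust code for each spec type, keyed by the type's `id_` so every type is emitted only once."""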
+ def __init__(self) -> None:
+ self._id_data: Dict[
+ str,
+ Tuple[
+ str,
+ TypesWithId,
+ List[str],
+ ],
+ ] = {}
+
+ def add_type_info(
+ self,
+ type_def: TypesWithId,
+ type_name: str,
+ impl: List[str],
+ ) -> None:
+ if type_def.id_ in self._id_data:
+ raise Exception(f"Duplicate id {type_def.id_} for type {type_name}")
+ self._id_data[type_def.id_] = (type_name, type_def, impl)
+
+ def has_id(
+ self,
+ type_def: TypesWithId,
+ ) -> bool:
+ return type_def.id_ in self._id_data
+
+ def has_name(self, type_name: str) -> bool:
+ return any(type_name == name for name, _, _ in self._id_data.values())
+
+ def get_by_name(self, type_name: str) -> List[TypesWithId]:
+        return [type_def for name, type_def, _ in self._id_data.values() if name == type_name]
+
+ def get_lines(self):
+ lines = []
+ for _, _, impl in self._id_data.values():
+ lines += impl + ["", ""]
+ return lines
+
+
+def generate_custom_enum(type_data: TypeData) -> None:
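+    """Registers the generic helper enums (CustomStringEnum, CustomIntEnum, OR2-OR7, LSPNull) used by the generated types."""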
+ type_data.add_type_info(
+ model.ReferenceType(kind="reference", name="CustomStringEnum"),
+ "CustomStringEnum",
+ [
+ "/// This type allows extending any string enum to support custom values.",
+ "#[derive(Serialize, Deserialize, PartialEq, Debug, Eq, Clone)]",
+ "#[serde(untagged)]",
+ "pub enum CustomStringEnum<T> {",
+ " /// The value is one of the known enum values.",
+ " Known(T),",
+ " /// The value is custom.",
+ " Custom(String),",
+ "}",
+ "",
+ ],
+ )
+ type_data.add_type_info(
+ model.ReferenceType(kind="reference", name="CustomIntEnum"),
+ "CustomIntEnum",
+ [
+ "/// This type allows extending any integer enum to support custom values.",
+ "#[derive(Serialize, Deserialize, PartialEq, Debug, Eq, Clone)]",
+ "#[serde(untagged)]",
+ "pub enum CustomIntEnum<T> {",
+ " /// The value is one of the known enum values.",
+ " Known(T),",
+ " /// The value is custom.",
+ " Custom(i32),",
+ "}",
+ "",
+ ],
+ )
+ type_data.add_type_info(
+ model.ReferenceType(kind="reference", name="OR2"),
+ "OR2",
+ [
+ "/// This allows a field to have two types.",
+ "#[derive(Serialize, Deserialize, PartialEq, Debug, Eq, Clone)]",
+ "#[serde(untagged)]",
+ "pub enum OR2<T, U> {",
+ " T(T),",
+ " U(U),",
+ "}",
+ "",
+ ],
+ )
+ type_data.add_type_info(
+ model.ReferenceType(kind="reference", name="OR3"),
+ "OR3",
+ [
+ "/// This allows a field to have three types.",
+ "#[derive(Serialize, Deserialize, PartialEq, Debug, Eq, Clone)]",
+ "#[serde(untagged)]",
+ "pub enum OR3<T, U, V> {",
+ " T(T),",
+ " U(U),",
+ " V(V),",
+ "}",
+ "",
+ ],
+ )
+ type_data.add_type_info(
+ model.ReferenceType(kind="reference", name="OR4"),
+ "OR4",
+ [
+ "/// This allows a field to have four types.",
+ "#[derive(Serialize, Deserialize, PartialEq, Debug, Eq, Clone)]",
+ "#[serde(untagged)]",
+ "pub enum OR4<T, U, V, W> {",
+ " T(T),",
+ " U(U),",
+ " V(V),",
+ " W(W),",
+ "}",
+ "",
+ ],
+ )
+ type_data.add_type_info(
+ model.ReferenceType(kind="reference", name="OR5"),
+ "OR5",
+ [
+ "/// This allows a field to have five types.",
+ "#[derive(Serialize, Deserialize, PartialEq, Debug, Eq, Clone)]",
+ "#[serde(untagged)]",
+ "pub enum OR5<T, U, V, W, X> {",
+ " T(T),",
+ " U(U),",
+ " V(V),",
+ " W(W),",
+ " X(X),",
+ "}",
+ "",
+ ],
+ )
+ type_data.add_type_info(
+ model.ReferenceType(kind="reference", name="OR6"),
+ "OR6",
+ [
+ "/// This allows a field to have six types.",
+ "#[derive(Serialize, Deserialize, PartialEq, Debug, Eq, Clone)]",
+ "#[serde(untagged)]",
+ "pub enum OR6<T, U, V, W, X, Y> {",
+ " T(T),",
+ " U(U),",
+ " V(V),",
+ " W(W),",
+ " X(X),",
+ " Y(Y),",
+ "}",
+ "",
+ ],
+ )
+ type_data.add_type_info(
+ model.ReferenceType(kind="reference", name="OR7"),
+ "OR7",
+ [
+ "/// This allows a field to have seven types.",
+ "#[derive(Serialize, Deserialize, PartialEq, Debug, Eq, Clone)]",
+ "#[serde(untagged)]",
+ "pub enum OR7<T, U, V, W, X, Y, Z> {",
+ " T(T),",
+ " U(U),",
+ " V(V),",
+ " W(W),",
+ " X(X),",
+ " Y(Y),",
+ " Z(Z),",
+ "}",
+ "",
+ ],
+ )
+ type_data.add_type_info(
+ model.ReferenceType(kind="reference", name="LSPNull"),
+ "LSPNull",
+ [
+ "/// This allows a field to always have null or empty value.",
+ "#[derive(Serialize, Deserialize, PartialEq, Debug, Eq, Clone)]",
+ "#[serde(untagged)]",
+ "pub enum LSPNull {",
+ " None,",
+ "}",
+ "",
+ ],
+ )
+
+
+def get_definition(
+ name: str, spec: model.LSPModel
+) -> Optional[Union[model.TypeAlias, model.Structure]]:
+ for type_def in spec.typeAliases + spec.structures:
+ if type_def.name == name:
+ return type_def
+ return None
+
+
+def generate_special_types(model: model.LSPModel, types: TypeData) -> None:
+ special_types = [
+ get_definition("LSPAny", model),
+ get_definition("LSPObject", model),
+ get_definition("LSPArray", model),
+ get_definition("SelectionRange", model),
+ ]
+
+ for type_def in special_types:
+ if type_def:
+ doc = (
+ type_def.documentation.splitlines(keepends=False)
+ if type_def.documentation
+ else []
+ )
+ lines = lines_to_doc_comments(doc)
+ lines += generate_extras(type_def)
+
+ if type_def.name == "LSPAny":
+ lines += [
+ "#[derive(Serialize, Deserialize, PartialEq, Debug, Eq, Clone)]",
+ "#[serde(untagged)]",
+ "pub enum LSPAny {",
+ " String(String),",
+ " Integer(i32),",
+ " UInteger(u32),",
+ " Decimal(Decimal),",
+ " Boolean(bool),",
+ " Object(LSPObject),",
+ " Array(LSPArray),",
+ " Null,",
+ "}",
+ ]
+ elif type_def.name == "LSPObject":
+ lines += ["type LSPObject = serde_json::Value;"]
+ elif type_def.name == "LSPArray":
+ lines += ["type LSPArray = Vec<LSPAny>;"]
+ elif type_def.name == "SelectionRange":
+ lines += [
+ "#[derive(Serialize, Deserialize, PartialEq, Debug, Eq, Clone)]",
+ "pub struct SelectionRange {",
+ ]
+ for property in type_def.properties:
+ doc = (
+ property.documentation.splitlines(keepends=False)
+ if property.documentation
+ else []
+ )
+ lines += lines_to_doc_comments(doc)
+ lines += generate_extras(property)
+ prop_name = to_snake_case(property.name)
+ prop_type = get_type_name(
+ property.type, types, model, property.optional
+ )
+ if "SelectionRange" in prop_type:
+ prop_type = prop_type.replace(
+ "SelectionRange", "Box<SelectionRange>"
+ )
+ lines += [f"pub {prop_name}: {prop_type},"]
+ lines += [""]
+ lines += ["}"]
+ lines += [""]
+ types.add_type_info(type_def, type_def.name, lines)
+
+
+def fix_lsp_method_name(name: str) -> str:
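+    """Converts an LSP method name such as `textDocument/didOpen` into an UpperCamelCase identifier, dropping a leading `$/`."""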
+ if name.startswith("$/"):
+ name = name[2:]
+ return to_upper_camel_case(name.replace("/", "_"))
+
+
+def generate_special_enum(enum_name: str, items: List[str]) -> List[str]:
+ lines = [
+ "#[derive(Serialize, Deserialize, PartialEq, Debug, Eq, Clone)]",
+ f"pub enum {enum_name}" "{",
+ ]
+ for item in items:
+ lines += indent_lines(
+ [
+ f'#[serde(rename = "{item}")]',
+ f"{fix_lsp_method_name(item)},",
+ ]
+ )
+ lines += ["}"]
+ return lines
+
+
+def generate_extra_types(spec: model.LSPModel, type_data: TypeData) -> None:
+ type_data.add_type_info(
+ model.ReferenceType(kind="reference", name="LSPRequestMethods"),
+ "LSPRequestMethods",
+ generate_special_enum("LSPRequestMethods", [m.method for m in spec.requests]),
+ )
+ type_data.add_type_info(
+ model.ReferenceType(kind="reference", name="LSPNotificationMethods"),
+ "LSPNotificationMethods",
+ generate_special_enum(
+ "LSPNotificationMethods", [m.method for m in spec.notifications]
+ ),
+ )
+
+ direction = set([m.messageDirection for m in (spec.requests + spec.notifications)])
+ type_data.add_type_info(
+ model.ReferenceType(kind="reference", name="MessageDirection"),
+ "MessageDirection",
+ generate_special_enum("MessageDirection", direction),
+ )
+
+
+def generate_commons(
+ model: model.LSPModel, type_data: TypeData
+) -> None:
+ generate_custom_enum(type_data)
+ generate_special_types(model, type_data)
+ generate_extra_types(model, type_data)
+
+
+def lsp_to_base_types(lsp_type: model.BaseType):
+ if lsp_type.name in ["string", "DocumentUri", "URI", "RegExp"]:
+ return "String"
+ elif lsp_type.name in ["decimal"]:
+ return "Decimal"
+ elif lsp_type.name in ["integer"]:
+ return "i32"
+ elif lsp_type.name in ["uinteger"]:
+ return "u32"
+ elif lsp_type.name in ["boolean"]:
+ return "bool"
+
+ # null should be handled by the caller as an Option<> type
+ raise ValueError(f"Unknown base type: {lsp_type.name}")
+
+
+def _get_enum(name: str, spec: model.LSPModel) -> Optional[model.Enum]:
+ for enum in spec.enumerations:
+ if enum.name == name:
+ return enum
+ return None
+
+
+def get_from_name(
+ name: str, spec: model.LSPModel
+) -> Optional[Union[model.Structure, model.Enum, model.TypeAlias]]:
+ for some in spec.enumerations + spec.structures + spec.typeAliases:
+ if some.name == name:
+ return some
+ return None
+
+
+def get_extended_properties(
+ struct_def: model.Structure, spec: model.LSPModel
+) -> List[model.Property]:
+ properties = [p for p in struct_def.properties]
+ for t in struct_def.extends + struct_def.mixins:
+ if t.kind == "reference":
+ s = get_from_name(t.name, spec)
+ if s:
+ properties += [p for p in s.properties]
+ elif t.kind == "literal":
+ properties += [p for p in t.value.properties]
+ else:
+ raise ValueError(f"Unhandled extension type or mixin type: {t.kind}")
+ unique_props = []
+ for p in properties:
+ if not any((p.name == u.name) for u in unique_props):
+ unique_props.append(p)
+ return sorted(unique_props, key=lambda p: p.name)
+
+
+def _is_str_enum(enum_def: model.Enum) -> bool:
+ return all(isinstance(item.value, str) for item in enum_def.values)
+
+
+def _is_int_enum(enum_def: model.Enum) -> bool:
+ return all(isinstance(item.value, int) for item in enum_def.values)
+
+
+def generate_or_type(
+ type_def: model.LSP_TYPE_SPEC,
+ types: TypeData,
+ spec: model.LSPModel,
+ optional: Optional[bool] = None,
+ name_context: Optional[str] = None,
+) -> str:
+ pass
+
+
+def generate_and_type(
+ type_def: model.LSP_TYPE_SPEC,
+ types: TypeData,
+ spec: model.LSPModel,
+ optional: Optional[bool] = None,
+ name_context: Optional[str] = None,
+) -> str:
+ pass
+
+
+def get_type_name(
+ type_def: model.LSP_TYPE_SPEC,
+ types: TypeData,
+ spec: model.LSPModel,
+ optional: Optional[bool] = None,
+ name_context: Optional[str] = None,
+) -> str:
+ if type_def.kind == "reference":
+ enum_def = _get_enum(type_def.name, spec)
+ if enum_def and enum_def.supportsCustomValues:
+ if _is_str_enum(enum_def):
+ name = f"CustomStringEnum<{enum_def.name}>"
+ elif _is_int_enum(enum_def):
+ name = f"CustomIntEnum<{enum_def.name}>"
+ else:
+ name = type_def.name
+ elif type_def.kind == "array":
+ name = f"Vec<{get_type_name(type_def.element, types, spec)}>"
+ elif type_def.kind == "map":
+ key_type = get_type_name(type_def.key, types, spec)
+ value_type = get_type_name(type_def.value, types, spec)
+ name = f"HashMap<{key_type}, {value_type}>"
+ elif type_def.kind == "base":
+ name = lsp_to_base_types(type_def)
+ elif type_def.kind == "or":
+ sub_set_items = [
+ sub_spec
+ for sub_spec in type_def.items
+ if not (sub_spec.kind == "base" and sub_spec.name == "null")
+ ]
+ sub_types = [get_type_name(sub_spec, types, spec) for sub_spec in sub_set_items]
+ sub_types_str = ", ".join(sub_types)
+ if len(sub_types) >= 2:
+ name = f"OR{len(sub_types)}<{sub_types_str}>"
+ elif len(sub_types) == 1:
+ name = sub_types[0]
+ else:
+ raise ValueError(
+ f"OR type with more than out of range count of subtypes: {type_def}"
+ )
+        optional = optional or is_special(type_def)
+ elif type_def.kind == "literal":
+ name = generate_literal_struct_type(type_def, types, spec, name_context)
+ elif type_def.kind == "stringLiteral":
+ name = "String"
+        # This type in rust requires a custom deserializer that fails if the value is not
+        # one of the allowed values. That has to be handled by the caller; this function
+        # only resolves type names.
+ elif type_def.kind == "tuple":
+ optional = optional or is_special(type_def)
+ sub_set_items = [
+ sub_spec
+ for sub_spec in type_def.items
+ if not (sub_spec.kind == "base" and sub_spec.name == "null")
+ ]
+ sub_types = [get_type_name(sub_spec, types, spec) for sub_spec in sub_set_items]
+ sub_types_str = ", ".join(sub_types)
+ if len(sub_types) >= 2:
+ name = f"({sub_types_str})"
+ elif len(sub_types) == 1:
+ name = sub_types[0]
+ else:
+ raise ValueError(f"Invalid number of items for tuple: {type_def}")
+ else:
+ raise ValueError(f"Unknown type kind: {type_def.kind}")
+
+ return f"Option<{name}>" if optional else name
+
+
+def is_special(type_def: model.LSP_TYPE_SPEC) -> bool:
+ if type_def.kind in ["or", "tuple"]:
+ for item in type_def.items:
+ if item.kind == "base" and item.name == "null":
+ return True
+ return False
+
+
+def is_special_property(prop_def: model.Property) -> bool:
+ return is_special(prop_def.type)
+
+
+def is_string_literal_property(prop_def: model.Property) -> bool:
+ return prop_def.type.kind == "stringLiteral"
+
+
+def generate_literal_struct_name(
+ type_def: model.LiteralType,
+ types: TypeData,
+ spec: model.LSPModel,
+ name_context: Optional[str] = None,
+) -> str:
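+    """Derives a unique UpperCamelCase name for an anonymous literal struct from its name context and required property names, adding a numeric suffix if needed."""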
+    ignore_list = ["Struct", "Type", "Kind", "Options", "Params", "Result"]
+
+ initial_parts = ["Struct"]
+ if name_context:
+ initial_parts += get_parts(name_context)
+
+ optional_props = [p for p in type_def.value.properties if p.optional]
+ required_props = [p for p in type_def.value.properties if not p.optional]
+
+ required_parts = []
+ for property in required_props:
+ for p in get_parts(property.name):
+ if p not in (ignore_list + required_parts):
+ required_parts.append(p)
+
+ optional = (
+ ["Options"] if len(optional_props) == len(type_def.value.properties) else []
+ )
+
+ name_parts = initial_parts
+ name = to_upper_camel_case("_".join(name_parts))
+
+ all_ignore = all(n in ignore_list for n in name_parts)
+ if types.has_name(name) or all_ignore:
+ parts = []
+
+ for r in required_parts:
+ parts.append(r)
+ name = to_upper_camel_case("_".join(initial_parts + parts + optional))
+ if not types.has_name(name):
+ return name
+
+ for i in range(1, 100):
+ end = [f"{i}"] if i > 1 else []
+ name = to_upper_camel_case(
+ "_".join(initial_parts + required_parts + optional + end)
+ )
+ if not types.has_name(name):
+ return name
+ return name
+
+
+def _get_doc(doc: Optional[str]) -> List[str]:
+ if doc:
+ return lines_to_doc_comments(doc.splitlines(keepends=False))
+ return []
+
+
+def generate_property(
+ prop_def: model.Property, types: TypeData, spec: model.LSPModel
+) -> str:
+ prop_name = to_snake_case(prop_def.name)
+ prop_type = get_type_name(
+ prop_def.type, types, spec, prop_def.optional, prop_def.name
+ )
+ optional = (
+ [f'#[serde(skip_serializing_if = "Option::is_none")]']
+ if is_special_property(prop_def) and not prop_def.optional
+ else []
+ )
+
+ if prop_name in ["type"]:
+ prop_name = f"{prop_name}_"
+ if optional:
+ optional = [
+ f'#[serde(rename = "{prop_def.name}", skip_serializing_if = "Option::is_none")]'
+ ]
+ else:
+ optional = [f'#[serde(rename = "{prop_def.name}")]']
+
+ return (
+ _get_doc(prop_def.documentation)
+ + generate_extras(prop_def)
+ + optional
+ + [f"pub {prop_name}: {prop_type},"]
+ + [""]
+ )
+
+
+def get_message_type_name(type_def: Union[model.Notification, model.Request]) -> str:
+ name = fix_lsp_method_name(type_def.method)
+ if isinstance(type_def, model.Notification):
+ return f"{name}Notification"
+ return f"{name}Request"
+
+
+def struct_wrapper(
+ type_def: Union[model.Structure, model.Notification, model.Request],
+ inner: List[str],
+) -> List[str]:
+ if hasattr(type_def, "name"):
+ name = type_def.name
+ else:
+ name = get_message_type_name(type_def)
+ lines = (
+ _get_doc(type_def.documentation)
+ + generate_extras(type_def)
+ + [
+ "#[derive(Serialize, Deserialize, PartialEq, Debug, Eq, Clone)]",
+ '#[serde(rename_all = "camelCase")]',
+ f"pub struct {name}",
+ "{",
+ ]
+ )
+ lines += indent_lines(inner)
+ lines += ["}", ""]
+ return lines
+
+
+def type_alias_wrapper(type_def: model.TypeAlias, inner: List[str]) -> List[str]:
+ lines = (
+ _get_doc(type_def.documentation)
+ + generate_extras(type_def)
+ + [
+ "#[derive(Serialize, Deserialize, PartialEq, Debug, Eq, Clone)]",
+ "#[serde(untagged)]",
+ f"pub enum {type_def.name}",
+ "{",
+ ]
+ )
+ lines += indent_lines(inner)
+ lines += ["}", ""]
+ return lines
+
+
+def generate_literal_struct_type(
+ type_def: model.LiteralType,
+ types: TypeData,
+ spec: model.LSPModel,
+ name_context: Optional[str] = None,
+) -> str:
+ if len(type_def.value.properties) == 0:
+ return "LSPObject"
+
+ if types.has_id(type_def):
+ return type_def.name
+
+ type_def.name = generate_literal_struct_name(type_def, types, spec, name_context)
+
+ inner = []
+ for prop_def in type_def.value.properties:
+ inner += generate_property(prop_def, types, spec)
+
+ lines = struct_wrapper(type_def, inner)
+ types.add_type_info(type_def, type_def.name, lines)
+ return type_def.name
+
+
+def generate_extras(
+ type_def: Union[
+ model.Enum, model.EnumItem, model.Property, model.TypeAlias, model.Structure
+ ]
+) -> List[str]:
+ extras = []
+ if type_def.deprecated:
+ extras = ["#[deprecated]"]
+ elif type_def.proposed:
+ if type_def.since:
+ extras = [f'#[cfg(feature = "proposed", since = "{type_def.since}")]']
+ else:
+ extras = [f'#[cfg(feature = "proposed")]']
+ # else:
+ # if type_def.since:
+ # extras = [f'#[cfg(feature = "stable", since = "{type_def.since}")]']
+ # else:
+ # extras = [f'#[cfg(feature = "stable")]']
+ return extras
diff --git a/generator/plugins/rust/rust_constants.py b/generator/plugins/rust/rust_constants.py
new file mode 100644
index 0000000..5b7f7a9
--- /dev/null
+++ b/generator/plugins/rust/rust_constants.py
@@ -0,0 +1,2 @@
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License.
diff --git a/generator/plugins/rust/rust_enum.py b/generator/plugins/rust/rust_enum.py
new file mode 100644
index 0000000..5773d12
--- /dev/null
+++ b/generator/plugins/rust/rust_enum.py
@@ -0,0 +1,51 @@
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License.
+
+from typing import List, Union
+
+import generator.model as model
+
+from .rust_commons import TypeData, generate_extras
+from .rust_lang_utils import indent_lines, lines_to_doc_comments, to_upper_camel_case
+
+
+def _get_enum_docs(enum: Union[model.Enum, model.EnumItem]) -> List[str]:
+ doc = enum.documentation.splitlines(keepends=False) if enum.documentation else []
+ return lines_to_doc_comments(doc)
+
+
+def generate_enum(enum: model.Enum, types: TypeData) -> None:
+ is_int = all(isinstance(item.value, int) for item in enum.values)
+
+ lines = (
+ _get_enum_docs(enum)
+ + generate_extras(enum)
+ + [
+ "#[derive(Serialize, Deserialize, PartialEq, Debug, Eq, Clone)]",
+ f"pub enum {enum.name} " "{",
+ ]
+ )
+
+ for item in enum.values:
+ if is_int:
+ field = [
+ f"{to_upper_camel_case(item.name)} = {item.value},",
+ ]
+ else:
+ field = [
+ f'#[serde(rename = "{item.value}")]',
+ f"{to_upper_camel_case(item.name)},",
+ ]
+
+ lines += indent_lines(
+ _get_enum_docs(item) + generate_extras(item) + field + [""]
+ )
+
+ lines += ["}"]
+
+ types.add_type_info(enum, enum.name, lines)
+
+
+def generate_enums(enums: List[model.Enum], types: TypeData) -> None:
+ for enum in enums:
+ generate_enum(enum, types)
diff --git a/generator/plugins/rust/rust_file_header.py b/generator/plugins/rust/rust_file_header.py
new file mode 100644
index 0000000..4928260
--- /dev/null
+++ b/generator/plugins/rust/rust_file_header.py
@@ -0,0 +1,15 @@
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License.
+
+from typing import List
+
+
+def license_header() -> List[str]:
+ return [
+ "Copyright (c) Microsoft Corporation. All rights reserved.",
+ "Licensed under the MIT License.",
+ ]
+
+
+def package_description() -> List[str]:
+ return ["Language Server Protocol types for Rust generated from LSP specification."]
diff --git a/generator/plugins/rust/rust_lang_utils.py b/generator/plugins/rust/rust_lang_utils.py
new file mode 100644
index 0000000..222d928
--- /dev/null
+++ b/generator/plugins/rust/rust_lang_utils.py
@@ -0,0 +1,73 @@
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License.
+
+
+import re
+from typing import List
+
+BASIC_LINK_RE = re.compile(r"{@link +(\w+) ([\w ]+)}")
+BASIC_LINK_RE2 = re.compile(r"{@link +(\w+)\.(\w+) ([\w \.`]+)}")
+BASIC_LINK_RE3 = re.compile(r"{@link +(\w+)}")
+BASIC_LINK_RE4 = re.compile(r"{@link +(\w+)\.(\w+)}")
+PARTS_RE = re.compile(r"(([a-z0-9])([A-Z]))")
+DEFAULT_INDENT = " "
+
+
+def lines_to_comments(lines: List[str]) -> List[str]:
+ return ["// " + line for line in lines]
+
+
+def lines_to_doc_comments(lines: List[str]) -> List[str]:
+ doc = []
+ for line in lines:
+ line = BASIC_LINK_RE.sub(r"[\2][\1]", line)
+ line = BASIC_LINK_RE2.sub(r"[\3][`\1::\2`]", line)
+ line = BASIC_LINK_RE3.sub(r"[\1]", line)
+ line = BASIC_LINK_RE4.sub(r"[`\1::\2`]", line)
+ if line.startswith("///"):
+ doc.append(line)
+ else:
+ doc.append("/// " + line)
+ return doc
+
+
+def lines_to_block_comment(lines: List[str]) -> List[str]:
+ return ["/*"] + lines + ["*/"]
+
+
+def get_parts(name: str) -> List[str]:
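+    """Splits a camelCase or snake_case identifier into its constituent word parts."""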
+ name = name.replace("_", " ")
+ return PARTS_RE.sub(r"\2 \3", name).split()
+
+
+def to_snake_case(name: str) -> str:
+ return "_".join([part.lower() for part in get_parts(name)])
+
+
+def has_upper_case(name: str) -> bool:
+ return any(c.isupper() for c in name)
+
+
+def is_snake_case(name: str) -> bool:
+ return (
+ not name.startswith("_")
+ and not name.endswith("_")
+ and ("_" in name)
+ and not has_upper_case(name)
+ )
+
+
+def to_upper_camel_case(name: str) -> str:
+ return "".join([c.capitalize() for c in get_parts(name)])
+
+
+def to_camel_case(name: str) -> str:
+ parts = get_parts(name)
+ if len(parts) > 1:
+ return parts[0] + "".join([c.capitalize() for c in parts[1:]])
+ else:
+ return parts[0]
+
+
+def indent_lines(lines: List[str], indent: str = DEFAULT_INDENT) -> List[str]:
+ return [f"{indent}{line}" for line in lines]
diff --git a/generator/plugins/rust/rust_structs.py b/generator/plugins/rust/rust_structs.py
new file mode 100644
index 0000000..571cd62
--- /dev/null
+++ b/generator/plugins/rust/rust_structs.py
@@ -0,0 +1,460 @@
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License.
+
+from typing import Dict, Iterable, List, Optional
+
+import generator.model as model
+
+from .rust_commons import (
+ TypeData,
+ fix_lsp_method_name,
+ generate_extras,
+ generate_literal_struct_name,
+ generate_property,
+ get_extended_properties,
+ get_message_type_name,
+ get_type_name,
+ struct_wrapper,
+ type_alias_wrapper,
+)
+from .rust_lang_utils import get_parts, lines_to_doc_comments, to_upper_camel_case
+
+
+def generate_type_aliases(spec: model.LSPModel, types: TypeData) -> None:
+ for alias in spec.typeAliases:
+ if not types.has_id(alias):
+ generate_type_alias(alias, types, spec)
+
+
+def _get_doc(doc: Optional[str]) -> List[str]:
+ if doc:
+ return lines_to_doc_comments(doc.splitlines(keepends=False))
+ return []
+
+
+def _is_some_array_type(items: Iterable[model.LSP_TYPE_SPEC]) -> bool:
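+    """Returns True when the two items are a reference `T` paired with an array of the same `T` (i.e. `T | T[]`)."""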
+ items_list = list(items)
+ assert len(items_list) == 2
+ item1, item2 = items_list
+
+ if item1.kind == "array" and item2.kind == "reference":
+ return item1.element.kind == "reference" and item1.element.name == item2.name
+
+ if item2.kind == "array" and item1.kind == "reference":
+ return item2.element.kind == "reference" and item2.element.name == item1.name
+ return False
+
+
+def _get_some_array_code(
+ items: Iterable[model.LSP_TYPE_SPEC],
+    types: TypeData,
+ spec: model.LSPModel,
+) -> List[str]:
+ assert _is_some_array_type(items)
+ items_list = list(items)
+ item1 = items_list[0]
+ item2 = items_list[1]
+
+ if item1.kind == "array" and item2.kind == "reference":
+ return [
+ f" One({get_type_name(item2, types, spec)}),",
+ f" Many({get_type_name(item1, types, spec)}),",
+ ]
+
+ if item2.kind == "array" and item1.kind == "reference":
+ return [
+ f" One({get_type_name(item1, types, spec)}),",
+ f" Many({get_type_name(item2, types, spec)}),",
+ ]
+ return []
+
+
+def _get_common_name(items: Iterable[model.LSP_TYPE_SPEC], kind: str) -> List[str]:
+ names = [get_parts(item.name) for item in list(items) if item.kind == kind]
+ if len(names) < 2:
+ return []
+
+ smallest = min(names, key=len)
+ common = []
+ for i in range(len(smallest)):
+ if all(name[i] == smallest[i] for name in names):
+ common.append(smallest[i])
+ return common
+
+
+def _is_all_reference_similar_type(alias: model.TypeAlias) -> bool:
+ items_list = list(alias.type.items)
+ return all(item.kind in ["reference", "base", "literal"] for item in items_list)
+
+
+def _get_all_reference_similar_code(
+ alias: model.TypeAlias,
+ types: TypeData,
+ spec: model.LSPModel,
+) -> List[str]:
+ items = alias.type.items
+ assert _is_all_reference_similar_type(alias)
+
+ # Ensure all literal types have a name
+ for item in list(items):
+ if item.kind == "literal":
+ get_type_name(item, types, spec, None, alias.name)
+
+ common_name = [
+ i.lower()
+ for i in (
+ _get_common_name(items, "reference")
+ + _get_common_name(items, "literal")
+ + ["struct"]
+ )
+ ]
+
+ lines = []
+ value = 0
+ field_names = []
+ for item in list(items):
+ if item.kind == "base" and item.name == "null":
+ lines += ["None,"]
+ field_names += ["None"]
+ elif item.kind == "base":
+ name = _base_to_field_name(item.name)
+ lines += [f"{name}({get_type_name(item, types, spec)}),"]
+ field_names += [name]
+ elif item.kind == "reference":
+ name = [
+ part for part in get_parts(item.name) if part.lower() not in common_name
+ ]
+ if len(name) == 0:
+ name = [f"Value{value}"]
+ value += 1
+ common_name += [n.lower() for n in name]
+ name = to_upper_camel_case("".join(name))
+ field_names += [name]
+ lines += [f"{name}({get_type_name(item, types, spec)}),"]
+ elif item.kind == "literal":
+ name = [
+ part for part in get_parts(item.name) if part.lower() not in common_name
+ ]
+ optional_props = [p for p in item.value.properties if p.optional]
+ required_props = [p for p in item.value.properties if not p.optional]
+
+ # Try picking a name using required props first and then optional props
+ if len(name) == 0:
+ for p in required_props + optional_props:
+ name = [
+ part
+ for part in get_parts(p.name)
+ if part.lower() not in common_name
+ ]
+ if len(name) != 0:
+ break
+
+ # If we still don't have a name, then try picking a name using required props
+            # and then optional props without checking the common name list. But check
+ # that the name is not already used.
+ if len(name) == 0:
+ for p in required_props + optional_props:
+ if to_upper_camel_case(p.name) not in field_names:
+ name = get_parts(p.name)
+ break
+
+ # If we still don't have a name, then just use a generic "Value{int}" as name
+ if len(name) == 0:
+ name = [f"Value{value}"]
+ value += 1
+ common_name += [n.lower() for n in name]
+ name = to_upper_camel_case("".join(name))
+ field_names += [name]
+ lines += [f"{name}({item.name}),"]
+ else:
+ raise ValueError(f"Unknown type {item}")
+ return lines
+
+
+def _base_to_field_name(base_name: str) -> str:
+ if base_name == "boolean":
+ return "Bool"
+ if base_name == "integer":
+ return "Int"
+ if base_name == "decimal":
+ return "Real"
+ if base_name == "string":
+ return "String"
+ if base_name == "uinteger":
+ return "UInt"
+ if base_name == "null":
+ return "None"
+ raise ValueError(f"Unknown base type {base_name}")
+
+
+def _get_literal_field_name(literal: model.LiteralType, types: TypeData) -> str:
+ properties = list(literal.value.properties)
+
+    if len(properties) == 1 and properties[0].type.kind == "base":
+        return _base_to_field_name(properties[0].type.name)
+
+    if len(properties) == 1 and properties[0].type.kind == "reference":
+        return to_upper_camel_case(properties[0].type.name)
+
+ return generate_literal_struct_name(literal, types)
+
+
+def _generate_or_type_alias(
+    alias_def: model.TypeAlias, types: TypeData, spec: model.LSPModel
+) -> List[str]:
+ inner = []
+
+ if len(alias_def.type.items) == 2 and _is_some_array_type(alias_def.type.items):
+ inner += _get_some_array_code(alias_def.type.items, types, spec)
+ elif _is_all_reference_similar_type(alias_def):
+ inner += _get_all_reference_similar_code(alias_def, types, spec)
+ else:
+ index = 0
+
+ for sub_type in alias_def.type.items:
+ if sub_type.kind == "base" and sub_type.name == "null":
+ inner += [f"None,"]
+ else:
+ inner += [f"ValueType{index}({get_type_name(sub_type, types, spec)}),"]
+ index += 1
+ return type_alias_wrapper(alias_def, inner)
+
+
+def generate_type_alias(
+ alias_def: model.TypeAlias, types: TypeData, spec: model.LSPModel
+) -> None:
+ doc = _get_doc(alias_def.documentation)
+ doc += generate_extras(alias_def)
+
+ lines = []
+ if alias_def.type.kind == "reference":
+ lines += doc
+ lines += [f"pub type {alias_def.name} = {alias_def.type.name};"]
+ elif alias_def.type.kind == "array":
+ lines += doc
+ lines += [
+ f"pub type {alias_def.name} = {get_type_name(alias_def.type, types, spec)};"
+ ]
+ elif alias_def.type.kind == "or":
+ lines += _generate_or_type_alias(alias_def, types, spec)
+ elif alias_def.type.kind == "and":
+ raise ValueError("And type not supported")
+ elif alias_def.type.kind == "literal":
+ lines += doc
+ lines += [
+ f"pub type {alias_def.name} = {get_type_name(alias_def.type, types, spec)};"
+ ]
+ elif alias_def.type.kind == "base":
+ lines += doc
+ lines += [
+ f"pub type {alias_def.name} = {get_type_name(alias_def.type, types, spec)};"
+ ]
+ else:
+ pass
+
+ types.add_type_info(alias_def, alias_def.name, lines)
+
+
+def generate_structures(spec: model.LSPModel, types: TypeData) -> TypeData:
+ for struct in spec.structures:
+ if not types.has_id(struct):
+ generate_struct(struct, types, spec)
+ return types
+
+
+def generate_struct(
+ struct_def: model.Structure, types: TypeData, spec: model.LSPModel
+) -> None:
+ inner = []
+ for prop_def in get_extended_properties(struct_def, spec):
+ inner += generate_property(prop_def, types, spec)
+
+ lines = struct_wrapper(struct_def, inner)
+ types.add_type_info(struct_def, struct_def.name, lines)
+
+
+def generate_notifications(
+ spec: model.LSPModel, types: TypeData
+) -> TypeData:
+ for notification in spec.notifications:
+ if not types.has_id(notification):
+ generate_notification(notification, types, spec)
+ return types
+
+
+def required_rpc_properties(name: Optional[str] = None) -> List[model.Property]:
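+    """Builds the `jsonrpc` property (plus a `method` property when a methods enum name is given) shared by all generated messages."""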
+ props = [
+ model.Property(
+ name="jsonrpc",
+ type=model.BaseType(kind="base", name="string"),
+ optional=False,
+ documentation="The version of the JSON RPC protocol.",
+ ),
+ ]
+ if name:
+ props += [
+ model.Property(
+ name="method",
+ type=model.ReferenceType(kind="reference", name=name),
+ optional=False,
+ documentation="The method to be invoked.",
+ ),
+ ]
+ return props
+
+
+def generate_notification(
+ notification_def: model.Notification, types: TypeData, spec: model.LSPModel
+) -> None:
+ properties = required_rpc_properties("LSPNotificationMethods")
+ if notification_def.params:
+ properties += [
+ model.Property(
+ name="params",
+ type=notification_def.params,
+ )
+ ]
+
+ inner = []
+ for prop_def in properties:
+ inner += generate_property(prop_def, types, spec)
+
+ lines = struct_wrapper(notification_def, inner)
+ types.add_type_info(
+ notification_def, get_message_type_name(notification_def), lines
+ )
+
+
+def generate_required_request_types(
+ spec: model.LSPModel, types: TypeData
+) -> None:
+ lsp_id = model.TypeAlias(
+ name="LSPId",
+ documentation="An identifier to denote a specific request.",
+ type=model.OrType(
+ kind="or",
+ items=[
+ model.BaseType(kind="base", name="integer"),
+ model.BaseType(kind="base", name="string"),
+ ],
+ ),
+ )
+ generate_type_alias(lsp_id, types, spec)
+
+ lsp_id_optional = model.TypeAlias(
+ name="LSPIdOptional",
+ documentation="An identifier to denote a specific response.",
+ type=model.OrType(
+ kind="or",
+ items=[
+ model.BaseType(kind="base", name="integer"),
+ model.BaseType(kind="base", name="string"),
+ model.BaseType(kind="base", name="null"),
+ ],
+ ),
+ )
+ generate_type_alias(lsp_id_optional, types, spec)
+
+
+def generate_requests(spec: model.LSPModel, types: TypeData) -> TypeData:
+ generate_required_request_types(spec, types)
+ for request in spec.requests:
+ if not types.has_id(request):
+ generate_request(request, types, spec)
+ generate_response(request, types, spec)
+ generate_partial_result(request, types, spec)
+ generate_registration_options(request, types, spec)
+ return types
+
+
+def generate_request(
+ request_def: model.Request, types: TypeData, spec: model.LSPModel
+) -> None:
+ properties = required_rpc_properties("LSPRequestMethods")
+
+ properties += [
+ model.Property(
+ name="id",
+ type=model.ReferenceType(kind="reference", name="LSPId"),
+ optional=False,
+ documentation="The request id.",
+ )
+ ]
+ if request_def.params:
+ properties += [
+ model.Property(
+ name="params",
+ type=request_def.params,
+ )
+ ]
+
+ inner = []
+ for prop_def in properties:
+ inner += generate_property(prop_def, types, spec)
+
+ lines = struct_wrapper(request_def, inner)
+ types.add_type_info(request_def, get_message_type_name(request_def), lines)
+
+
+def generate_response(
+ request_def: model.Request, types: TypeData, spec: model.LSPModel
+) -> None:
+ properties = required_rpc_properties("LSPRequestMethods")
+ properties += [
+ model.Property(
+ name="id",
+ type=model.ReferenceType(kind="reference", name="LSPIdOptional"),
+ optional=False,
+ documentation="The request id.",
+ )
+ ]
+ if request_def.result:
+ if request_def.result.kind == "base" and request_def.result.name == "null":
+ properties += [
+ model.Property(
+ name="result",
+ type=model.ReferenceType(kind="reference", name="LSPNull"),
+ )
+ ]
+ else:
+ properties += [
+ model.Property(
+ name="result",
+ type=request_def.result,
+ )
+ ]
+ name = fix_lsp_method_name(request_def.method)
+ response_def = model.Structure(
+ name=f"{name}Response",
+ documentation=f"Response to the [{name}Request].",
+ properties=properties,
+ since=request_def.since,
+ deprecated=request_def.deprecated,
+ )
+
+ inner = []
+ for prop_def in properties:
+ inner += generate_property(prop_def, types, spec)
+
+ lines = struct_wrapper(response_def, inner)
+ types.add_type_info(response_def, response_def.name, lines)
+
+
+def generate_partial_result(
+ request_def: model.Request, types: TypeData, spec: model.LSPModel
+) -> None:
+ if not request_def.partialResult:
+ return
+
+ if request_def.partialResult.kind not in ["and", "or"]:
+ return
+
+
+def generate_registration_options(
+ request_def: model.Request, types: TypeData, spec: model.LSPModel
+) -> None:
+ if not request_def.registrationOptions:
+ return
+
+ if request_def.registrationOptions.kind not in ["and", "or"]:
+ return
diff --git a/generator/plugins/rust/rust_utils.py b/generator/plugins/rust/rust_utils.py
new file mode 100644
index 0000000..d817070
--- /dev/null
+++ b/generator/plugins/rust/rust_utils.py
@@ -0,0 +1,68 @@
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License.
+
+import pathlib
+from typing import Dict, List
+
+from generator import model
+
+from .rust_commons import TypeData, generate_commons
+from .rust_enum import generate_enums
+from .rust_file_header import license_header
+from .rust_lang_utils import lines_to_comments
+from .rust_structs import (
+ generate_notifications,
+ generate_requests,
+ generate_structures,
+ generate_type_aliases,
+)
+
+PACKAGE_DIR_NAME = "lsprotocol"
+
+
+def generate_from_spec(spec: model.LSPModel, output_dir: str) -> None:
+ code = generate_package_code(spec)
+
+ output_path = pathlib.Path(output_dir, PACKAGE_DIR_NAME)
+ if not output_path.exists():
+ output_path.mkdir(parents=True, exist_ok=True)
+ (output_path / "src").mkdir(parents=True, exist_ok=True)
+
+ for file_name in code:
+ (output_path / file_name).write_text(code[file_name], encoding="utf-8")
+
+
+def generate_package_code(spec: model.LSPModel) -> Dict[str, str]:
+ return {
+ "src/lib.rs": generate_lib_rs(spec),
+ }
+
+
+def generate_lib_rs(spec: model.LSPModel) -> str:
+ lines = lines_to_comments(license_header())
+ lines += [
+ "",
+ "// ****** THIS IS A GENERATED FILE, DO NOT EDIT. ******",
+ "// Steps to generate:",
+ "// 1. Checkout https://github.com/microsoft/lsprotocol",
+ "// 2. Install nox: `python -m pip install nox`",
+ "// 3. Run command: `python -m nox --session build_lsp`",
+ "",
+ ]
+ lines += [
+ "use serde::{Serialize, Deserialize};",
+ "use std::collections::HashMap;",
+ "use rust_decimal::Decimal;" "",
+ ]
+
+ type_data = TypeData()
+ generate_commons(spec, type_data)
+ generate_enums(spec.enumerations, type_data)
+
+ generate_type_aliases(spec, type_data)
+ generate_structures(spec, type_data)
+ generate_notifications(spec, type_data)
+ generate_requests(spec, type_data)
+
+ lines += type_data.get_lines()
+ return "\n".join(lines)
diff --git a/generator/plugins/testdata/__init__.py b/generator/plugins/testdata/__init__.py
new file mode 100644
index 0000000..35cc498
--- /dev/null
+++ b/generator/plugins/testdata/__init__.py
@@ -0,0 +1,4 @@
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License.
+
+from .testdata_utils import generate_from_spec as generate
diff --git a/generator/plugins/testdata/testdata_generator.py b/generator/plugins/testdata/testdata_generator.py
new file mode 100644
index 0000000..4d9b850
--- /dev/null
+++ b/generator/plugins/testdata/testdata_generator.py
@@ -0,0 +1,498 @@
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License.
+
+import hashlib
+import itertools
+import json
+import logging
+import re
+from copy import deepcopy
+from typing import Any, Dict, List, Optional, Tuple, Union
+
+import generator.model as model
+
+LSP_MAX_INT = 2**31 - 1
+LSP_MIN_INT = -(2**31)
+
+LSP_MAX_UINT = 2**31 - 1
+LSP_MIN_UINT = 0
+
+LSP_OVER_MAX_INT = LSP_MAX_INT + 1
+LSP_UNDER_MIN_INT = LSP_MIN_INT - 1
+
+LSP_OVER_MAX_UINT = LSP_MAX_UINT + 1
+LSP_UNDER_MIN_UINT = LSP_MIN_UINT - 1
+
+
+def get_hash_from(text: str) -> str:
+ return hashlib.sha256(text.encode("utf-8")).hexdigest()
+
+
+def request_variants(method: str):
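+    """Yields (is_valid, message) pairs covering well-formed and malformed JSON-RPC request envelopes for the given method."""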
+ for id_value in [1, LSP_MAX_INT, LSP_MIN_INT, "string-id-1"]:
+ yield True, {"jsonrpc": "2.0", "id": id_value, "method": method}
+ for id_value in [LSP_OVER_MAX_INT, LSP_UNDER_MIN_INT, 1.0, True, None]:
+ yield False, {"jsonrpc": "2.0", "id": id_value, "method": method}
+ yield False, {"jsonrpc": "2.0", "method": method}
+ yield False, {"jsonrpc": "2.0", "id": 1}
+ yield False, {"id": 1, "method": method}
+
+
+def response_variants():
+ for id_value in [1, LSP_MAX_INT, LSP_MIN_INT, "string-id-1"]:
+ yield True, {"jsonrpc": "2.0", "id": id_value}
+ for id_value in [LSP_OVER_MAX_INT, LSP_UNDER_MIN_INT, 1.0, True, None]:
+ yield False, {"jsonrpc": "2.0", "id": id_value}
+ yield False, {"jsonrpc": "2.0"}
+ yield False, {"id": 1}
+
+
+def notify_variants(method: str):
+ yield True, {"jsonrpc": "2.0", "method": method}
+ yield False, {"jsonrpc": "2.0", "id": 1, "method": method}
+
+
+def _get_struct(name: str, spec: model.LSPModel) -> Optional[model.Structure]:
+ for struct in spec.structures:
+ if struct.name == name:
+ return struct
+ return None
+
+
+def _get_type_alias(name: str, spec: model.LSPModel) -> Optional[model.TypeAlias]:
+ for alias in spec.typeAliases:
+ if alias.name == name:
+ return alias
+ return None
+
+
+def _get_enum(name: str, spec: model.LSPModel) -> Optional[model.Enum]:
+ for enum in spec.enumerations:
+ if enum.name == name:
+ return enum
+ return None
+
+
+class Ignore:
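+    """Sentinel marking an optional property that should be omitted from a generated test variant."""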
+ pass
+
+
+def extend(arr: List[Any], length: int) -> List[Any]:
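+    """Repeats `arr` cyclically so that the result has exactly `length` items."""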
+ result = arr * (length // len(arr)) + arr[: length % len(arr)]
+ return result
+
+
+def extend_all(lists: List[List[Any]]) -> List[List[Any]]:
+ max_len = max(len(l) for l in lists)
+ max_len = min(1000, max_len)
+ return [extend(l, max_len) for l in lists]
+
+
+def has_null_base_type(items: List[model.LSP_TYPE_SPEC]) -> bool:
+ return any(item.kind == "base" and item.name == "null" for item in items)
+
+
+def filter_null_base_type(
+ items: List[model.LSP_TYPE_SPEC],
+) -> List[model.LSP_TYPE_SPEC]:
+ return [item for item in items if not (item.kind == "base" and item.name == "null")]
+
+
+def generate_for_base(name: str):
+ if name == "string":
+ yield (True, "some string 🐍🐜")
+ yield (True, "")
+ elif name == "integer":
+ yield (True, 1)
+ yield (True, LSP_MAX_INT)
+ yield (True, LSP_MIN_INT)
+ yield (False, LSP_OVER_MAX_INT)
+ yield (False, LSP_UNDER_MIN_INT)
+ elif name == "decimal":
+ yield (True, 1.0)
+ elif name == "boolean":
+ yield (True, True)
+ yield (True, False)
+ elif name == "null":
+ yield (True, None)
+ elif name == "uinteger":
+ yield (True, 1)
+ yield (True, LSP_MAX_UINT)
+ yield (True, LSP_MIN_UINT)
+ yield (False, LSP_OVER_MAX_UINT)
+ yield (False, LSP_UNDER_MIN_UINT)
+ elif name in ["URI", "DocumentUri"]:
+ yield (True, "file:///some/path")
+ elif name == "RegExp":
+ yield (True, ".*")
+
+
+def generate_for_array(
+ type_def: model.LSP_TYPE_SPEC, spec: model.LSPModel, visited: List[str]
+):
+ generated = list(generate_for_type(type_def, spec, visited))
+ yield (True, [])
+
+ # array with 1 item
+ for valid, value in generated:
+ if not isinstance(value, Ignore):
+ yield (valid, [value])
+
+ # array with 2 items
+ generated = generated[:100] if len(generated) > 100 else generated
+ values = itertools.product(generated, repeat=2)
+ for (valid1, value1), (valid2, value2) in values:
+ if not isinstance(value1, Ignore) and not isinstance(value2, Ignore):
+ yield (valid1 and valid2, [value1, value2])
+
+
+def generate_for_tuple(
+ type_defs: List[model.LSP_TYPE_SPEC], spec: model.LSPModel, visited: List[str]
+):
+ generated = [
+ list(generate_for_type(type_def, spec, visited)) for type_def in type_defs
+ ]
+ products = zip(*extend_all(generated))
+
+ for product in products:
+ is_valid = all(valid for valid, _ in product)
+ values = [value for _, value in product if not isinstance(value, Ignore)]
+ yield (is_valid, tuple(values))
+
+
+def generate_for_map(
+ key_def: model.LSP_TYPE_SPEC,
+ value_def: model.LSP_TYPE_SPEC,
+ spec: model.LSPModel,
+ visited: List[str],
+):
+ key_values = list(generate_for_type(key_def, spec, visited))
+ value_values = list(generate_for_type(value_def, spec, visited))
+
+ for key_valid, key_value in key_values:
+ for value_valid, value_value in value_values:
+ if not (isinstance(key_value, Ignore) or isinstance(value_value, Ignore)):
+ yield (key_valid and value_valid, {key_value: value_value})
+
+
+def get_all_extends(struct_def: model.Structure, spec) -> List[model.Structure]:
+ extends = []
+ for extend in struct_def.extends:
+ extends.append(_get_struct(extend.name, spec))
+ for struct in get_all_extends(_get_struct(extend.name, spec), spec):
+ if not any(struct.name == e.name for e in extends):
+ extends.append(struct)
+ return extends
+
+
+def get_all_properties(struct: model.Structure, spec) -> List[model.Property]:
+ properties = []
+ for prop in struct.properties:
+ properties.append(prop)
+
+ for extend in get_all_extends(struct, spec):
+ for prop in get_all_properties(extend, spec):
+ if not any(prop.name == p.name for p in properties):
+ properties.append(prop)
+
+ if not all(mixin.kind == "reference" for mixin in struct.mixins):
+ raise ValueError(f"Struct {struct.name} has non-reference mixins")
+ for mixin in [_get_struct(mixin.name, spec) for mixin in struct.mixins]:
+ for prop in get_all_properties(mixin, spec):
+ if not any(prop.name == p.name for p in properties):
+ properties.append(prop)
+
+ return properties
+
+
+def generate_for_property(
+ prop: model.Property, spec: model.LSPModel, visited: List[str]
+):
+ if prop.optional:
+ yield (True, Ignore())
+ yield from generate_for_type(prop.type, spec, visited)
+
+
+def generate_for_reference(
+ refname: str,
+ spec: model.LSPModel,
+ visited: List[str],
+):
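+ # `visited` bounds the recursion: once a reference name has been seen more than
+ # twice, no further samples are produced, which breaks self-referential cycles.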
+ if len([name for name in visited if name == refname]) > 2:
+ return
+
+ ref = _get_struct(refname, spec)
+ alias = _get_type_alias(refname, spec)
+ enum = _get_enum(refname, spec)
+ if ref:
+ properties = get_all_properties(ref, spec)
+ if properties:
+ names = [prop.name for prop in properties]
+ value_variants = [
+ list(generate_for_property(prop, spec, visited)) for prop in properties
+ ]
+
+ products = zip(*extend_all(value_variants))
+ for variants in products:
+ is_valid = all(valid for valid, _ in variants)
+ values = [value for _, value in variants]
+ variant = {
+ name: value
+ for name, value in zip(names, values)
+ if not isinstance(value, Ignore)
+ }
+ yield (is_valid, variant)
+ else:
+ yield (True, {"lspExtension": "some value"})
+ yield (True, dict())
+ elif alias:
+ if refname in ["LSPObject", "LSPAny", "LSPArray"]:
+ yield from (
+ (True, value)
+ for _, value in generate_for_type(alias.type, spec, visited)
+ )
+ else:
+ yield from generate_for_type(alias.type, spec, visited)
+ elif enum:
+ value = enum.values[0].value
+ yield (True, value)
+ if isinstance(value, int):
+ yield (bool(enum.supportsCustomValues), 12345)
+ elif isinstance(value, str):
+ yield (bool(enum.supportsCustomValues), "testCustomValue")
+ else:
+ raise ValueError(f"Unknown reference {refname}")
+
+
+def generate_for_or(
+ type_defs: List[model.LSP_TYPE_SPEC],
+ spec: model.LSPModel,
+ visited: List[str],
+):
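+ # A `null` alternative contributes a single None sample; every remaining
+ # alternative contributes all of its own samples unchanged.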
+ if has_null_base_type(type_defs):
+ yield (True, None)
+
+ subset = filter_null_base_type(type_defs)
+ generated = [
+ list(generate_for_type(type_def, spec, visited)) for type_def in subset
+ ]
+ for valid, value in itertools.chain(*generated):
+ yield (valid, value)
+
+
+def generate_for_and(
+ type_defs: List[model.LSP_TYPE_SPEC],
+ spec: model.LSPModel,
+ visited: List[str],
+):
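+ # Sample every constituent type and merge the resulting dicts into one object;
+ # the merged sample is valid only if all of its parts are valid.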
+ generated = [
+ list(generate_for_type(type_def, spec, visited)) for type_def in type_defs
+ ]
+ products = zip(*extend_all(generated))
+ for variants in products:
+ is_valid = all(valid for valid, _ in variants)
+ values = [value for _, value in variants]
+ variant = {}
+ for value in values:
+ variant.update(value)
+ yield (is_valid, variant)
+
+
+def generate_for_literal(
+ type_def: model.LiteralType,
+ spec: model.LSPModel,
+ visited: List[str],
+):
+ if type_def.value.properties:
+ names = [prop.name for prop in type_def.value.properties]
+ value_variants = [
+ list(generate_for_type(prop.type, spec, visited))
+ for prop in type_def.value.properties
+ ]
+
+ products = zip(*extend_all(value_variants))
+ for variants in products:
+ is_valid = all(valid for valid, _ in variants)
+ values = [value for _, value in variants]
+ variant = {name: value for name, value in zip(names, values)}
+ yield (is_valid, variant)
+ else:
+ # Literals with no properties are a way to extend the LSP spec
+ # see: https://github.com/microsoft/vscode-languageserver-node/issues/997
+ yield (True, {"lspExtension": "some value"})
+ yield (True, dict())
+
+
+def generate_for_type(
+ type_def: model.LSP_TYPE_SPEC,
+ spec: model.LSPModel,
+ visited: List[str],
+):
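+ # Dispatch on the `kind` of the type spec; a None type (e.g. a request without
+ # params) yields a single None sample, and unknown kinds yield nothing.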
+ if type_def is None:
+ yield (True, None)
+ elif type_def.kind == "base":
+ yield from generate_for_base(type_def.name)
+ elif type_def.kind == "array":
+ yield from generate_for_array(type_def.element, spec, visited)
+ elif type_def.kind == "reference":
+ yield from generate_for_reference(
+ type_def.name, spec, visited + [type_def.name]
+ )
+ elif type_def.kind == "stringLiteral":
+ yield (True, type_def.value)
+ # yield (False, f"invalid@{type_def.value}")
+ elif type_def.kind == "tuple":
+ yield from generate_for_tuple(type_def.items, spec, visited)
+ elif type_def.kind == "or":
+ yield from generate_for_or(type_def.items, spec, visited)
+ elif type_def.kind == "and":
+ yield from generate_for_and(type_def.items, spec, visited)
+ elif type_def.kind == "literal":
+ yield from generate_for_literal(type_def, spec, visited)
+ elif type_def.kind == "map":
+ yield from generate_for_map(type_def.key, type_def.value, spec, visited)
+
+
+def generate_requests(request: model.Request, spec: model.LSPModel):
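+ # Pair every message-envelope variant from request_variants() with every params
+ # sample; an Ignore params sample means the "params" field is omitted entirely.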
+ variants = zip(
+ *extend_all(
+ [
+ list(request_variants(request.method)),
+ list(generate_for_type(request.params, spec, [])),
+ ]
+ )
+ )
+
+ for (valid1, base), (valid2, params) in variants:
+ valid = valid1 and valid2
+ if isinstance(params, Ignore):
+ yield (valid, base)
+ else:
+ message = deepcopy(base)
+ message.update({"params": params})
+ yield (valid, message)
+
+
+def generate_notifications(notify: model.Notification, spec: model.LSPModel):
+ variants = zip(
+ *extend_all(
+ [
+ list(notify_variants(notify.method)),
+ list(generate_for_type(notify.params, spec, [])),
+ ]
+ )
+ )
+
+ for (valid1, base), (valid2, params) in variants:
+ valid = valid1 and valid2
+ if isinstance(params, Ignore):
+ yield (valid, base)
+ else:
+ message = deepcopy(base)
+ message.update({"params": params})
+ yield (valid, message)
+
+
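+# Hand-written structure describing the JSON-RPC ResponseError shape. generate()
+# appends it to spec.structures so that response "error" fields can be produced
+# through the normal reference machinery.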
+RESPONSE_ERROR = model.Structure(
+ **{
+ "name": "ResponseError",
+ "properties": [
+ {
+ "name": "code",
+ "type": {"kind": "base", "name": "integer"},
+ },
+ {
+ "name": "message",
+ "type": {"kind": "base", "name": "string"},
+ },
+ {
+ "name": "data",
+ "type": {"kind": "reference", "name": "LSPObject"},
+ "optional": True,
+ },
+ ],
+ }
+)
+
+
+def generate_responses(request: model.Request, spec: model.LSPModel):
+ variants = zip(
+ *extend_all(
+ [
+ list(response_variants()),
+ list(generate_for_type(request.result, spec, [])),
+ list(
+ generate_for_type(
+ model.ReferenceType("reference", "ResponseError"), spec, []
+ )
+ ),
+ ]
+ )
+ )
+
+ for (valid1, base), (valid2, result), (valid3, error) in variants:
+ valid = valid1 and valid2 and valid3
+ if isinstance(result, Ignore):
+ yield (valid, base)
+ else:
+ message = deepcopy(base)
+ message.update({"result": result})
+ message.update({"error": error})
+ yield (valid, message)
+
+
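+# Matches a lowercase letter or digit followed by an uppercase letter, i.e. a
+# camelCase word boundary.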
+PARTS_RE = re.compile(r"(([a-z0-9])([A-Z]))")
+
+
+def get_parts(name: str) -> List[str]:
+ name = name.replace("_", " ")
+ return PARTS_RE.sub(r"\2 \3", name).split()
+
+
+def to_upper_camel_case(name: str) -> str:
+ return "".join([c.capitalize() for c in get_parts(name)])
+
+
+def lsp_method_to_name(method: str) -> str:
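+ # e.g. "textDocument/didOpen" -> "TextDocumentDidOpen",
+ #      "$/cancelRequest" -> "CancelRequest"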
+ if method.startswith("$"):
+ method = method[1:]
+ method = method.replace("/", "_")
+ return to_upper_camel_case(method)
+
+
+def generate(spec: model.LSPModel, logger: logging.Logger):
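+ # Returns a mapping of file name -> JSON content; names encode the method, the
+ # message kind, the expected validity, and a content hash, so duplicates are
+ # written only once.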
+ spec.structures.append(RESPONSE_ERROR)
+ testdata = {}
+ for request in spec.requests:
+ counter = 0
+ for valid, value in generate_requests(request, spec):
+ content = json.dumps(value, indent=4, ensure_ascii=False)
+ name = f"{lsp_method_to_name(request.method)}Request-{valid}-{get_hash_from(content)}.json"
+ if name in testdata:
+ continue
+ testdata[name] = content
+ counter += 1
+ logger.info(f"Generated {counter} variants for Request: {request.method}")
+
+ counter = 0
+ for valid, value in generate_responses(request, spec):
+ content = json.dumps(value, indent=4, ensure_ascii=False)
+ name = f"{lsp_method_to_name(request.method)}Response-{valid}-{get_hash_from(content)}.json"
+ if name in testdata:
+ continue
+ testdata[name] = content
+ counter += 1
+ logger.info(f"Generated {counter} variants for Response: {request.method}")
+
+ for notify in spec.notifications:
+ counter = 0
+ for valid, value in generate_notifications(notify, spec):
+ content = json.dumps(value, indent=4, ensure_ascii=False)
+ name = f"{lsp_method_to_name(notify.method)}Notification-{valid}-{get_hash_from(content)}.json"
+ if name in testdata:
+ continue
+ testdata[name] = content
+ counter += 1
+ logger.info(f"Generated {counter} variants for Notification: {notify.method}")
+
+ logger.info(f"Generated {len(testdata)} test variants")
+ return testdata
diff --git a/generator/plugins/testdata/testdata_utils.py b/generator/plugins/testdata/testdata_utils.py
new file mode 100644
index 0000000..879ce2f
--- /dev/null
+++ b/generator/plugins/testdata/testdata_utils.py
@@ -0,0 +1,34 @@
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License.
+
+import logging
+import pathlib
+from typing import Dict
+
+import generator.model as model
+
+from .testdata_generator import generate
+
+logger = logging.getLogger("testdata")
+
+
+def generate_from_spec(spec: model.LSPModel, output_dir: str) -> None:
+ """Generate the test data files for the given spec."""
+ output = pathlib.Path(output_dir)
+
+ if not output.exists():
+ output.mkdir(parents=True, exist_ok=True)
+
+ cleanup(output)
+ # key is the relative path to the file, value is the content
+ code: Dict[str, str] = generate(spec, logger)
+ for file_name in code:
+ # write the generated content to its file
+ file = output / file_name
+ file.write_text(code[file_name], encoding="utf-8")
+
+
+def cleanup(output_path: pathlib.Path) -> None:
+ """Clean up previously generated JSON test data files."""
+ for file in output_path.glob("*.json"):
+ file.unlink()