From bed398fb39173344fb6fd9af8dc7f685c828bce3 Mon Sep 17 00:00:00 2001
From: Tsanie Lily
Date: Wed, 19 Feb 2025 10:03:07 +0800
Subject: [PATCH] implement the first version.

---
 llm-git-message.sln             |  22 ++++
 llm-git-message/Program.cs      | 176 ++++++++++++++++++++++++++++++++
 llm-git-message/lgm.config.json |   5 +
 llm-git-message/lgm.csproj      |  19 ++++
 4 files changed, 222 insertions(+)
 create mode 100644 llm-git-message.sln
 create mode 100644 llm-git-message/Program.cs
 create mode 100644 llm-git-message/lgm.config.json
 create mode 100644 llm-git-message/lgm.csproj

diff --git a/llm-git-message.sln b/llm-git-message.sln
new file mode 100644
index 0000000..ea5e71e
--- /dev/null
+++ b/llm-git-message.sln
@@ -0,0 +1,22 @@
+
+Microsoft Visual Studio Solution File, Format Version 12.00
+# Visual Studio Version 17
+VisualStudioVersion = 17.12.35728.132
+MinimumVisualStudioVersion = 10.0.40219.1
+Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "lgm", "llm-git-message\lgm.csproj", "{E93E28C3-D88F-4228-94A5-43B3CAECC57E}"
+EndProject
+Global
+	GlobalSection(SolutionConfigurationPlatforms) = preSolution
+		Debug|Any CPU = Debug|Any CPU
+		Release|Any CPU = Release|Any CPU
+	EndGlobalSection
+	GlobalSection(ProjectConfigurationPlatforms) = postSolution
+		{E93E28C3-D88F-4228-94A5-43B3CAECC57E}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
+		{E93E28C3-D88F-4228-94A5-43B3CAECC57E}.Debug|Any CPU.Build.0 = Debug|Any CPU
+		{E93E28C3-D88F-4228-94A5-43B3CAECC57E}.Release|Any CPU.ActiveCfg = Release|Any CPU
+		{E93E28C3-D88F-4228-94A5-43B3CAECC57E}.Release|Any CPU.Build.0 = Release|Any CPU
+	EndGlobalSection
+	GlobalSection(SolutionProperties) = preSolution
+		HideSolutionNode = FALSE
+	EndGlobalSection
+EndGlobal
diff --git a/llm-git-message/Program.cs b/llm-git-message/Program.cs
new file mode 100644
index 0000000..e854a82
--- /dev/null
+++ b/llm-git-message/Program.cs
@@ -0,0 +1,176 @@
+using System.Diagnostics;
+using System.Net;
+using System.Net.Http.Headers;
+using System.Text;
+using System.Text.Json;
+using System.Text.Json.Nodes;
+
+namespace Blahblaho.LLM.GitMessage;
+
+class Program
+{
+
+    static readonly JsonSerializerOptions defaultOptions = new(JsonSerializerDefaults.Web);
+
+    static int Main(string[] args)
+    {
+        string configFile = Path.Combine(Path.GetDirectoryName(Environment.CommandLine)!, "lgm.config.json");
+        if (!File.Exists(configFile))
+        {
+            Console.WriteLine("Cannot find configuration file.");
+            return 1;
+        }
+
+        Configure? configure;
+        try
+        {
+            configure = JsonSerializer.Deserialize<Configure>(File.ReadAllText(configFile), defaultOptions);
+            if (configure is null)
+            {
+                throw new FormatException("Failed to deserialize.");
+            }
+        }
+        catch (Exception ex)
+        {
+            Console.WriteLine($"Invalid configuration file: {ex.Message}");
+            return 1;
+        }
+
+        var pi = new ProcessStartInfo
+        {
+            CreateNoWindow = false,
+            FileName = "git",
+            Arguments = "diff",
+            WindowStyle = ProcessWindowStyle.Hidden,
+            WorkingDirectory = Directory.GetCurrentDirectory(),
+            RedirectStandardError = true,
+            RedirectStandardOutput = true
+        };
+        var process = Process.Start(pi);
+
+        var sb = new StringBuilder();
+        string? line;
+        while ((line = process?.StandardOutput.ReadLine()) is not null)
+        {
+            sb.AppendLine(line);
+        }
+
+        var request = J(
+            ("model", configure.Model),
+            ("messages", A(
+                J(
+                    ("role", "user"),
+                    ("content", """
+                        You are a git commit expert.
+                        Based on the provided git diff result, generate a commit message that adheres to the following structure and explanations:
+
+                        type prefix (feat, fix, etc.): the description is a concise summary of the change; match the response language to the dominant language of the code comments.
+                        The optional body provides additional context or details about the change.
+                        The optional footer provides breaking changes or issue references, e.g., Closes #123.
+
+                        Requirements:
+                        Merge similar modifications and streamline the commit messages to no more than 5.
+                        """)),
+                J(
+                    ("role", "user"),
+                    ("content", sb.ToString())))),
+            ("temperature", 0.5),
+            //("top_p", 0.7),
+            //("top_k", 50),
+            ("max_tokens", 4096),
+            ("stream", true)
+            );
+
+        using var client = new HttpClient
+        {
+            Timeout = TimeSpan.FromMinutes(2),
+            DefaultRequestVersion = HttpVersion.Version20,
+            DefaultRequestHeaders =
+            {
+                Authorization = new AuthenticationHeaderValue("Bearer", configure.ApiKey)
+            }
+        };
+
+        try
+        {
+            var sw = new Stopwatch();
+            sw.Restart();
+
+            RequestText(client, configure.BaseUrl, request).Wait();
+            Console.WriteLine($"\n\ntook {sw.Elapsed.TotalSeconds:n1} second(s)");
+        }
+        catch (Exception ex)
+        {
+            Console.WriteLine($"Error occurred: {ex}");
+            return 2;
+        }
+
+        return 0;
+    }
+
+    static async Task RequestText(HttpClient client, string baseUrl, JsonObject request)
+    {
+        using var requestMessage = new HttpRequestMessage(HttpMethod.Post, $"{baseUrl}chat/completions")
+        {
+            Content = new StringContent(JsonSerializer.Serialize(request), Encoding.UTF8, "application/json")
+        };
+
+        using var response = await client.SendAsync(requestMessage, HttpCompletionOption.ResponseHeadersRead);
+        using var stream = await response.Content.ReadAsStreamAsync();
+        using var reader = new StreamReader(stream);
+
+        while (!reader.EndOfStream)
+        {
+            var content = await reader.ReadLineAsync();
+            if (!string.IsNullOrEmpty(content))
+            {
+                if (content.StartsWith("data: "))
+                {
+                    content = content[6..];
+                }
+                if (content == "[DONE]")
+                {
+                    break;
+                }
+                try
+                {
+                    var data = JsonSerializer.Deserialize<JsonNode>(content, defaultOptions);
+                    if (data?["choices"]?[0]?["delta"] is JsonNode message)
+                    {
+                        string? text = null;
+                        string? think = null;
+
+                        if (message["content"] is JsonNode cnt)
+                        {
+                            text = cnt.GetValue<string>();
+
+                            Console.Write(text);
+                        }
+
+                        if (message["reasoning_content"] is JsonNode tnk)
+                        {
+                            think = tnk.GetValue<string>();
+
+                            Console.Write(think);
+                        }
+                    }
+                }
+                catch (Exception ex)
+                {
+                    Console.WriteLine($"Deserialize (\"{content}\") error: {ex.Message}");
+                }
+            }
+        }
+    }
+
+    static JsonObject J(params (string key, JsonNode? value)[] pairs)
+    {
+        return new JsonObject(pairs.Select(p => new KeyValuePair<string, JsonNode?>(p.key, p.value)));
+    }
+
+    static JsonArray A(params JsonNode?[] array)
+    {
+        return new JsonArray(array);
+    }
+}
+
+record Configure(string BaseUrl, string Model, string ApiKey);
diff --git a/llm-git-message/lgm.config.json b/llm-git-message/lgm.config.json
new file mode 100644
index 0000000..b7b2bea
--- /dev/null
+++ b/llm-git-message/lgm.config.json
@@ -0,0 +1,5 @@
+{
+  "baseUrl": "https://api.siliconflow.cn/v1/",
+  "model": "Qwen/Qwen2.5-Coder-32B-Instruct",
+  "apiKey": "your_api_key"
+}
\ No newline at end of file
diff --git a/llm-git-message/lgm.csproj b/llm-git-message/lgm.csproj
new file mode 100644
index 0000000..3d5c9ae
--- /dev/null
+++ b/llm-git-message/lgm.csproj
@@ -0,0 +1,19 @@
+<Project Sdk="Microsoft.NET.Sdk">
+
+  <PropertyGroup>
+    <OutputType>Exe</OutputType>
+    <TargetFramework>net9.0</TargetFramework>
+    <RootNamespace>Blahblaho.LLM.GitMessage</RootNamespace>
+    <ImplicitUsings>enable</ImplicitUsings>
+    <Nullable>enable</Nullable>
+
+    <PublishAot>true</PublishAot>
+  </PropertyGroup>
+
+  <ItemGroup>
+    <None Update="lgm.config.json">
+      <CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory>
+    </None>
+  </ItemGroup>
+
+</Project>
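
Note on the configuration binding used in Program.cs above: the serializer options are built from JsonSerializerDefaults.Web, which is what lets the camelCase keys in lgm.config.json ("baseUrl", "model", "apiKey") bind to the PascalCase properties of the Configure record without any [JsonPropertyName] attributes. Below is a minimal standalone sketch of that behavior; the ConfigDemo class and its Main method are illustrative only and are not part of the commit.

    using System;
    using System.Text.Json;

    record Configure(string BaseUrl, string Model, string ApiKey);

    class ConfigDemo
    {
        static void Main()
        {
            // JsonSerializerDefaults.Web enables case-insensitive matching with a
            // camelCase naming policy, mirroring what Program.cs relies on.
            var options = new JsonSerializerOptions(JsonSerializerDefaults.Web);
            var json = """{"baseUrl": "https://api.siliconflow.cn/v1/", "model": "Qwen/Qwen2.5-Coder-32B-Instruct", "apiKey": "your_api_key"}""";
            var configure = JsonSerializer.Deserialize<Configure>(json, options);
            Console.WriteLine(configure?.Model); // prints Qwen/Qwen2.5-Coder-32B-Instruct
        }
    }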