-
Notifications
You must be signed in to change notification settings - Fork 291
Expand file tree
/
Copy pathProgram.cs
More file actions
122 lines (104 loc) · 3.18 KB
/
Program.cs
File metadata and controls
122 lines (104 loc) · 3.18 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
// <complete_code>
// <imports>
using Microsoft.AI.Foundry.Local;
using Betalgo.Ranul.OpenAI.ObjectModels.RequestModels;
using Microsoft.Extensions.Logging;
// </imports>
// <init>
// Sample entry point: boots Foundry Local, downloads/loads a small model,
// then summarizes either a single text file or every .txt in a directory.
CancellationToken ct = CancellationToken.None;
var config = new Configuration
{
    AppName = "foundry_local_samples",
    LogLevel = Microsoft.AI.Foundry.Local.LogLevel.Information
};
using var loggerFactory = LoggerFactory.Create(builder =>
{
    builder.SetMinimumLevel(Microsoft.Extensions.Logging.LogLevel.Information);
});
var logger = loggerFactory.CreateLogger<Program>();

// Initialize the singleton instance.
await FoundryLocalManager.CreateAsync(config, logger);
var mgr = FoundryLocalManager.Instance;

// Download and register all execution providers, reporting per-EP progress.
// The carriage return ("\r") rewrites the current console line in place;
// a newline is emitted each time the provider being downloaded changes.
var currentEp = "";
await mgr.DownloadAndRegisterEpsAsync((epName, percent) =>
{
    if (epName != currentEp)
    {
        if (currentEp != "") Console.WriteLine();
        currentEp = epName;
    }
    Console.Write($"\r {epName.PadRight(30)} {percent,6:F1}%");
});
if (currentEp != "") Console.WriteLine();

// Select and load a model from the catalog.
// NOTE: throw a specific exception type rather than the base Exception.
var catalog = await mgr.GetCatalogAsync();
var model = await catalog.GetModelAsync("qwen2.5-0.5b")
    ?? throw new InvalidOperationException("Model not found");
await model.DownloadAsync(progress =>
{
    Console.Write($"\rDownloading model: {progress:F2}%");
    if (progress >= 100f) Console.WriteLine();
});
await model.LoadAsync();
Console.WriteLine("Model loaded and ready.\n");

// Get a chat client for the loaded model.
var chatClient = await model.GetChatClientAsync();
// </init>

// <summarization>
var systemPrompt =
    "Summarize the following document into concise bullet points. " +
    "Focus on the key points and main ideas.";
// <file_reading>
// Target defaults to "document.txt" when no CLI argument is given.
var target = args.Length > 0 ? args[0] : "document.txt";
// </file_reading>
if (Directory.Exists(target))
{
    await SummarizeDirectoryAsync(chatClient, target, systemPrompt, ct);
}
else if (File.Exists(target))
{
    Console.WriteLine($"--- {Path.GetFileName(target)} ---");
    await SummarizeFileAsync(chatClient, target, systemPrompt, ct);
}
else
{
    // Guard against a missing target: previously an unhandled
    // FileNotFoundException here would also skip model cleanup below.
    Console.WriteLine($"Target not found: {target}");
}
// </summarization>

// Clean up
await model.UnloadAsync();
Console.WriteLine("\nModel unloaded. Done!");
// Summarizes one file: reads its full text, sends it to the chat client with
// the given system prompt, and prints the first completion choice to stdout.
// The client stays dynamic because the SDK's concrete client type is not
// visible from this file — TODO confirm its shape against the SDK.
async Task SummarizeFileAsync(
    dynamic client,
    string filePath,
    string prompt,
    CancellationToken token)
{
    string text = await File.ReadAllTextAsync(filePath, token);

    // System message carries the summarization instructions;
    // the user message carries the raw document text.
    var chatMessages = new List<ChatMessage>();
    chatMessages.Add(new ChatMessage { Role = "system", Content = prompt });
    chatMessages.Add(new ChatMessage { Role = "user", Content = text });

    var completion = await client.CompleteChatAsync(chatMessages, token);
    Console.WriteLine(completion.Choices[0].Message.Content);
}
// Summarizes every .txt file directly inside <directory>, one after another,
// printing a "--- name ---" header before each file's summary.
// Prints a notice and returns if the directory contains no .txt files.
async Task SummarizeDirectoryAsync(
    dynamic client,
    string directory,
    string prompt,
    CancellationToken token)
{
    // Ordinal ordering keeps the processing sequence stable across machine
    // locales (the default OrderBy comparer is culture-sensitive).
    var txtFiles = Directory.EnumerateFiles(directory, "*.txt")
        .OrderBy(f => f, StringComparer.Ordinal)
        .ToArray();
    if (txtFiles.Length == 0)
    {
        Console.WriteLine($"No .txt files found in {directory}");
        return;
    }
    foreach (var txtFile in txtFiles)
    {
        // Honor the caller's token between files so a long batch can stop.
        token.ThrowIfCancellationRequested();
        Console.WriteLine($"--- {Path.GetFileName(txtFile)} ---");
        await SummarizeFileAsync(client, txtFile, prompt, token);
        Console.WriteLine();
    }
}
// </complete_code>