Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
54 changes: 21 additions & 33 deletions Apps.OpenAI/Actions/BackgroundActions.cs
Original file line number Diff line number Diff line change
@@ -1,16 +1,11 @@
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using Apps.OpenAI.Actions.Base;
using Apps.OpenAI.Actions.Base;
using Apps.OpenAI.Api.Requests;
using Apps.OpenAI.Constants;
using Apps.OpenAI.Dtos;
using Apps.OpenAI.Models.Entities;
using Apps.OpenAI.Models.Requests.Background;
using Apps.OpenAI.Models.Responses.Background;
using Apps.OpenAI.Models.Responses.Batch;
using Apps.OpenAI.Models.Responses.Batch.Error;
using Apps.OpenAI.Models.Responses.Review;
using Apps.OpenAI.Utils;
using Blackbird.Applications.Sdk.Common;
Expand All @@ -26,6 +21,11 @@
using Blackbird.Filters.Xliff.Xliff1;
using Newtonsoft.Json;
using RestSharp;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text;
using System.Threading.Tasks;

namespace Apps.OpenAI.Actions;

Expand Down Expand Up @@ -186,20 +186,13 @@ public async Task<BackgroundContentResponse> DownloadContentFromBackground([Acti
};
}

[Action("Get background result",
Description = "Get the MQM report results from a background batch process")]
[Action("Get background result", Description = "Get the MQM report results from a background batch process")]
public async Task<MqmBackgroundResponse> GetMqmReportFromBackground(
[ActionParameter] BackgroundDownloadRequest request)
{
var batchRequests = await GetBatchRequestsAsync(request.BatchId);
var batchResponse = await GetBatchStatusAsync(request.BatchId);

if (batchResponse.Status != "completed")
{
throw new PluginApplicationException(
$"The batch process is not completed yet. Current status: {batchResponse.Status}");
}


var stream = await fileManagementClient.DownloadAsync(request.TransformationFile);
var content = await Transformation.Parse(stream, request.TransformationFile.Name);
var units = content.GetUnits();
Expand Down Expand Up @@ -230,6 +223,9 @@ public async Task<MqmBackgroundResponse> GetMqmReportFromBackground(
{
mqmResponse = JsonConvert.DeserializeObject<MqmReportResponse>(cleaned)
?? throw new PluginApplicationException($"Invalid JSON MQM report format in batch {bucketIndex}.");

if (mqmResponse == null || mqmResponse.Reports == null)
mqmResponse = new MqmReportResponse();
}
else
{
Expand Down Expand Up @@ -324,8 +320,15 @@ private async Task<List<BatchRequestDto>> GetBatchRequestsAsync(string batchId)
$"The batch process failed. Errors: {batch.Errors}");
}

var fileContentResponse = await UniversalClient.ExecuteWithErrorHandling(
new OpenAIRequest($"/files/{batch.OutputFileId}/content", Method.Get));
if (string.IsNullOrEmpty(batch.OutputFileId) && !string.IsNullOrEmpty(batch.ErrorFileId))
{
var errorRequest = new OpenAIRequest($"/files/{batch.ErrorFileId}/content", Method.Get);
var errorBatchResponse = await UniversalClient.ExecuteWithErrorHandling<BatchItemErrorResponse>(errorRequest);
throw new PluginApplicationException(errorBatchResponse.Response.Body.Error.Message);
}

var fileContentRequest = new OpenAIRequest($"/files/{batch.OutputFileId}/content", Method.Get);
var fileContentResponse = await UniversalClient.ExecuteWithErrorHandling(fileContentRequest);

var batchRequests = new List<BatchRequestDto>();
using var reader = new StringReader(fileContentResponse.Content!);
Expand All @@ -345,19 +348,4 @@ private async Task<BatchResponse> GetBatchStatusAsync(string batchId)
}

#endregion

private class MqmReportResponse
{
[JsonProperty("reports")]
public List<MqmReportEntity> Reports { get; set; } = new();
}

private class MqmReportEntity
{
[JsonProperty("segment_id")]
public string SegmentId { get; set; } = string.Empty;

[JsonProperty("mqm_report")]
public string MqmReport { get; set; } = string.Empty;
}
}
23 changes: 18 additions & 5 deletions Apps.OpenAI/Actions/Base/BaseActions.cs
Original file line number Diff line number Diff line change
Expand Up @@ -244,6 +244,15 @@ protected async Task<XliffDocument> DownloadXliffDocumentAsync(FileReference fil
}

protected async Task<ChatCompletionDto> ExecuteChatCompletion(IEnumerable<object> messages, string model, BaseChatRequest input = null, object responseFormat = null)
{
var body = GenerateChatBody(messages, model, input);
return await UniversalClient.ExecuteChatCompletion(body);
}

protected static Dictionary<string, object> GenerateChatBody(
IEnumerable<object> messages,
string model,
BaseChatRequest input = null)
{
var body = new Dictionary<string, object>
{
Expand All @@ -254,15 +263,19 @@ protected async Task<ChatCompletionDto> ExecuteChatCompletion(IEnumerable<object
{ "frequency_penalty", input?.FrequencyPenalty ?? 0 }
};

if (!string.IsNullOrEmpty(model) && !model.Contains("gpt-5"))
bool usesLegacyParams = model.Contains("gpt-3") || model.Contains("gpt-4");
if (usesLegacyParams)
{
body.AppendIfNotNull("temperature", input?.Temperature);
body.AppendIfNotNull("max_tokens", input?.MaximumTokens);
}
else
{
body.AppendIfNotNull("max_completion_tokens", input?.MaximumTokens);
body.AppendIfNotNull("reasoning_effort", input?.ReasoningEffort);
}

body.AppendIfNotNull("max_completion_tokens", input?.MaximumTokens);
body.AppendIfNotNull("reasoning_effort", input?.ReasoningEffort);

return await UniversalClient.ExecuteChatCompletion(body);
return body;
}

protected async Task<string> IdentifySourceLanguage(TextChatModelIdentifier modelIdentifier, string content)
Expand Down
60 changes: 28 additions & 32 deletions Apps.OpenAI/Actions/EditActions.cs
Original file line number Diff line number Diff line change
Expand Up @@ -165,34 +165,38 @@ async Task<IEnumerable<TranslationEntity>> BatchTranslate(IEnumerable<(Unit Unit

[Action("Edit in background",
Description = "Start background editing process for a translated file. This action will return a batch ID that can be used to download the results later.")]
public async Task<BackgroundProcessingResponse> EditInBackground([ActionParameter] StartBackgroundProcessRequest processRequest)
public async Task<BackgroundProcessingResponse> EditInBackground(
[ActionParameter] StartBackgroundProcessRequest processRequest)
{
var stream = await fileManagementClient.DownloadAsync(processRequest.File);
var content = await ErrorHandler.ExecuteWithErrorHandlingAsync(async () =>
await Transformation.Parse(stream, processRequest.File.Name)
);

var units = content.GetUnits();
var segments = units.SelectMany(x => x.Segments);
segments = segments.GetSegmentsForEditing().ToList();
var segments = units.SelectMany(x => x.Segments).GetSegmentsForEditing().ToList();

Glossary? blackbirdGlossary = await ProcessGlossaryFromFile(processRequest.Glossary);
Dictionary<string, List<GlossaryEntry>>? glossaryLookup = null;
Glossary blackbirdGlossary = await ProcessGlossaryFromFile(processRequest.Glossary);
Dictionary<string, List<GlossaryEntry>> glossaryLookup = null;
if (blackbirdGlossary != null)
{
glossaryLookup = CreateGlossaryLookup(blackbirdGlossary);
}

var systemPromptBase = "You are receiving source texts that were translated into target texts. " +
"Review the target texts and respond with edits of the target texts as necessary. " +
"If no edits required, respond with the original target texts. Return the edits in the specified JSON format.";
string systemPromptBase =
"You are receiving source texts that were translated into target texts. " +
"Review the target texts and respond with edits of the target texts as necessary. " +
"If no edits required, respond with the original target texts. " +
"Return the edits in the specified JSON format." +
"The JSON must strictly follow this structure: " +
"{ \"reports\": [ { \"segment_id\": \"(string matching custom_id)\", \"mqm_report\": \"(the edited text)\" } ] }";

if (processRequest.AdditionalInstructions != null)
{
systemPromptBase += $" Additional instructions: {processRequest.AdditionalInstructions}.";
}

if(glossaryLookup != null)
if (glossaryLookup != null)
{
systemPromptBase += " Use the provided glossary to ensure accurate translations of specific terms.";
}
Expand Down Expand Up @@ -232,34 +236,26 @@ await Transformation.Parse(stream, processRequest.File.Name)
}
}

string modelId = UniversalClient.GetModel(processRequest.ModelId);
var chatInput = new BaseChatRequest
{
MaximumTokens = processRequest.MaximumTokens,
Temperature = 0.3f
};

var messages = new object[]
{
new { role = MessageRoles.System, content = systemPromptBase },
new { role = MessageRoles.User, content = userPrompt }
};

var bodyDict = GenerateChatBody(messages, modelId, chatInput);
var batchRequest = new
{
custom_id = bucketIndex.ToString(),
method = "POST",
url = "/v1/chat/completions",
body = new
{
model = UniversalClient.GetModel(processRequest.ModelId),
messages = new object[]
{
new
{
role = "system",
content = systemPromptBase
},
new
{
role = "user",
content = userPrompt
}
},
response_format = ResponseFormats.GetXliffResponseFormat(),
temperature = 0.3,
max_tokens = 4000,
top_p = 1.0,
frequency_penalty = 0.0,
presence_penalty = 0.0
}
body = bodyDict
};

batchRequests.Add(batchRequest);
Expand Down
2 changes: 1 addition & 1 deletion Apps.OpenAI/Apps.OpenAI.csproj
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,7 @@
<TargetFramework>net8.0</TargetFramework>
<Product>OpenAI</Product>
<Description>Creating safe artificial general intelligence that benefits all of humanity</Description>
<Version>2.8.15</Version>
<Version>2.8.16</Version>
<AssemblyName>Apps.OpenAI</AssemblyName>
</PropertyGroup>

Expand Down
8 changes: 0 additions & 8 deletions Apps.OpenAI/Dtos/AssistantDto.cs

This file was deleted.

14 changes: 0 additions & 14 deletions Apps.OpenAI/Dtos/AssistantMessageDto.cs

This file was deleted.

11 changes: 11 additions & 0 deletions Apps.OpenAI/Dtos/MqmReportResponse.cs
Original file line number Diff line number Diff line change
@@ -0,0 +1,11 @@
using Newtonsoft.Json;
using System.Collections.Generic;
using Apps.OpenAI.Models.Entities;

namespace Apps.OpenAI.Dtos;

/// <summary>
/// JSON payload deserialized from a completed background batch item;
/// wraps the list of per-segment MQM reports under the "reports" key.
/// </summary>
public class MqmReportResponse
{
// Defaults to an empty list so consumers can enumerate Reports without a
// null check when the "reports" key is absent from the JSON.
[JsonProperty("reports")]
public List<MqmReportEntity> Reports { get; set; } = [];
}
12 changes: 12 additions & 0 deletions Apps.OpenAI/Models/Entities/MqmReportEntity.cs
Original file line number Diff line number Diff line change
@@ -0,0 +1,12 @@
using Newtonsoft.Json;

namespace Apps.OpenAI.Models.Entities;

/// <summary>
/// One entry of an MQM report response: the report text for a single
/// segment, keyed by its segment identifier.
/// </summary>
public class MqmReportEntity
{
// Matches the batch request's custom_id / segment identifier ("segment_id").
[JsonProperty("segment_id")]
public string SegmentId { get; set; }

// The MQM report (or edited text) produced for this segment ("mqm_report").
[JsonProperty("mqm_report")]
public string MqmReport { get; set; }
}
Original file line number Diff line number Diff line change
Expand Up @@ -18,7 +18,10 @@ public class StartBackgroundProcessRequest : TextChatModelIdentifier

[Display("Additional instructions", Description = "Additional instructions to guide the translation process.")]
public string? AdditionalInstructions { get; set; }


[Display("Maximum tokens")]
public int? MaximumTokens { get; set; }

public FileReference? Glossary { get; set; }

[Display("Bucket size", Description = "Specify the number of source texts to be translated at once. Default value: 25.")]
Expand Down
4 changes: 4 additions & 0 deletions Apps.OpenAI/Models/Responses/Batch/BatchResponse.cs
Original file line number Diff line number Diff line change
Expand Up @@ -31,6 +31,10 @@ public class BatchResponse

[JsonProperty("expectedCompletionTime")]
public string ExpectedCompletionTime { get; set; }

[DefinitionIgnore]
[JsonProperty("error_file_id")]
public string ErrorFileId { get; set; } = string.Empty;
}

public class BatchPaginationResponse
Expand Down
9 changes: 9 additions & 0 deletions Apps.OpenAI/Models/Responses/Batch/Error/BatchErrorDetail.cs
Original file line number Diff line number Diff line change
@@ -0,0 +1,9 @@
using Newtonsoft.Json;

namespace Apps.OpenAI.Models.Responses.Batch.Error;

/// <summary>
/// Error detail node of a failed batch item; carries the human-readable
/// message surfaced to the user via PluginApplicationException.
/// </summary>
public class BatchErrorDetail
{
[JsonProperty("message")]
public string Message { get; set; }
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,9 @@
using Newtonsoft.Json;

namespace Apps.OpenAI.Models.Responses.Batch.Error;

/// <summary>
/// Top-level shape of one line of a batch error file
/// (downloaded via /files/{error_file_id}/content); only the
/// "response" node is consumed.
/// </summary>
public class BatchItemErrorResponse
{
[JsonProperty("response")]
public BatchResponseInfo Response { get; set; }
}
12 changes: 12 additions & 0 deletions Apps.OpenAI/Models/Responses/Batch/Error/BatchResponseBody.cs
Original file line number Diff line number Diff line change
@@ -0,0 +1,12 @@
using Newtonsoft.Json;

namespace Apps.OpenAI.Models.Responses.Batch.Error;

/// <summary>
/// Body of a batch item's response; for failed items the "error" node
/// holds the failure detail.
/// </summary>
public class BatchResponseBody
{
[JsonProperty("error")]
public BatchErrorDetail Error { get; set; }

// Left untyped (object) because only the error path is consumed here;
// NOTE(review): presumably the chat-completion "choices" array — confirm
// before giving it a concrete type.
[JsonProperty("choices")]
public object Choices { get; set; }
}
9 changes: 9 additions & 0 deletions Apps.OpenAI/Models/Responses/Batch/Error/BatchResponseInfo.cs
Original file line number Diff line number Diff line change
@@ -0,0 +1,9 @@
using Newtonsoft.Json;

namespace Apps.OpenAI.Models.Responses.Batch.Error;

/// <summary>
/// Intermediate "response" node of a batch item error line; exposes the
/// nested "body" that contains the error detail.
/// </summary>
public class BatchResponseInfo
{
[JsonProperty("body")]
public BatchResponseBody Body { get; set; }
}
2 changes: 1 addition & 1 deletion Apps.OpenAI/Utils/DictionaryHelper.cs
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,7 @@ namespace Apps.OpenAI.Utils;

public static class DictionaryHelper
{
public static Dictionary<string, object> AppendIfNotNull<T>(this Dictionary<string, object> dictionary, string key, T? value)
public static Dictionary<string, object> AppendIfNotNull<T>(this Dictionary<string, object> dictionary, string key, T value)
{
if (value != null)
dictionary[key] = value;
Expand Down
Loading