Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -17,7 +17,7 @@
"Slug": "compactconversation",
"Description": "Generated from OpenAPI examples.",
"Language": "http",
"Code": "### Compact a conversation. Returns a compacted response object.\n\nLearn when and how to compact long-running conversations in the [conversation state guide](/docs/guides/conversation-state#managing-the-context-window). For ZDR-compatible compaction details, see [Compaction (advanced)](/docs/guides/conversation-state#compaction-advanced).\n# @name Compactconversation\nPOST {{host}}/responses/compact\nAuthorization: Bearer {{token}}\nContent-Type: application/json\nAccept: application/json\n\n{\n \u0022model\u0022: \u0022gpt-5.1\u0022,\n \u0022input\u0022: \u0022string\u0022,\n \u0022previous_response_id\u0022: \u0022resp_123\u0022,\n \u0022instructions\u0022: \u0022string\u0022,\n \u0022prompt_cache_key\u0022: \u0022string\u0022\n}\n\n## Responses\n# 200\n# Description: Success\n# Content-Type: application/json",
"Code": "### Compact a conversation. Returns a compacted response object.\n\nLearn when and how to compact long-running conversations in the [conversation state guide](/docs/guides/conversation-state#managing-the-context-window). For ZDR-compatible compaction details, see [Compaction (advanced)](/docs/guides/conversation-state#compaction-advanced).\n# @name Compactconversation\nPOST {{host}}/responses/compact\nAuthorization: Bearer {{token}}\nContent-Type: application/json\nAccept: application/json\n\n{\n \u0022model\u0022: \u0022gpt-5.1\u0022,\n \u0022input\u0022: \u0022string\u0022,\n \u0022previous_response_id\u0022: \u0022resp_123\u0022,\n \u0022instructions\u0022: \u0022string\u0022,\n \u0022prompt_cache_key\u0022: \u0022string\u0022,\n \u0022prompt_cache_retention\u0022: \u0022in_memory\u0022\n}\n\n## Responses\n# 200\n# Description: Success\n# Content-Type: application/json",
"Format": "http",
"OperationId": "Compactconversation",
"Setup": null
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -28,6 +28,7 @@ public partial interface IOpenAiClient
/// <param name="previousResponseId"></param>
/// <param name="instructions"></param>
/// <param name="promptCacheKey"></param>
/// <param name="promptCacheRetention"></param>
/// <param name="requestOptions">Per-request overrides such as headers, query parameters, timeout, retries, and response buffering.</param>
/// <param name="cancellationToken">The token to cancel the operation with</param>
/// <exception cref="global::System.InvalidOperationException"></exception>
Expand All @@ -37,6 +38,7 @@ public partial interface IOpenAiClient
string? previousResponseId = default,
string? instructions = default,
string? promptCacheKey = default,
global::tryAGI.OpenAI.PromptCacheRetentionEnum? promptCacheRetention = default,
global::tryAGI.OpenAI.AutoSDKRequestOptions? requestOptions = default,
global::System.Threading.CancellationToken cancellationToken = default);
}
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,53 @@
#nullable enable

namespace tryAGI.OpenAI.JsonConverters
{
    /// <summary>
    /// Converts <see cref="global::tryAGI.OpenAI.PromptCacheRetentionEnum"/> to and from JSON.
    /// Reading accepts a string member name, a raw numeric value, or null (all unrecognized
    /// or null inputs fall back to the enum's default value); writing always emits the
    /// string form produced by <c>PromptCacheRetentionEnumExtensions.ToValueString</c>.
    /// </summary>
    public sealed class PromptCacheRetentionEnumJsonConverter : global::System.Text.Json.Serialization.JsonConverter<global::tryAGI.OpenAI.PromptCacheRetentionEnum>
    {
        /// <inheritdoc />
        public override global::tryAGI.OpenAI.PromptCacheRetentionEnum Read(
            ref global::System.Text.Json.Utf8JsonReader reader,
            global::System.Type typeToConvert,
            global::System.Text.Json.JsonSerializerOptions options)
        {
            if (reader.TokenType == global::System.Text.Json.JsonTokenType.String)
            {
                var raw = reader.GetString();

                // A null payload or an unknown member name both map to the default value.
                return raw == null
                    ? default
                    : global::tryAGI.OpenAI.PromptCacheRetentionEnumExtensions.ToEnum(raw) ?? default;
            }

            if (reader.TokenType == global::System.Text.Json.JsonTokenType.Number)
            {
                // Numeric payloads are cast directly onto the underlying enum value.
                return (global::tryAGI.OpenAI.PromptCacheRetentionEnum)reader.GetInt32();
            }

            if (reader.TokenType == global::System.Text.Json.JsonTokenType.Null)
            {
                return default(global::tryAGI.OpenAI.PromptCacheRetentionEnum);
            }

            // Any other token type is unexpected for an enum-valued property.
            throw new global::System.ArgumentOutOfRangeException(nameof(reader));
        }

        /// <inheritdoc />
        public override void Write(
            global::System.Text.Json.Utf8JsonWriter writer,
            global::tryAGI.OpenAI.PromptCacheRetentionEnum value,
            global::System.Text.Json.JsonSerializerOptions options)
        {
            if (writer == null)
            {
                throw new global::System.ArgumentNullException(nameof(writer));
            }

            writer.WriteStringValue(global::tryAGI.OpenAI.PromptCacheRetentionEnumExtensions.ToValueString(value));
        }
    }
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,60 @@
#nullable enable

namespace tryAGI.OpenAI.JsonConverters
{
    /// <summary>
    /// Converts nullable <see cref="global::tryAGI.OpenAI.PromptCacheRetentionEnum"/> values
    /// to and from JSON. Reading accepts a string member name, a raw numeric value, or null;
    /// unrecognized string names yield null rather than a default enum value. Writing emits
    /// a JSON null for null inputs and the string form for everything else.
    /// </summary>
    public sealed class PromptCacheRetentionEnumNullableJsonConverter : global::System.Text.Json.Serialization.JsonConverter<global::tryAGI.OpenAI.PromptCacheRetentionEnum?>
    {
        /// <inheritdoc />
        public override global::tryAGI.OpenAI.PromptCacheRetentionEnum? Read(
            ref global::System.Text.Json.Utf8JsonReader reader,
            global::System.Type typeToConvert,
            global::System.Text.Json.JsonSerializerOptions options)
        {
            if (reader.TokenType == global::System.Text.Json.JsonTokenType.String)
            {
                var raw = reader.GetString();

                // ToEnum returns null for unknown names, which propagates as a null result;
                // a null string payload also yields null.
                return raw == null
                    ? default
                    : global::tryAGI.OpenAI.PromptCacheRetentionEnumExtensions.ToEnum(raw);
            }

            if (reader.TokenType == global::System.Text.Json.JsonTokenType.Number)
            {
                // Numeric payloads are cast directly onto the underlying enum value.
                return (global::tryAGI.OpenAI.PromptCacheRetentionEnum)reader.GetInt32();
            }

            if (reader.TokenType == global::System.Text.Json.JsonTokenType.Null)
            {
                return default(global::tryAGI.OpenAI.PromptCacheRetentionEnum?);
            }

            // Any other token type is unexpected for an enum-valued property.
            throw new global::System.ArgumentOutOfRangeException(nameof(reader));
        }

        /// <inheritdoc />
        public override void Write(
            global::System.Text.Json.Utf8JsonWriter writer,
            global::tryAGI.OpenAI.PromptCacheRetentionEnum? value,
            global::System.Text.Json.JsonSerializerOptions options)
        {
            if (writer == null)
            {
                throw new global::System.ArgumentNullException(nameof(writer));
            }

            if (value.HasValue)
            {
                writer.WriteStringValue(global::tryAGI.OpenAI.PromptCacheRetentionEnumExtensions.ToValueString(value.Value));
            }
            else
            {
                writer.WriteNullValue();
            }
        }
    }
}
Original file line number Diff line number Diff line change
Expand Up @@ -3781,6 +3781,10 @@ namespace tryAGI.OpenAI

typeof(global::tryAGI.OpenAI.JsonConverters.TokenCountsResourceObjectNullableJsonConverter),

typeof(global::tryAGI.OpenAI.JsonConverters.PromptCacheRetentionEnumJsonConverter),

typeof(global::tryAGI.OpenAI.JsonConverters.PromptCacheRetentionEnumNullableJsonConverter),

typeof(global::tryAGI.OpenAI.JsonConverters.CompactResourceObjectJsonConverter),

typeof(global::tryAGI.OpenAI.JsonConverters.CompactResourceObjectNullableJsonConverter),
Expand Down Expand Up @@ -7246,6 +7250,7 @@ namespace tryAGI.OpenAI
[global::System.Text.Json.Serialization.JsonSerializable(typeof(global::tryAGI.OpenAI.TokenCountsBody))]
[global::System.Text.Json.Serialization.JsonSerializable(typeof(global::tryAGI.OpenAI.TokenCountsResource))]
[global::System.Text.Json.Serialization.JsonSerializable(typeof(global::tryAGI.OpenAI.TokenCountsResourceObject), TypeInfoPropertyName = "TokenCountsResourceObject2")]
[global::System.Text.Json.Serialization.JsonSerializable(typeof(global::tryAGI.OpenAI.PromptCacheRetentionEnum), TypeInfoPropertyName = "PromptCacheRetentionEnum2")]
[global::System.Text.Json.Serialization.JsonSerializable(typeof(global::tryAGI.OpenAI.CompactResponseMethodPublicBody))]
[global::System.Text.Json.Serialization.JsonSerializable(typeof(global::tryAGI.OpenAI.ItemField), TypeInfoPropertyName = "ItemField2")]
[global::System.Text.Json.Serialization.JsonSerializable(typeof(global::tryAGI.OpenAI.ItemFieldDiscriminator))]
Expand Down
Loading
Loading