Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
@@ -0,0 +1,62 @@
using NUnit.Framework;
using Shouldly;

namespace EfficientDynamoDb.IntegrationTests.DataPlane.Converters.LowLevel;

[TestFixture]
public class LowLevelCollectionConverterShould
{
    private const string KeyPrefix = "effddb_tests-low_lvl_collection_converters";
    private DynamoDbContext _context = null!;

    // Keys of the item written by the current test; null when nothing was written,
    // so TearDown only deletes what the test actually created.
    private string? _testPartitionKey;
    private string? _testSortKey;

    [SetUp]
    public void SetUp()
    {
        _context = TestHelper.CreateContext();
    }

    [TearDown]
    public async Task TearDown()
    {
        if (_testPartitionKey != null && _testSortKey != null)
        {
            await _context.DeleteItemAsync<TestCollectionConverterEntity>(_testPartitionKey, _testSortKey);

            // Reset so a later test can't accidentally re-delete stale keys it never wrote.
            _testPartitionKey = null;
            _testSortKey = null;
        }
    }

    /// <summary>
    /// Writes an entity whose properties use a custom low-level collection converter
    /// (<see cref="TestCompositeKeyDdbConverter"/>) and verifies the values round-trip
    /// unchanged through PutItem + consistent GetItem.
    /// </summary>
    [Test]
    public async Task ApplyCustomConverterForCollectionProperties()
    {
        _testPartitionKey = $"{KeyPrefix}-pk";
        _testSortKey = $"{KeyPrefix}-sk";

        var item = new TestCollectionConverterEntity
        {
            PartitionKey = _testPartitionKey,
            SortKey = _testSortKey,
            CompositeKey = new()
            {
                Part1 = "part1_value",
                Part2 = "part2_value",
                Part3 = "part3_value"
            },
            CompositeKey2 = new()
            {
                Part1 = "part1_value_2",
                Part2 = "part2_value_2",
                Part3 = "part3_value_2"
            }
        };

        await _context.PutItemAsync(item);

        // Consistent read so the assertion isn't subject to eventual consistency.
        var retrieved = await _context.GetItem<TestCollectionConverterEntity>()
            .WithPrimaryKey(_testPartitionKey, _testSortKey)
            .WithConsistentRead(true)
            .ToItemAsync();

        retrieved.ShouldBeEquivalentTo(item);
    }
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,19 @@
using EfficientDynamoDb.Attributes;

namespace EfficientDynamoDb.IntegrationTests.DataPlane.Converters.LowLevel;

// Test entity exercising a custom low-level converter (TestCompositeKeyDdbConverter)
// applied to two properties of the same type via the converter-type attribute overload.
[DynamoDbTable(TestHelper.TestTableName)]
public class TestCollectionConverterEntity
{
    [DynamoDbProperty("pk", DynamoDbAttributeType.PartitionKey)]
    public required string PartitionKey { get; init; }

    [DynamoDbProperty("sk", DynamoDbAttributeType.SortKey)]
    public required string SortKey { get; init; }

    // Stored as the "compositeKey" attribute — a DynamoDB list of three strings.
    [DynamoDbProperty("compositeKey", typeof(TestCompositeKeyDdbConverter))]
    public required TestCompositeKey CompositeKey { get; init; }

    // Second property with the same converter, verifying the converter is reusable
    // across multiple attributes on one entity.
    [DynamoDbProperty("compositeKey2", typeof(TestCompositeKeyDdbConverter))]
    public required TestCompositeKey CompositeKey2 { get; init; }
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,65 @@
using System.Text;
using EfficientDynamoDb.Converters;
using EfficientDynamoDb.DocumentModel;

namespace EfficientDynamoDb.IntegrationTests.DataPlane.Converters.LowLevel;

// Composite value persisted as a single DynamoDB list attribute of exactly
// three string elements: [Part1, Part2, Part3] (see TestCompositeKeyDdbConverter).
public class TestCompositeKey
{
    public string Part1 { get; set; } = null!;
    public string Part2 { get; set; } = null!;
    public string Part3 { get; set; } = null!;
}

/// <summary>
/// Serializes <see cref="TestCompositeKey"/> as a DynamoDB list of three strings
/// in fixed order: [Part1, Part2, Part3].
/// </summary>
public class TestCompositeKeyDdbConverter : DdbConverter<TestCompositeKey>
{
    // Document-model read: the attribute is a list whose items are string attributes.
    public override TestCompositeKey Read(in AttributeValue attributeValue)
    {
        var items = attributeValue.AsListAttribute().Items;

        return new TestCompositeKey
        {
            Part1 = items[0].AsString(),
            Part2 = items[1].AsString(),
            Part3 = items[2].AsString()
        };
    }

    // Document-model write: emit the three parts as string attributes inside a list.
    public override AttributeValue Write(ref TestCompositeKey value) =>
        new ListAttributeValue(
        [
            new StringAttributeValue(value.Part1),
            new StringAttributeValue(value.Part2),
            new StringAttributeValue(value.Part3)
        ]);

    // Low-level read. On entry the reader is positioned at the list's StartArray token;
    // on exit it is left positioned at the matching EndArray token.
    public override TestCompositeKey Read(ref DdbReader reader)
    {
        // Take the reader by ref so every Read() advances the shared state, not a copy.
        ref var jsonReader = ref reader.JsonReaderValue;

        // Each list item is a { "S": "<value>" } object: 4 tokens per item.
        var parts = new string[3];
        for (var i = 0; i < parts.Length; i++)
        {
            jsonReader.Read(); // StartObject
            jsonReader.Read(); // "S" property name
            jsonReader.Read(); // string value
            parts[i] = Encoding.UTF8.GetString(jsonReader.ValueSpan);
            jsonReader.Read(); // EndObject
        }

        // Advance onto the closing EndArray token.
        jsonReader.Read();

        return new TestCompositeKey
        {
            Part1 = parts[0],
            Part2 = parts[1],
            Part3 = parts[2]
        };
    }
}
3 changes: 2 additions & 1 deletion src/EfficientDynamoDb/Converters/DdbReader.cs
Original file line number Diff line number Diff line change
Expand Up @@ -8,8 +8,9 @@ namespace EfficientDynamoDb.Converters
{
public ref struct DdbReader
{
internal Utf8JsonReader JsonReaderValue;
public Utf8JsonReader JsonReaderValue;

[Obsolete($"This property returns a copy of {nameof(JsonReaderValue)} that won't advance the underlying reader correctly. Use ref to {nameof(JsonReaderValue)} instead.")]
public Utf8JsonReader JsonReader
{
[MethodImpl(MethodImplOptions.AggressiveInlining)]
Expand Down
70 changes: 65 additions & 5 deletions website/docs/dev_guide/high_level/converters.md
Original file line number Diff line number Diff line change
Expand Up @@ -112,8 +112,8 @@ public class CustomIntConverter : DdbConverter<int>
// Efficient zero-allocation JSON to int conversion
public override int Read(ref DdbReader reader)
{
if (!Utf8Parser.TryParse(reader.JsonReader.ValueSpan, out int value, out _))
throw new DdbException($"Couldn't parse int ddb value from '{reader.JsonReader.GetString()}'.");
if (!Utf8Parser.TryParse(reader.JsonReaderValue.ValueSpan, out int value, out _))
throw new DdbException($"Couldn't parse int ddb value from '{reader.JsonReaderValue.GetString()}'.");

return value;
}
Expand All @@ -136,11 +136,71 @@ public class CustomIntConverter : DdbConverter<int>

### JSON reading

When a low-level read is called, `DdbReader.JsonReader` is already pointed to the JSON value. Current attribute type is automatically parsed and can be accessed using `DdbReader.AttributeType` property.
When a low-level read is called, `DdbReader.JsonReaderValue` already points to the JSON value. The current attribute type is parsed automatically and can be accessed via the `DdbReader.AttributeType` property.

The `reader.JsonReader.HasValueSequence` is guaranteed to be false at this point, so it's safe to use `reader.JsonReader.ValueSpan` to access the JSON buffer.
The `reader.JsonReaderValue.HasValueSequence` is guaranteed to be false at this point, so it's safe to use `reader.JsonReaderValue.ValueSpan` to access the JSON buffer.

The `DdbReader.JsonReader.Read` method should not be explicitly called unless you are writing a converter for a non-scalar DynamoDB data type - i.e., a map, list or set.
The `JsonReaderValue.Read()` method should not be called explicitly unless you are writing a converter for a non-scalar DynamoDB data type - i.e., a map, list or set. When reading non-scalar types, you must take a `ref` local to `JsonReaderValue` (e.g. `ref var jsonReader = ref reader.JsonReaderValue;`) so that each `Read()` advances the underlying reader rather than a copy.

#### Parsing DynamoDB lists and arrays

By default, EfficientDynamoDb automatically parses DynamoDB collections (lists, sets and maps) into .NET collections and dictionaries.
However, if you need to parse a DynamoDB list (array) into a custom type, you can implement the `Read` method manually.

When parsing a DynamoDB collection, you need to manually advance through the JSON tokens. Assuming the following DDB JSON for a list of strings:

```json
[
{ "S": "value1" },
{ "S": "value2" },
{ "S": "value3" }
]
```

The following converter will parse this list into a separator-delimited string, e.g. `value1#value2#value3`:

```csharp
public class StringListConverter : DdbConverter<string>
{
    // High-level methods are skipped for simplicity in this example.

    public override string Read(ref DdbReader reader)
    {
        // Take a ref local so every Read() advances the underlying reader, not a copy.
        ref var jsonReader = ref reader.JsonReaderValue;
        // jsonReader is pointing to the StartArray token

        var result = new List<string>();
        while (true)
        {
            // Advance to the next item's StartObject, or to EndArray when the list ends.
            // Checking AFTER the read (instead of a `TokenType != EndArray` loop condition)
            // is essential: otherwise the loop re-enters after the last item and the
            // subsequent reads run past the end of the array, corrupting parsing of the
            // rest of the entity.
            jsonReader.Read();
            if (jsonReader.TokenType == JsonTokenType.EndArray)
                break;

            // Read property name ("S" for string)
            jsonReader.Read();

            // Read string value
            jsonReader.Read();
            result.Add(jsonReader.GetString());

            // Read EndObject token
            jsonReader.Read();
        }

        // The reader is now positioned at the EndArray token — the state the framework
        // expects a collection converter to leave it in; do not read past it.
        return string.Join('#', result);
    }
}
```

:::info
Always use `ref` when accessing `JsonReaderValue` to call `Read()` or access its properties. This ensures the reader state advances correctly. Using the obsolete `JsonReader` property (which returns a copy) will not advance the underlying reader and will cause parsing errors.
:::

:::caution
Leaving the reader in invalid state can cause parsing errors for the whole entity. It is the responsibility of the converter to ensure the reader is in a valid state after reading.
:::

### JSON writing

Expand Down