Vendor MessagePack 1.9.3 (#806)

This PR vendors `MessagePack 1.9.3`, removing a number of external assembly dependencies while requiring only minimal changes to our current Datadog trace-exporting code.

Changes in this pull request:
1. Adding the source of `MessagePack 1.9.3` to the Vendors folder and prefixing its namespaces with `Datadog.Trace.Vendors`
2. Removing code that relied on the `System.Threading.Tasks.Extensions 4.5.3` NuGet package
3. Removing code that relied on the `System.ValueTuple 4.5.0` NuGet package
4. Removing code that relied on the `System.Reflection.Emit.Lightweight 4.3.0` NuGet package
5. Removing code that relied on the `System.Reflection.Emit 4.3.0` NuGet package
6. Removing unsafe code (which removes the dependency on `System.Runtime.CompilerServices.Unsafe.dll`)
7. Revising the third-party license file

Results:
This change reduces the assembly dependencies of `Datadog.Trace` as follows (a hedged sketch of the updated serialization call site follows this list):
- All target frameworks
  * MessagePack.dll
  * System.Runtime.CompilerServices.Unsafe.dll
  * System.Threading.Tasks.Extensions.dll
- net45
  * System.ValueTuple.dll
- net461
  * System.ValueTuple.dll
  * A large number of other implementation assemblies that were previously pulled in whenever the net461 Tracer had at least one netstandard.dll dependency
- netstandard2.0
  * System.Runtime.Serialization.Primitives.dll
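
For illustration, here is a hedged sketch of what the exporter call site looks like after this change. The serializer call is taken from the `Api.cs` diff below; the surrounding method, the `Span[][]` payload type, and the `IFormatterResolver` parameter name are illustrative assumptions, not the exact file contents.

```csharp
using System.IO;
using System.Threading.Tasks;
using Datadog.Trace;
using Datadog.Trace.Vendors.MessagePack; // vendored copy, replaces the external MessagePack package

internal static class VendoredSerializationSketch
{
    public static async Task WriteTracesAsync(Stream requestStream, Span[][] traces, IFormatterResolver formatterResolver)
    {
        // Single code path: the MESSAGEPACK_1_9 / MESSAGEPACK_2_1 conditional compilation is gone,
        // and the call binds against the vendored 1.9.3 API.
        await MessagePackSerializer.SerializeAsync(requestStream, traces, formatterResolver).ConfigureAwait(false);
    }
}
```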
Commit ec5f59eaab (parent f003c9a167), authored by Zach Montoya on 2020-07-27 14:58:39 -07:00 and committed via GitHub.
77 changed files with 22473 additions and 683 deletions

View File

@ -1,6 +1,6 @@
Component,Origin,License,Copyright
opentracing-csharp,https://github.com/opentracing/opentracing-csharp,MIT,Copyright 2016-2017 The OpenTracing Authors
msgpack-cli,https://github.com/msgpack/msgpack-cli,Apache-2.0,"Copyright 2010-2016 FUJIWARA, Yusuke, all rights reserved."
MessagePack-CSharp,https://github.com/neuecc/MessagePack-CSharp,MIT,Copyright (c) 2017 Yoshifumi Kawai and contributors
liblog,https://github.com/damianh/LibLog,MIT,Copyright (C) 2011-2017 Damian Hickey
dotnet/runtime,https://github.com/dotnet/runtime,MIT,Copyright (c) .NET Foundation and contributors. All rights reserved.
clr-samples,https://github.com/Microsoft/clr-samples,MIT,Copyright (c) .NET Foundation and contributors. All rights reserved.


View File

@ -25,11 +25,6 @@
Source="$(var.TracerHomeDirectory)\net45\Datadog.Trace.dll"
KeyPath="yes" Checksum="yes" Assembly=".net"/>
</Component>
<Component Win64="$(var.Win64)">
<File Id="net45_GAC_MessagePack.dll"
Source="$(var.TracerHomeDirectory)\net45\MessagePack.dll"
KeyPath="yes" Checksum="yes" Assembly=".net"/>
</Component>
<Component Win64="$(var.Win64)">
<File Id="net45_GAC_Newtonsoft.Json.dll"
Source="$(var.TracerHomeDirectory)\net45\Newtonsoft.Json.dll"
@ -45,26 +40,11 @@
Source="$(var.TracerHomeDirectory)\net45\System.Diagnostics.DiagnosticSource.dll"
KeyPath="yes" Checksum="yes" Assembly=".net"/>
</Component>
<Component Win64="$(var.Win64)">
<File Id="net45_GAC_System.Runtime.CompilerServices.Unsafe.dll"
Source="$(var.TracerHomeDirectory)\net45\System.Runtime.CompilerServices.Unsafe.dll"
KeyPath="yes" Checksum="yes" Assembly=".net"/>
</Component>
<Component Win64="$(var.Win64)">
<File Id="net45_GAC_System.Runtime.InteropServices.RuntimeInformation.dll"
Source="$(var.TracerHomeDirectory)\net45\System.Runtime.InteropServices.RuntimeInformation.dll"
KeyPath="yes" Checksum="yes" Assembly=".net"/>
</Component>
<Component Win64="$(var.Win64)">
<File Id="net45_GAC_System.Threading.Tasks.Extensions.dll"
Source="$(var.TracerHomeDirectory)\net45\System.Threading.Tasks.Extensions.dll"
KeyPath="yes" Checksum="yes" Assembly=".net"/>
</Component>
<Component Win64="$(var.Win64)">
<File Id="net45_GAC_System.ValueTuple.dll"
Source="$(var.TracerHomeDirectory)\net45\System.ValueTuple.dll"
KeyPath="yes" Checksum="yes" Assembly=".net"/>
</Component>
</ComponentGroup>
</Fragment>
</Wix>

View File

@ -25,11 +25,6 @@
Source="$(var.TracerHomeDirectory)\net45\Datadog.Trace.dll"
KeyPath="yes" Checksum="yes"/>
</Component>
<Component Win64="$(var.Win64)">
<File Id="net45_MessagePack.dll"
Source="$(var.TracerHomeDirectory)\net45\MessagePack.dll"
KeyPath="yes" Checksum="yes"/>
</Component>
<Component Win64="$(var.Win64)">
<File Id="net45_Newtonsoft.Json.dll"
Source="$(var.TracerHomeDirectory)\net45\Newtonsoft.Json.dll"
@ -45,26 +40,11 @@
Source="$(var.TracerHomeDirectory)\net45\System.Diagnostics.DiagnosticSource.dll"
KeyPath="yes" Checksum="yes"/>
</Component>
<Component Win64="$(var.Win64)">
<File Id="net45_System.Runtime.CompilerServices.Unsafe.dll"
Source="$(var.TracerHomeDirectory)\net45\System.Runtime.CompilerServices.Unsafe.dll"
KeyPath="yes" Checksum="yes"/>
</Component>
<Component Win64="$(var.Win64)">
<File Id="net45_System.Runtime.InteropServices.RuntimeInformation.dll"
Source="$(var.TracerHomeDirectory)\net45\System.Runtime.InteropServices.RuntimeInformation.dll"
KeyPath="yes" Checksum="yes"/>
</Component>
<Component Win64="$(var.Win64)">
<File Id="net45_System.Threading.Tasks.Extensions.dll"
Source="$(var.TracerHomeDirectory)\net45\System.Threading.Tasks.Extensions.dll"
KeyPath="yes" Checksum="yes"/>
</Component>
<Component Win64="$(var.Win64)">
<File Id="net45_System.ValueTuple.dll"
Source="$(var.TracerHomeDirectory)\net45\System.ValueTuple.dll"
KeyPath="yes" Checksum="yes"/>
</Component>
</ComponentGroup>
</Fragment>
</Wix>

View File

@ -25,21 +25,6 @@
Source="$(var.TracerHomeDirectory)\net461\Datadog.Trace.dll"
KeyPath="yes" Checksum="yes"/>
</Component>
<Component Win64="$(var.Win64)">
<File Id="net461_MessagePack.dll"
Source="$(var.TracerHomeDirectory)\net461\MessagePack.dll"
KeyPath="yes" Checksum="yes"/>
</Component>
<Component Win64="$(var.Win64)">
<File Id="net461_Microsoft.Win32.Primitives.dll"
Source="$(var.TracerHomeDirectory)\net461\Microsoft.Win32.Primitives.dll"
KeyPath="yes" Checksum="yes"/>
</Component>
<Component Win64="$(var.Win64)">
<File Id="net461_netstandard.dll"
Source="$(var.TracerHomeDirectory)\net461\netstandard.dll"
KeyPath="yes" Checksum="yes"/>
</Component>
<Component Win64="$(var.Win64)">
<File Id="net461_Newtonsoft.Json.dll"
Source="$(var.TracerHomeDirectory)\net461\Newtonsoft.Json.dll"
@ -50,491 +35,16 @@
Source="$(var.TracerHomeDirectory)\net461\Sigil.dll"
KeyPath="yes" Checksum="yes"/>
</Component>
<Component Win64="$(var.Win64)">
<File Id="net461_System.AppContext.dll"
Source="$(var.TracerHomeDirectory)\net461\System.AppContext.dll"
KeyPath="yes" Checksum="yes"/>
</Component>
<Component Win64="$(var.Win64)">
<File Id="net461_System.Collections.Concurrent.dll"
Source="$(var.TracerHomeDirectory)\net461\System.Collections.Concurrent.dll"
KeyPath="yes" Checksum="yes"/>
</Component>
<Component Win64="$(var.Win64)">
<File Id="net461_System.Collections.dll"
Source="$(var.TracerHomeDirectory)\net461\System.Collections.dll"
KeyPath="yes" Checksum="yes"/>
</Component>
<Component Win64="$(var.Win64)">
<File Id="net461_System.Collections.NonGeneric.dll"
Source="$(var.TracerHomeDirectory)\net461\System.Collections.NonGeneric.dll"
KeyPath="yes" Checksum="yes"/>
</Component>
<Component Win64="$(var.Win64)">
<File Id="net461_System.Collections.Specialized.dll"
Source="$(var.TracerHomeDirectory)\net461\System.Collections.Specialized.dll"
KeyPath="yes" Checksum="yes"/>
</Component>
<Component Win64="$(var.Win64)">
<File Id="net461_System.ComponentModel.dll"
Source="$(var.TracerHomeDirectory)\net461\System.ComponentModel.dll"
KeyPath="yes" Checksum="yes"/>
</Component>
<Component Win64="$(var.Win64)">
<File Id="net461_System.ComponentModel.EventBasedAsync.dll"
Source="$(var.TracerHomeDirectory)\net461\System.ComponentModel.EventBasedAsync.dll"
KeyPath="yes" Checksum="yes"/>
</Component>
<Component Win64="$(var.Win64)">
<File Id="net461_System.ComponentModel.Primitives.dll"
Source="$(var.TracerHomeDirectory)\net461\System.ComponentModel.Primitives.dll"
KeyPath="yes" Checksum="yes"/>
</Component>
<Component Win64="$(var.Win64)">
<File Id="net461_System.ComponentModel.TypeConverter.dll"
Source="$(var.TracerHomeDirectory)\net461\System.ComponentModel.TypeConverter.dll"
KeyPath="yes" Checksum="yes"/>
</Component>
<Component Win64="$(var.Win64)">
<File Id="net461_System.Console.dll"
Source="$(var.TracerHomeDirectory)\net461\System.Console.dll"
KeyPath="yes" Checksum="yes"/>
</Component>
<Component Win64="$(var.Win64)">
<File Id="net461_System.Data.Common.dll"
Source="$(var.TracerHomeDirectory)\net461\System.Data.Common.dll"
KeyPath="yes" Checksum="yes"/>
</Component>
<Component Win64="$(var.Win64)">
<File Id="net461_System.Diagnostics.Contracts.dll"
Source="$(var.TracerHomeDirectory)\net461\System.Diagnostics.Contracts.dll"
KeyPath="yes" Checksum="yes"/>
</Component>
<Component Win64="$(var.Win64)">
<File Id="net461_System.Diagnostics.Debug.dll"
Source="$(var.TracerHomeDirectory)\net461\System.Diagnostics.Debug.dll"
KeyPath="yes" Checksum="yes"/>
</Component>
<Component Win64="$(var.Win64)">
<File Id="net461_System.Diagnostics.DiagnosticSource.dll"
Source="$(var.TracerHomeDirectory)\net461\System.Diagnostics.DiagnosticSource.dll"
KeyPath="yes" Checksum="yes"/>
</Component>
<Component Win64="$(var.Win64)">
<File Id="net461_System.Diagnostics.FileVersionInfo.dll"
Source="$(var.TracerHomeDirectory)\net461\System.Diagnostics.FileVersionInfo.dll"
KeyPath="yes" Checksum="yes"/>
</Component>
<Component Win64="$(var.Win64)">
<File Id="net461_System.Diagnostics.Process.dll"
Source="$(var.TracerHomeDirectory)\net461\System.Diagnostics.Process.dll"
KeyPath="yes" Checksum="yes"/>
</Component>
<Component Win64="$(var.Win64)">
<File Id="net461_System.Diagnostics.StackTrace.dll"
Source="$(var.TracerHomeDirectory)\net461\System.Diagnostics.StackTrace.dll"
KeyPath="yes" Checksum="yes"/>
</Component>
<Component Win64="$(var.Win64)">
<File Id="net461_System.Diagnostics.TextWriterTraceListener.dll"
Source="$(var.TracerHomeDirectory)\net461\System.Diagnostics.TextWriterTraceListener.dll"
KeyPath="yes" Checksum="yes"/>
</Component>
<Component Win64="$(var.Win64)">
<File Id="net461_System.Diagnostics.Tools.dll"
Source="$(var.TracerHomeDirectory)\net461\System.Diagnostics.Tools.dll"
KeyPath="yes" Checksum="yes"/>
</Component>
<Component Win64="$(var.Win64)">
<File Id="net461_System.Diagnostics.TraceSource.dll"
Source="$(var.TracerHomeDirectory)\net461\System.Diagnostics.TraceSource.dll"
KeyPath="yes" Checksum="yes"/>
</Component>
<Component Win64="$(var.Win64)">
<File Id="net461_System.Diagnostics.Tracing.dll"
Source="$(var.TracerHomeDirectory)\net461\System.Diagnostics.Tracing.dll"
KeyPath="yes" Checksum="yes"/>
</Component>
<Component Win64="$(var.Win64)">
<File Id="net461_System.Drawing.Primitives.dll"
Source="$(var.TracerHomeDirectory)\net461\System.Drawing.Primitives.dll"
KeyPath="yes" Checksum="yes"/>
</Component>
<Component Win64="$(var.Win64)">
<File Id="net461_System.Dynamic.Runtime.dll"
Source="$(var.TracerHomeDirectory)\net461\System.Dynamic.Runtime.dll"
KeyPath="yes" Checksum="yes"/>
</Component>
<Component Win64="$(var.Win64)">
<File Id="net461_System.Globalization.Calendars.dll"
Source="$(var.TracerHomeDirectory)\net461\System.Globalization.Calendars.dll"
KeyPath="yes" Checksum="yes"/>
</Component>
<Component Win64="$(var.Win64)">
<File Id="net461_System.Globalization.dll"
Source="$(var.TracerHomeDirectory)\net461\System.Globalization.dll"
KeyPath="yes" Checksum="yes"/>
</Component>
<Component Win64="$(var.Win64)">
<File Id="net461_System.Globalization.Extensions.dll"
Source="$(var.TracerHomeDirectory)\net461\System.Globalization.Extensions.dll"
KeyPath="yes" Checksum="yes"/>
</Component>
<Component Win64="$(var.Win64)">
<File Id="net461_System.IO.Compression.dll"
Source="$(var.TracerHomeDirectory)\net461\System.IO.Compression.dll"
KeyPath="yes" Checksum="yes"/>
</Component>
<Component Win64="$(var.Win64)">
<File Id="net461_System.IO.Compression.ZipFile.dll"
Source="$(var.TracerHomeDirectory)\net461\System.IO.Compression.ZipFile.dll"
KeyPath="yes" Checksum="yes"/>
</Component>
<Component Win64="$(var.Win64)">
<File Id="net461_System.IO.dll"
Source="$(var.TracerHomeDirectory)\net461\System.IO.dll"
KeyPath="yes" Checksum="yes"/>
</Component>
<Component Win64="$(var.Win64)">
<File Id="net461_System.IO.FileSystem.dll"
Source="$(var.TracerHomeDirectory)\net461\System.IO.FileSystem.dll"
KeyPath="yes" Checksum="yes"/>
</Component>
<Component Win64="$(var.Win64)">
<File Id="net461_System.IO.FileSystem.DriveInfo.dll"
Source="$(var.TracerHomeDirectory)\net461\System.IO.FileSystem.DriveInfo.dll"
KeyPath="yes" Checksum="yes"/>
</Component>
<Component Win64="$(var.Win64)">
<File Id="net461_System.IO.FileSystem.Primitives.dll"
Source="$(var.TracerHomeDirectory)\net461\System.IO.FileSystem.Primitives.dll"
KeyPath="yes" Checksum="yes"/>
</Component>
<Component Win64="$(var.Win64)">
<File Id="net461_System.IO.FileSystem.Watcher.dll"
Source="$(var.TracerHomeDirectory)\net461\System.IO.FileSystem.Watcher.dll"
KeyPath="yes" Checksum="yes"/>
</Component>
<Component Win64="$(var.Win64)">
<File Id="net461_System.IO.IsolatedStorage.dll"
Source="$(var.TracerHomeDirectory)\net461\System.IO.IsolatedStorage.dll"
KeyPath="yes" Checksum="yes"/>
</Component>
<Component Win64="$(var.Win64)">
<File Id="net461_System.IO.MemoryMappedFiles.dll"
Source="$(var.TracerHomeDirectory)\net461\System.IO.MemoryMappedFiles.dll"
KeyPath="yes" Checksum="yes"/>
</Component>
<Component Win64="$(var.Win64)">
<File Id="net461_System.IO.Pipes.dll"
Source="$(var.TracerHomeDirectory)\net461\System.IO.Pipes.dll"
KeyPath="yes" Checksum="yes"/>
</Component>
<Component Win64="$(var.Win64)">
<File Id="net461_System.IO.UnmanagedMemoryStream.dll"
Source="$(var.TracerHomeDirectory)\net461\System.IO.UnmanagedMemoryStream.dll"
KeyPath="yes" Checksum="yes"/>
</Component>
<Component Win64="$(var.Win64)">
<File Id="net461_System.Linq.dll"
Source="$(var.TracerHomeDirectory)\net461\System.Linq.dll"
KeyPath="yes" Checksum="yes"/>
</Component>
<Component Win64="$(var.Win64)">
<File Id="net461_System.Linq.Expressions.dll"
Source="$(var.TracerHomeDirectory)\net461\System.Linq.Expressions.dll"
KeyPath="yes" Checksum="yes"/>
</Component>
<Component Win64="$(var.Win64)">
<File Id="net461_System.Linq.Parallel.dll"
Source="$(var.TracerHomeDirectory)\net461\System.Linq.Parallel.dll"
KeyPath="yes" Checksum="yes"/>
</Component>
<Component Win64="$(var.Win64)">
<File Id="net461_System.Linq.Queryable.dll"
Source="$(var.TracerHomeDirectory)\net461\System.Linq.Queryable.dll"
KeyPath="yes" Checksum="yes"/>
</Component>
<Component Win64="$(var.Win64)">
<File Id="net461_System.Net.Http.dll"
Source="$(var.TracerHomeDirectory)\net461\System.Net.Http.dll"
KeyPath="yes" Checksum="yes"/>
</Component>
<Component Win64="$(var.Win64)">
<File Id="net461_System.Net.NameResolution.dll"
Source="$(var.TracerHomeDirectory)\net461\System.Net.NameResolution.dll"
KeyPath="yes" Checksum="yes"/>
</Component>
<Component Win64="$(var.Win64)">
<File Id="net461_System.Net.NetworkInformation.dll"
Source="$(var.TracerHomeDirectory)\net461\System.Net.NetworkInformation.dll"
KeyPath="yes" Checksum="yes"/>
</Component>
<Component Win64="$(var.Win64)">
<File Id="net461_System.Net.Ping.dll"
Source="$(var.TracerHomeDirectory)\net461\System.Net.Ping.dll"
KeyPath="yes" Checksum="yes"/>
</Component>
<Component Win64="$(var.Win64)">
<File Id="net461_System.Net.Primitives.dll"
Source="$(var.TracerHomeDirectory)\net461\System.Net.Primitives.dll"
KeyPath="yes" Checksum="yes"/>
</Component>
<Component Win64="$(var.Win64)">
<File Id="net461_System.Net.Requests.dll"
Source="$(var.TracerHomeDirectory)\net461\System.Net.Requests.dll"
KeyPath="yes" Checksum="yes"/>
</Component>
<Component Win64="$(var.Win64)">
<File Id="net461_System.Net.Security.dll"
Source="$(var.TracerHomeDirectory)\net461\System.Net.Security.dll"
KeyPath="yes" Checksum="yes"/>
</Component>
<Component Win64="$(var.Win64)">
<File Id="net461_System.Net.Sockets.dll"
Source="$(var.TracerHomeDirectory)\net461\System.Net.Sockets.dll"
KeyPath="yes" Checksum="yes"/>
</Component>
<Component Win64="$(var.Win64)">
<File Id="net461_System.Net.WebHeaderCollection.dll"
Source="$(var.TracerHomeDirectory)\net461\System.Net.WebHeaderCollection.dll"
KeyPath="yes" Checksum="yes"/>
</Component>
<Component Win64="$(var.Win64)">
<File Id="net461_System.Net.WebSockets.Client.dll"
Source="$(var.TracerHomeDirectory)\net461\System.Net.WebSockets.Client.dll"
KeyPath="yes" Checksum="yes"/>
</Component>
<Component Win64="$(var.Win64)">
<File Id="net461_System.Net.WebSockets.dll"
Source="$(var.TracerHomeDirectory)\net461\System.Net.WebSockets.dll"
KeyPath="yes" Checksum="yes"/>
</Component>
<Component Win64="$(var.Win64)">
<File Id="net461_System.ObjectModel.dll"
Source="$(var.TracerHomeDirectory)\net461\System.ObjectModel.dll"
KeyPath="yes" Checksum="yes"/>
</Component>
<Component Win64="$(var.Win64)">
<File Id="net461_System.Reflection.dll"
Source="$(var.TracerHomeDirectory)\net461\System.Reflection.dll"
KeyPath="yes" Checksum="yes"/>
</Component>
<Component Win64="$(var.Win64)">
<File Id="net461_System.Reflection.Extensions.dll"
Source="$(var.TracerHomeDirectory)\net461\System.Reflection.Extensions.dll"
KeyPath="yes" Checksum="yes"/>
</Component>
<Component Win64="$(var.Win64)">
<File Id="net461_System.Reflection.Primitives.dll"
Source="$(var.TracerHomeDirectory)\net461\System.Reflection.Primitives.dll"
KeyPath="yes" Checksum="yes"/>
</Component>
<Component Win64="$(var.Win64)">
<File Id="net461_System.Resources.Reader.dll"
Source="$(var.TracerHomeDirectory)\net461\System.Resources.Reader.dll"
KeyPath="yes" Checksum="yes"/>
</Component>
<Component Win64="$(var.Win64)">
<File Id="net461_System.Resources.ResourceManager.dll"
Source="$(var.TracerHomeDirectory)\net461\System.Resources.ResourceManager.dll"
KeyPath="yes" Checksum="yes"/>
</Component>
<Component Win64="$(var.Win64)">
<File Id="net461_System.Resources.Writer.dll"
Source="$(var.TracerHomeDirectory)\net461\System.Resources.Writer.dll"
KeyPath="yes" Checksum="yes"/>
</Component>
<Component Win64="$(var.Win64)">
<File Id="net461_System.Runtime.CompilerServices.Unsafe.dll"
Source="$(var.TracerHomeDirectory)\net461\System.Runtime.CompilerServices.Unsafe.dll"
KeyPath="yes" Checksum="yes"/>
</Component>
<Component Win64="$(var.Win64)">
<File Id="net461_System.Runtime.CompilerServices.VisualC.dll"
Source="$(var.TracerHomeDirectory)\net461\System.Runtime.CompilerServices.VisualC.dll"
KeyPath="yes" Checksum="yes"/>
</Component>
<Component Win64="$(var.Win64)">
<File Id="net461_System.Runtime.dll"
Source="$(var.TracerHomeDirectory)\net461\System.Runtime.dll"
KeyPath="yes" Checksum="yes"/>
</Component>
<Component Win64="$(var.Win64)">
<File Id="net461_System.Runtime.Extensions.dll"
Source="$(var.TracerHomeDirectory)\net461\System.Runtime.Extensions.dll"
KeyPath="yes" Checksum="yes"/>
</Component>
<Component Win64="$(var.Win64)">
<File Id="net461_System.Runtime.Handles.dll"
Source="$(var.TracerHomeDirectory)\net461\System.Runtime.Handles.dll"
KeyPath="yes" Checksum="yes"/>
</Component>
<Component Win64="$(var.Win64)">
<File Id="net461_System.Runtime.InteropServices.dll"
Source="$(var.TracerHomeDirectory)\net461\System.Runtime.InteropServices.dll"
KeyPath="yes" Checksum="yes"/>
</Component>
<Component Win64="$(var.Win64)">
<File Id="net461_System.Runtime.InteropServices.RuntimeInformation.dll"
Source="$(var.TracerHomeDirectory)\net461\System.Runtime.InteropServices.RuntimeInformation.dll"
KeyPath="yes" Checksum="yes"/>
</Component>
<Component Win64="$(var.Win64)">
<File Id="net461_System.Runtime.Numerics.dll"
Source="$(var.TracerHomeDirectory)\net461\System.Runtime.Numerics.dll"
KeyPath="yes" Checksum="yes"/>
</Component>
<Component Win64="$(var.Win64)">
<File Id="net461_System.Runtime.Serialization.Formatters.dll"
Source="$(var.TracerHomeDirectory)\net461\System.Runtime.Serialization.Formatters.dll"
KeyPath="yes" Checksum="yes"/>
</Component>
<Component Win64="$(var.Win64)">
<File Id="net461_System.Runtime.Serialization.Json.dll"
Source="$(var.TracerHomeDirectory)\net461\System.Runtime.Serialization.Json.dll"
KeyPath="yes" Checksum="yes"/>
</Component>
<Component Win64="$(var.Win64)">
<File Id="net461_System.Runtime.Serialization.Primitives.dll"
Source="$(var.TracerHomeDirectory)\net461\System.Runtime.Serialization.Primitives.dll"
KeyPath="yes" Checksum="yes"/>
</Component>
<Component Win64="$(var.Win64)">
<File Id="net461_System.Runtime.Serialization.Xml.dll"
Source="$(var.TracerHomeDirectory)\net461\System.Runtime.Serialization.Xml.dll"
KeyPath="yes" Checksum="yes"/>
</Component>
<Component Win64="$(var.Win64)">
<File Id="net461_System.Security.Claims.dll"
Source="$(var.TracerHomeDirectory)\net461\System.Security.Claims.dll"
KeyPath="yes" Checksum="yes"/>
</Component>
<Component Win64="$(var.Win64)">
<File Id="net461_System.Security.Cryptography.Algorithms.dll"
Source="$(var.TracerHomeDirectory)\net461\System.Security.Cryptography.Algorithms.dll"
KeyPath="yes" Checksum="yes"/>
</Component>
<Component Win64="$(var.Win64)">
<File Id="net461_System.Security.Cryptography.Csp.dll"
Source="$(var.TracerHomeDirectory)\net461\System.Security.Cryptography.Csp.dll"
KeyPath="yes" Checksum="yes"/>
</Component>
<Component Win64="$(var.Win64)">
<File Id="net461_System.Security.Cryptography.Encoding.dll"
Source="$(var.TracerHomeDirectory)\net461\System.Security.Cryptography.Encoding.dll"
KeyPath="yes" Checksum="yes"/>
</Component>
<Component Win64="$(var.Win64)">
<File Id="net461_System.Security.Cryptography.Primitives.dll"
Source="$(var.TracerHomeDirectory)\net461\System.Security.Cryptography.Primitives.dll"
KeyPath="yes" Checksum="yes"/>
</Component>
<Component Win64="$(var.Win64)">
<File Id="net461_System.Security.Cryptography.X509Certificates.dll"
Source="$(var.TracerHomeDirectory)\net461\System.Security.Cryptography.X509Certificates.dll"
KeyPath="yes" Checksum="yes"/>
</Component>
<Component Win64="$(var.Win64)">
<File Id="net461_System.Security.Principal.dll"
Source="$(var.TracerHomeDirectory)\net461\System.Security.Principal.dll"
KeyPath="yes" Checksum="yes"/>
</Component>
<Component Win64="$(var.Win64)">
<File Id="net461_System.Security.SecureString.dll"
Source="$(var.TracerHomeDirectory)\net461\System.Security.SecureString.dll"
KeyPath="yes" Checksum="yes"/>
</Component>
<Component Win64="$(var.Win64)">
<File Id="net461_System.Text.Encoding.dll"
Source="$(var.TracerHomeDirectory)\net461\System.Text.Encoding.dll"
KeyPath="yes" Checksum="yes"/>
</Component>
<Component Win64="$(var.Win64)">
<File Id="net461_System.Text.Encoding.Extensions.dll"
Source="$(var.TracerHomeDirectory)\net461\System.Text.Encoding.Extensions.dll"
KeyPath="yes" Checksum="yes"/>
</Component>
<Component Win64="$(var.Win64)">
<File Id="net461_System.Text.RegularExpressions.dll"
Source="$(var.TracerHomeDirectory)\net461\System.Text.RegularExpressions.dll"
KeyPath="yes" Checksum="yes"/>
</Component>
<Component Win64="$(var.Win64)">
<File Id="net461_System.Threading.dll"
Source="$(var.TracerHomeDirectory)\net461\System.Threading.dll"
KeyPath="yes" Checksum="yes"/>
</Component>
<Component Win64="$(var.Win64)">
<File Id="net461_System.Threading.Overlapped.dll"
Source="$(var.TracerHomeDirectory)\net461\System.Threading.Overlapped.dll"
KeyPath="yes" Checksum="yes"/>
</Component>
<Component Win64="$(var.Win64)">
<File Id="net461_System.Threading.Tasks.dll"
Source="$(var.TracerHomeDirectory)\net461\System.Threading.Tasks.dll"
KeyPath="yes" Checksum="yes"/>
</Component>
<Component Win64="$(var.Win64)">
<File Id="net461_System.Threading.Tasks.Extensions.dll"
Source="$(var.TracerHomeDirectory)\net461\System.Threading.Tasks.Extensions.dll"
KeyPath="yes" Checksum="yes"/>
</Component>
<Component Win64="$(var.Win64)">
<File Id="net461_System.Threading.Tasks.Parallel.dll"
Source="$(var.TracerHomeDirectory)\net461\System.Threading.Tasks.Parallel.dll"
KeyPath="yes" Checksum="yes"/>
</Component>
<Component Win64="$(var.Win64)">
<File Id="net461_System.Threading.Thread.dll"
Source="$(var.TracerHomeDirectory)\net461\System.Threading.Thread.dll"
KeyPath="yes" Checksum="yes"/>
</Component>
<Component Win64="$(var.Win64)">
<File Id="net461_System.Threading.ThreadPool.dll"
Source="$(var.TracerHomeDirectory)\net461\System.Threading.ThreadPool.dll"
KeyPath="yes" Checksum="yes"/>
</Component>
<Component Win64="$(var.Win64)">
<File Id="net461_System.Threading.Timer.dll"
Source="$(var.TracerHomeDirectory)\net461\System.Threading.Timer.dll"
KeyPath="yes" Checksum="yes"/>
</Component>
<Component Win64="$(var.Win64)">
<File Id="net461_System.ValueTuple.dll"
Source="$(var.TracerHomeDirectory)\net461\System.ValueTuple.dll"
KeyPath="yes" Checksum="yes"/>
</Component>
<Component Win64="$(var.Win64)">
<File Id="net461_System.Xml.ReaderWriter.dll"
Source="$(var.TracerHomeDirectory)\net461\System.Xml.ReaderWriter.dll"
KeyPath="yes" Checksum="yes"/>
</Component>
<Component Win64="$(var.Win64)">
<File Id="net461_System.Xml.XDocument.dll"
Source="$(var.TracerHomeDirectory)\net461\System.Xml.XDocument.dll"
KeyPath="yes" Checksum="yes"/>
</Component>
<Component Win64="$(var.Win64)">
<File Id="net461_System.Xml.XmlDocument.dll"
Source="$(var.TracerHomeDirectory)\net461\System.Xml.XmlDocument.dll"
KeyPath="yes" Checksum="yes"/>
</Component>
<Component Win64="$(var.Win64)">
<File Id="net461_System.Xml.XmlSerializer.dll"
Source="$(var.TracerHomeDirectory)\net461\System.Xml.XmlSerializer.dll"
KeyPath="yes" Checksum="yes"/>
</Component>
<Component Win64="$(var.Win64)">
<File Id="net461_System.Xml.XPath.dll"
Source="$(var.TracerHomeDirectory)\net461\System.Xml.XPath.dll"
KeyPath="yes" Checksum="yes"/>
</Component>
<Component Win64="$(var.Win64)">
<File Id="net461_System.Xml.XPath.XDocument.dll"
Source="$(var.TracerHomeDirectory)\net461\System.Xml.XPath.XDocument.dll"
KeyPath="yes" Checksum="yes"/>
</Component>
</ComponentGroup>
</Fragment>
</Wix>

View File

@ -20,11 +20,6 @@
Source="$(var.TracerHomeDirectory)\netstandard2.0\Datadog.Trace.dll"
KeyPath="yes" Checksum="yes"/>
</Component>
<Component Win64="$(var.Win64)">
<File Id="netstandard20_MessagePack.dll"
Source="$(var.TracerHomeDirectory)\netstandard2.0\MessagePack.dll"
KeyPath="yes" Checksum="yes"/>
</Component>
<Component Win64="$(var.Win64)">
<File Id="netstandard20_Microsoft.CSharp.dll"
Source="$(var.TracerHomeDirectory)\netstandard2.0\Microsoft.CSharp.dll"
@ -80,31 +75,16 @@
Source="$(var.TracerHomeDirectory)\netstandard2.0\System.Reflection.TypeExtensions.dll"
KeyPath="yes" Checksum="yes"/>
</Component>
<Component Win64="$(var.Win64)">
<File Id="netstandard20_System.Runtime.CompilerServices.Unsafe.dll"
Source="$(var.TracerHomeDirectory)\netstandard2.0\System.Runtime.CompilerServices.Unsafe.dll"
KeyPath="yes" Checksum="yes"/>
</Component>
<Component Win64="$(var.Win64)">
<File Id="netstandard20_System.Runtime.InteropServices.RuntimeInformation.dll"
Source="$(var.TracerHomeDirectory)\netstandard2.0\System.Runtime.InteropServices.RuntimeInformation.dll"
KeyPath="yes" Checksum="yes"/>
</Component>
<Component Win64="$(var.Win64)">
<File Id="netstandard20_System.Runtime.Serialization.Primitives.dll"
Source="$(var.TracerHomeDirectory)\netstandard2.0\System.Runtime.Serialization.Primitives.dll"
KeyPath="yes" Checksum="yes"/>
</Component>
<Component Win64="$(var.Win64)">
<File Id="netstandard20_System.Threading.dll"
Source="$(var.TracerHomeDirectory)\netstandard2.0\System.Threading.dll"
KeyPath="yes" Checksum="yes"/>
</Component>
<Component Win64="$(var.Win64)">
<File Id="netstandard20_System.Threading.Tasks.Extensions.dll"
Source="$(var.TracerHomeDirectory)\netstandard2.0\System.Threading.Tasks.Extensions.dll"
KeyPath="yes" Checksum="yes"/>
</Component>
</ComponentGroup>
</Fragment>
</Wix>

View File

@ -9,7 +9,6 @@ using Datadog.Trace.DogStatsd;
using Datadog.Trace.Logging;
using Datadog.Trace.PlatformHelpers;
using Datadog.Trace.Vendors.StatsdClient;
using MessagePack;
using Newtonsoft.Json;
namespace Datadog.Trace.Agent

View File

@ -5,7 +5,7 @@ using System.Net;
using System.Text;
using System.Threading.Tasks;
using Datadog.Trace.Agent.MessagePack;
using MessagePack;
using Datadog.Trace.Vendors.MessagePack;
namespace Datadog.Trace.Agent
{
@ -37,11 +37,7 @@ namespace Datadog.Trace.Agent
_request.ContentType = "application/msgpack";
using (var requestStream = await _request.GetRequestStreamAsync().ConfigureAwait(false))
{
#if MESSAGEPACK_1_9
await MessagePackSerializer.SerializeAsync(requestStream, traces, formatterResolver).ConfigureAwait(false);
#elif MESSAGEPACK_2_1
await MessagePackSerializer.SerializeAsync(requestStream, traces, formatterResolver.Options).ConfigureAwait(false);
#endif
}
var httpWebResponse = (HttpWebResponse)await _request.GetResponseAsync().ConfigureAwait(false);

View File

@ -1,5 +1,5 @@
using MessagePack;
using MessagePack.Formatters;
using Datadog.Trace.Vendors.MessagePack;
using Datadog.Trace.Vendors.MessagePack.Formatters;
namespace Datadog.Trace.Agent.MessagePack
{
@ -10,16 +10,8 @@ namespace Datadog.Trace.Agent.MessagePack
public FormatterResolverWrapper(IFormatterResolver resolver)
{
_resolver = resolver;
#if MESSAGEPACK_2_1
Options = MessagePackSerializerOptions.Standard.WithResolver(resolver);
#endif
}
#if MESSAGEPACK_2_1
public MessagePackSerializerOptions Options { get; }
#endif
public IMessagePackFormatter<T> GetFormatter<T>()
{
return _resolver.GetFormatter<T>();

View File

@ -1,6 +1,6 @@
using MessagePack;
using MessagePack.Formatters;
using MessagePack.Resolvers;
using Datadog.Trace.Vendors.MessagePack;
using Datadog.Trace.Vendors.MessagePack.Formatters;
using Datadog.Trace.Vendors.MessagePack.Resolvers;
namespace Datadog.Trace.Agent.MessagePack
{

View File

@ -1,106 +0,0 @@
#if MESSAGEPACK_2_1
using System;
using Datadog.Trace.ExtensionMethods;
using MessagePack;
using MessagePack.Formatters;
namespace Datadog.Trace.Agent.MessagePack
{
internal class SpanMessagePackFormatter : IMessagePackFormatter<Span>
{
public void Serialize(ref MessagePackWriter writer, Span value, MessagePackSerializerOptions options)
{
// First, pack array length (or map length).
// It should be the number of members of the object to be serialized.
var len = 8;
if (value.Context.ParentId != null)
{
len++;
}
if (value.Error)
{
len++;
}
if (value.Tags != null)
{
len++;
}
if (value.Metrics != null)
{
len++;
}
writer.WriteMapHeader(len);
writer.Write("trace_id");
writer.Write(value.Context.TraceId);
writer.Write("span_id");
writer.Write(value.Context.SpanId);
writer.Write("name");
writer.Write(value.OperationName);
writer.Write("resource");
writer.Write(value.ResourceName);
writer.Write("service");
writer.Write(value.ServiceName);
writer.Write("type");
writer.Write(value.Type);
writer.Write("start");
writer.Write(value.StartTime.ToUnixTimeNanoseconds());
writer.Write("duration");
writer.Write(value.Duration.ToNanoseconds());
if (value.Context.ParentId != null)
{
writer.Write("parent_id");
writer.Write((ulong)value.Context.ParentId);
}
if (value.Error)
{
writer.Write("error");
writer.Write(1);
}
if (value.Tags != null)
{
writer.Write("meta");
writer.WriteMapHeader(value.Tags.Count);
foreach (var pair in value.Tags)
{
writer.Write(pair.Key);
writer.Write(pair.Value);
}
}
if (value.Metrics != null)
{
writer.Write("metrics");
writer.WriteMapHeader(value.Metrics.Count);
foreach (var pair in value.Metrics)
{
writer.Write(pair.Key);
writer.Write(pair.Value);
}
}
}
public Span Deserialize(ref MessagePackReader reader, MessagePackSerializerOptions options)
{
throw new NotImplementedException();
}
}
}
#endif

View File

@ -1,8 +1,7 @@
#if MESSAGEPACK_1_9
using System;
using Datadog.Trace.ExtensionMethods;
using MessagePack;
using MessagePack.Formatters;
using Datadog.Trace.Vendors.MessagePack;
using Datadog.Trace.Vendors.MessagePack.Formatters;
namespace Datadog.Trace.Agent.MessagePack
{
@ -107,4 +106,3 @@ namespace Datadog.Trace.Agent.MessagePack
}
}
}
#endif
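
The retained `SpanMessagePackFormatter` above keeps the 1.9-style `byte[]`/offset formatter API rather than the 2.1 `MessagePackWriter` API of the deleted file. A hedged sketch of that shape follows; it is not the exact file contents, and the map layout is an assumption mirroring the deleted 2.1 formatter.

```csharp
using System;
using Datadog.Trace;
using Datadog.Trace.Vendors.MessagePack;
using Datadog.Trace.Vendors.MessagePack.Formatters;

internal class SpanMessagePackFormatterSketch : IMessagePackFormatter<Span>
{
    public int Serialize(ref byte[] bytes, int offset, Span value, IFormatterResolver formatterResolver)
    {
        var startOffset = offset;

        // Write a map header followed by key/value pairs ("trace_id", "span_id", "name", ...),
        // mirroring the deleted 2.1 formatter; each Write* call returns the number of bytes written.
        offset += MessagePackBinary.WriteMapHeader(ref bytes, offset, 8);
        offset += MessagePackBinary.WriteString(ref bytes, offset, "trace_id");
        offset += MessagePackBinary.WriteUInt64(ref bytes, offset, value.Context.TraceId);
        // ... remaining fields elided ...

        return offset - startOffset;
    }

    public Span Deserialize(byte[] bytes, int offset, IFormatterResolver formatterResolver, out int readSize)
    {
        throw new NotImplementedException();
    }
}
```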

View File

@ -1,11 +1,10 @@
<Project Sdk="Microsoft.NET.Sdk">
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<!-- NuGet -->
<Version>1.18.4-prerelease</Version>
<Title>Datadog APM</Title>
<Description>Manual instrumentation library for Datadog APM</Description>
<DefineConstants>$(DefineConstants);MESSAGEPACK_1_9</DefineConstants>
</PropertyGroup>
<ItemGroup Condition=" '$(TargetFramework)' != 'netstandard2.0' ">
@ -27,7 +26,6 @@
<ItemGroup>
<PackageReference Include="LibLog" Version="5.0.6" PrivateAssets="All" />
<PackageReference Include="MessagePack" Version="1.9.11" />
<PackageReference Include="Microsoft.CSharp" Version="4.5.0" />
<PackageReference Include="Newtonsoft.Json" Version="12.0.1" />
<PackageReference Include="System.Diagnostics.DiagnosticSource" Version="4.4.1" />

View File

@ -0,0 +1,79 @@
//------------------------------------------------------------------------------
// <auto-generated />
// This file was automatically generated by the UpdateVendors tool.
//------------------------------------------------------------------------------
using System;
namespace Datadog.Trace.Vendors.MessagePack
{
[AttributeUsage(AttributeTargets.Class | AttributeTargets.Struct, AllowMultiple = false, Inherited = true)]
internal class MessagePackObjectAttribute : Attribute
{
public bool KeyAsPropertyName { get; private set; }
public MessagePackObjectAttribute(bool keyAsPropertyName = false)
{
this.KeyAsPropertyName = keyAsPropertyName;
}
}
[AttributeUsage(AttributeTargets.Property | AttributeTargets.Field, AllowMultiple = false, Inherited = true)]
internal class KeyAttribute : Attribute
{
public int? IntKey { get; private set; }
public string StringKey { get; private set; }
public KeyAttribute(int x)
{
this.IntKey = x;
}
public KeyAttribute(string x)
{
this.StringKey = x;
}
}
[AttributeUsage(AttributeTargets.Property | AttributeTargets.Field, AllowMultiple = false, Inherited = true)]
internal class IgnoreMemberAttribute : Attribute
{
}
[AttributeUsage(AttributeTargets.Interface | AttributeTargets.Class, AllowMultiple = true, Inherited = false)]
internal class UnionAttribute : Attribute
{
public int Key { get; private set; }
public Type SubType { get; private set; }
public UnionAttribute(int key, Type subType)
{
this.Key = key;
this.SubType = subType;
}
}
[AttributeUsage(AttributeTargets.Constructor, AllowMultiple = false, Inherited = true)]
internal class SerializationConstructorAttribute : Attribute
{
}
[AttributeUsage(AttributeTargets.Class | AttributeTargets.Struct | AttributeTargets.Interface | AttributeTargets.Enum | AttributeTargets.Field | AttributeTargets.Property, AllowMultiple = false, Inherited = true)]
internal class MessagePackFormatterAttribute : Attribute
{
public Type FormatterType { get; private set; }
public object[] Arguments { get; private set; }
public MessagePackFormatterAttribute(Type formatterType)
{
this.FormatterType = formatterType;
}
public MessagePackFormatterAttribute(Type formatterType, params object[] arguments)
{
this.FormatterType = formatterType;
this.Arguments = arguments;
}
}
}
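
For reference, a minimal hedged sketch of how these vendored attributes are applied; the `Order` type and its members are hypothetical.

```csharp
using Datadog.Trace.Vendors.MessagePack;

// Hypothetical annotated type: [Key] selects an integer or string key for each member,
// and [IgnoreMember] excludes a member from serialization.
[MessagePackObject]
internal class Order
{
    [Key(0)]
    public int Id { get; set; }

    [Key("name")]
    public string Name { get; set; }

    [IgnoreMember]
    public string CachedDisplayText { get; set; }
}
```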

View File

@ -0,0 +1,40 @@
//------------------------------------------------------------------------------
// <auto-generated />
// This file was automatically generated by the UpdateVendors tool.
//------------------------------------------------------------------------------
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
// <auto-generated />
#if !NETCOREAPP
using System.Runtime.CompilerServices;
// Some routines inspired by the Stanford Bit Twiddling Hacks by Sean Eron Anderson:
// http://graphics.stanford.edu/~seander/bithacks.html
namespace System.Numerics
{
/// <summary>
/// Utility methods for intrinsic bit-twiddling operations.
/// The methods use hardware intrinsics when available on the underlying platform,
/// otherwise they use optimized software fallbacks.
/// </summary>
internal static class BitOperations
{
/// <summary>
/// Rotates the specified value left by the specified number of bits.
/// Similar in behavior to the x86 instruction ROL.
/// </summary>
/// <param name="value">The value to rotate.</param>
/// <param name="offset">The number of bits to rotate by.
/// Any value outside the range [0..31] is treated as congruent mod 32.</param>
/// <returns>The rotated value.</returns>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public static uint RotateLeft(uint value, int offset)
=> (value << offset) | (value >> (32 - offset));
}
}
#endif
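
A quick worked example of the rotation (on netcoreapp builds the framework's own `System.Numerics.BitOperations` is used instead, hence the `#if !NETCOREAPP` guard):

```csharp
// Rotating 0x12345678 left by 8 bits moves the top byte (0x12) around to the low end:
// (0x12345678 << 8) == 0x34567800, (0x12345678 >> 24) == 0x00000012, OR'd => 0x34567812.
uint rotated = System.Numerics.BitOperations.RotateLeft(0x12345678u, 8);
// rotated == 0x34567812
```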

View File

@ -0,0 +1,122 @@
//------------------------------------------------------------------------------
// <auto-generated />
// This file was automatically generated by the UpdateVendors tool.
//------------------------------------------------------------------------------
using System;
using System.Runtime.InteropServices;
namespace Datadog.Trace.Vendors.MessagePack
{
// Safe accessor for the underlying bytes of Single/Double values.
// This code is borrowed from MsgPack-Cli https://github.com/msgpack/msgpack-cli
[StructLayout(LayoutKind.Explicit)]
internal struct Float32Bits
{
[FieldOffset(0)]
public readonly float Value;
[FieldOffset(0)]
public readonly Byte Byte0;
[FieldOffset(1)]
public readonly Byte Byte1;
[FieldOffset(2)]
public readonly Byte Byte2;
[FieldOffset(3)]
public readonly Byte Byte3;
public Float32Bits(float value)
{
this = default(Float32Bits);
this.Value = value;
}
public Float32Bits(byte[] bigEndianBytes, int offset)
{
this = default(Float32Bits);
if (BitConverter.IsLittleEndian)
{
this.Byte0 = bigEndianBytes[offset + 3];
this.Byte1 = bigEndianBytes[offset + 2];
this.Byte2 = bigEndianBytes[offset + 1];
this.Byte3 = bigEndianBytes[offset];
}
else
{
this.Byte0 = bigEndianBytes[offset];
this.Byte1 = bigEndianBytes[offset + 1];
this.Byte2 = bigEndianBytes[offset + 2];
this.Byte3 = bigEndianBytes[offset + 3];
}
}
}
[StructLayout(LayoutKind.Explicit)]
internal struct Float64Bits
{
[FieldOffset(0)]
public readonly double Value;
[FieldOffset(0)]
public readonly Byte Byte0;
[FieldOffset(1)]
public readonly Byte Byte1;
[FieldOffset(2)]
public readonly Byte Byte2;
[FieldOffset(3)]
public readonly Byte Byte3;
[FieldOffset(4)]
public readonly Byte Byte4;
[FieldOffset(5)]
public readonly Byte Byte5;
[FieldOffset(6)]
public readonly Byte Byte6;
[FieldOffset(7)]
public readonly Byte Byte7;
public Float64Bits(double value)
{
this = default(Float64Bits);
this.Value = value;
}
public Float64Bits(byte[] bigEndianBytes, int offset)
{
this = default(Float64Bits);
if (BitConverter.IsLittleEndian)
{
this.Byte0 = bigEndianBytes[offset + 7];
this.Byte1 = bigEndianBytes[offset + 6];
this.Byte2 = bigEndianBytes[offset + 5];
this.Byte3 = bigEndianBytes[offset + 4];
this.Byte4 = bigEndianBytes[offset + 3];
this.Byte5 = bigEndianBytes[offset + 2];
this.Byte6 = bigEndianBytes[offset + 1];
this.Byte7 = bigEndianBytes[offset];
}
else
{
this.Byte0 = bigEndianBytes[offset];
this.Byte1 = bigEndianBytes[offset + 1];
this.Byte2 = bigEndianBytes[offset + 2];
this.Byte3 = bigEndianBytes[offset + 3];
this.Byte4 = bigEndianBytes[offset + 4];
this.Byte5 = bigEndianBytes[offset + 5];
this.Byte6 = bigEndianBytes[offset + 6];
this.Byte7 = bigEndianBytes[offset + 7];
}
}
}
}
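
A hedged usage example: decoding the big-endian IEEE-754 encoding of `1.0f` (bytes `3F 80 00 00`). The constructor reverses the byte order on little-endian hosts so `Value` reads correctly either way.

```csharp
using Datadog.Trace.Vendors.MessagePack;

// 1.0f is 0x3F800000 in IEEE-754; MessagePack stores floats big-endian on the wire.
var bits = new Float32Bits(new byte[] { 0x3F, 0x80, 0x00, 0x00 }, offset: 0);
// bits.Value == 1.0f regardless of host endianness
```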

File diff suppressed because it is too large.

View File

@ -0,0 +1,57 @@
//------------------------------------------------------------------------------
// <auto-generated />
// This file was automatically generated by the UpdateVendors tool.
//------------------------------------------------------------------------------
// Copyright (c) All contributors. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for full license information.
using System;
using System.Linq.Expressions;
using System.Reflection;
namespace Datadog.Trace.Vendors.MessagePack.Formatters
{
/// <summary>
/// Provides general helpers for creating collections (including dictionaries).
/// </summary>
/// <typeparam name="TCollection">The concrete type of collection to create.</typeparam>
/// <typeparam name="TEqualityComparer">The type of equality comparer that we would hope to pass into the collection's constructor.</typeparam>
internal static class CollectionHelpers<TCollection, TEqualityComparer>
where TCollection : new()
{
/// <summary>
/// The delegate that will create the collection, if the typical (int count, IEqualityComparer{T} equalityComparer) constructor was found.
/// </summary>
private static Func<int, TEqualityComparer, TCollection> collectionCreator;
/// <summary>
/// Initializes static members of the <see cref="CollectionHelpers{TCollection, TEqualityComparer}"/> class.
/// </summary>
/// <remarks>
/// Initializes a delegate that is optimized to create a collection of a given size and using the given equality comparer, if possible.
/// </remarks>
static CollectionHelpers()
{
var ctor = typeof(TCollection).GetTypeInfo().GetConstructor(new Type[] { typeof(int), typeof(TEqualityComparer) });
if (ctor != null)
{
ParameterExpression param1 = Expression.Parameter(typeof(int), "count");
ParameterExpression param2 = Expression.Parameter(typeof(TEqualityComparer), "equalityComparer");
NewExpression body = Expression.New(ctor, param1, param2);
collectionCreator = Expression.Lambda<Func<int, TEqualityComparer, TCollection>>(body, param1, param2).Compile();
}
}
/// <summary>
/// Initializes a new instance of the <typeparamref name="TCollection"/> collection.
/// </summary>
/// <param name="count">The number of elements the collection should be prepared to receive.</param>
/// <param name="equalityComparer">The equality comparer to initialize the collection with.</param>
/// <returns>The newly initialized collection.</returns>
/// <remarks>
/// Use of the <paramref name="count"/> and <paramref name="equalityComparer"/> are a best effort.
/// If we can't find a constructor on the collection in the expected shape, we'll just instantiate the collection with its default constructor.
/// </remarks>
internal static TCollection CreateHashCollection(int count, TEqualityComparer equalityComparer) => collectionCreator != null ? collectionCreator.Invoke(count, equalityComparer) : new TCollection();
}
}
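
A hedged usage sketch: `HashSet<string>` exposes an `(int capacity, IEqualityComparer<string>)` constructor on newer frameworks, so the helper pre-sizes the set there and silently falls back to the default constructor elsewhere.

```csharp
using System;
using System.Collections.Generic;
using Datadog.Trace.Vendors.MessagePack.Formatters;

// Pre-sized, comparer-aware creation when the (int, IEqualityComparer<string>) constructor exists;
// otherwise CreateHashCollection just returns new HashSet<string>().
HashSet<string> set =
    CollectionHelpers<HashSet<string>, IEqualityComparer<string>>
        .CreateHashCollection(count: 16, equalityComparer: StringComparer.Ordinal);
```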

View File

@ -0,0 +1,457 @@
//------------------------------------------------------------------------------
// <auto-generated />
// This file was automatically generated by the UpdateVendors tool.
//------------------------------------------------------------------------------
using System;
using System.Collections.Generic;
using System.Collections.ObjectModel;
#if NETSTANDARD || NETFRAMEWORK
using System.Collections.Concurrent;
#endif
namespace Datadog.Trace.Vendors.MessagePack.Formatters
{
#if NETSTANDARD || NETFRAMEWORK
// Unfortunately we can't constrain on IDictionary<KeyValuePair<TKey, TValue>> because we also need to support IReadOnlyDictionary<TKey, TValue>.
internal abstract class DictionaryFormatterBase<TKey, TValue, TIntermediate, TEnumerator, TDictionary> : IMessagePackFormatter<TDictionary>
where TDictionary : IEnumerable<KeyValuePair<TKey, TValue>>
where TEnumerator : IEnumerator<KeyValuePair<TKey, TValue>>
{
public int Serialize(ref byte[] bytes, int offset, TDictionary value, IFormatterResolver formatterResolver)
{
if (value == null)
{
return MessagePackBinary.WriteNil(ref bytes, offset);
}
else
{
var startOffset = offset;
var keyFormatter = formatterResolver.GetFormatterWithVerify<TKey>();
var valueFormatter = formatterResolver.GetFormatterWithVerify<TValue>();
int count;
{
var col = value as ICollection<KeyValuePair<TKey, TValue>>;
if (col != null)
{
count = col.Count;
}
else
{
var col2 = value as IReadOnlyCollection<KeyValuePair<TKey, TValue>>;
if (col2 != null)
{
count = col2.Count;
}
else
{
throw new InvalidOperationException("DictionaryFormatterBase's TDictionary supports only ICollection<KVP> or IReadOnlyCollection<KVP>");
}
}
}
offset += MessagePackBinary.WriteMapHeader(ref bytes, offset, count);
var e = GetSourceEnumerator(value);
try
{
while (e.MoveNext())
{
var item = e.Current;
offset += keyFormatter.Serialize(ref bytes, offset, item.Key, formatterResolver);
offset += valueFormatter.Serialize(ref bytes, offset, item.Value, formatterResolver);
}
}
finally
{
e.Dispose();
}
return offset - startOffset;
}
}
public TDictionary Deserialize(byte[] bytes, int offset, IFormatterResolver formatterResolver, out int readSize)
{
if (MessagePackBinary.IsNil(bytes, offset))
{
readSize = 1;
return default(TDictionary);
}
else
{
var startOffset = offset;
var keyFormatter = formatterResolver.GetFormatterWithVerify<TKey>();
var valueFormatter = formatterResolver.GetFormatterWithVerify<TValue>();
var len = MessagePackBinary.ReadMapHeader(bytes, offset, out readSize);
offset += readSize;
var dict = Create(len);
using (MessagePackSecurity.DepthStep())
{
for (int i = 0; i < len; i++)
{
var key = keyFormatter.Deserialize(bytes, offset, formatterResolver, out readSize);
offset += readSize;
var value = valueFormatter.Deserialize(bytes, offset, formatterResolver, out readSize);
offset += readSize;
Add(dict, i, key, value);
}
}
readSize = offset - startOffset;
return Complete(dict);
}
}
// Abstraction for serialization.
// Some collections expose a struct enumerator; using it directly is an optimization path.
protected abstract TEnumerator GetSourceEnumerator(TDictionary source);
// abstraction for deserialize
protected abstract TIntermediate Create(int count);
protected abstract void Add(TIntermediate collection, int index, TKey key, TValue value);
protected abstract TDictionary Complete(TIntermediate intermediateCollection);
}
internal abstract class DictionaryFormatterBase<TKey, TValue, TIntermediate, TDictionary> : DictionaryFormatterBase<TKey, TValue, TIntermediate, IEnumerator<KeyValuePair<TKey, TValue>>, TDictionary>
where TDictionary : IEnumerable<KeyValuePair<TKey, TValue>>
{
protected override IEnumerator<KeyValuePair<TKey, TValue>> GetSourceEnumerator(TDictionary source)
{
return source.GetEnumerator();
}
}
internal abstract class DictionaryFormatterBase<TKey, TValue, TDictionary> : DictionaryFormatterBase<TKey, TValue, TDictionary, TDictionary>
where TDictionary : IDictionary<TKey, TValue>
{
protected override TDictionary Complete(TDictionary intermediateCollection)
{
return intermediateCollection;
}
}
internal sealed class DictionaryFormatter<TKey, TValue> : DictionaryFormatterBase<TKey, TValue, Dictionary<TKey, TValue>, Dictionary<TKey, TValue>.Enumerator, Dictionary<TKey, TValue>>
{
protected override void Add(Dictionary<TKey, TValue> collection, int index, TKey key, TValue value)
{
collection.Add(key, value);
}
protected override Dictionary<TKey, TValue> Complete(Dictionary<TKey, TValue> intermediateCollection)
{
return intermediateCollection;
}
protected override Dictionary<TKey, TValue> Create(int count)
{
return new Dictionary<TKey, TValue>(count, MessagePackSecurity.Active.GetEqualityComparer<TKey>());
}
protected override Dictionary<TKey, TValue>.Enumerator GetSourceEnumerator(Dictionary<TKey, TValue> source)
{
return source.GetEnumerator();
}
}
internal sealed class GenericDictionaryFormatter<TKey, TValue, TDictionary> : DictionaryFormatterBase<TKey, TValue, TDictionary>
where TDictionary : IDictionary<TKey, TValue>, new()
{
protected override void Add(TDictionary collection, int index, TKey key, TValue value)
{
collection.Add(key, value);
}
protected override TDictionary Create(int count)
{
return CollectionHelpers<TDictionary, IEqualityComparer<TKey>>.CreateHashCollection(count, MessagePackSecurity.Active.GetEqualityComparer<TKey>());
}
}
internal sealed class InterfaceDictionaryFormatter<TKey, TValue> : DictionaryFormatterBase<TKey, TValue, Dictionary<TKey, TValue>, IDictionary<TKey, TValue>>
{
protected override void Add(Dictionary<TKey, TValue> collection, int index, TKey key, TValue value)
{
collection.Add(key, value);
}
protected override Dictionary<TKey, TValue> Create(int count)
{
return new Dictionary<TKey, TValue>(count, MessagePackSecurity.Active.GetEqualityComparer<TKey>());
}
protected override IDictionary<TKey, TValue> Complete(Dictionary<TKey, TValue> intermediateCollection)
{
return intermediateCollection;
}
}
internal sealed class SortedListFormatter<TKey, TValue> : DictionaryFormatterBase<TKey, TValue, SortedList<TKey, TValue>>
{
protected override void Add(SortedList<TKey, TValue> collection, int index, TKey key, TValue value)
{
collection.Add(key, value);
}
protected override SortedList<TKey, TValue> Create(int count)
{
return new SortedList<TKey, TValue>(count);
}
}
internal sealed class SortedDictionaryFormatter<TKey, TValue> : DictionaryFormatterBase<TKey, TValue, SortedDictionary<TKey, TValue>, SortedDictionary<TKey, TValue>.Enumerator, SortedDictionary<TKey, TValue>>
{
protected override void Add(SortedDictionary<TKey, TValue> collection, int index, TKey key, TValue value)
{
collection.Add(key, value);
}
protected override SortedDictionary<TKey, TValue> Complete(SortedDictionary<TKey, TValue> intermediateCollection)
{
return intermediateCollection;
}
protected override SortedDictionary<TKey, TValue> Create(int count)
{
return new SortedDictionary<TKey, TValue>();
}
protected override SortedDictionary<TKey, TValue>.Enumerator GetSourceEnumerator(SortedDictionary<TKey, TValue> source)
{
return source.GetEnumerator();
}
}
internal sealed class ReadOnlyDictionaryFormatter<TKey, TValue> : DictionaryFormatterBase<TKey, TValue, Dictionary<TKey, TValue>, ReadOnlyDictionary<TKey, TValue>>
{
protected override void Add(Dictionary<TKey, TValue> collection, int index, TKey key, TValue value)
{
collection.Add(key, value);
}
protected override ReadOnlyDictionary<TKey, TValue> Complete(Dictionary<TKey, TValue> intermediateCollection)
{
return new ReadOnlyDictionary<TKey, TValue>(intermediateCollection);
}
protected override Dictionary<TKey, TValue> Create(int count)
{
return new Dictionary<TKey, TValue>(count, MessagePackSecurity.Active.GetEqualityComparer<TKey>());
}
}
internal sealed class InterfaceReadOnlyDictionaryFormatter<TKey, TValue> : DictionaryFormatterBase<TKey, TValue, Dictionary<TKey, TValue>, IReadOnlyDictionary<TKey, TValue>>
{
protected override void Add(Dictionary<TKey, TValue> collection, int index, TKey key, TValue value)
{
collection.Add(key, value);
}
protected override IReadOnlyDictionary<TKey, TValue> Complete(Dictionary<TKey, TValue> intermediateCollection)
{
return intermediateCollection;
}
protected override Dictionary<TKey, TValue> Create(int count)
{
return new Dictionary<TKey, TValue>(count, MessagePackSecurity.Active.GetEqualityComparer<TKey>());
}
}
internal sealed class ConcurrentDictionaryFormatter<TKey, TValue> : DictionaryFormatterBase<TKey, TValue, System.Collections.Concurrent.ConcurrentDictionary<TKey, TValue>>
{
protected override void Add(ConcurrentDictionary<TKey, TValue> collection, int index, TKey key, TValue value)
{
collection.TryAdd(key, value);
}
protected override ConcurrentDictionary<TKey, TValue> Create(int count)
{
// ConcurrentDictionary does not expose its default concurrency level, so we cannot use the capacity (count) overload here.
return new ConcurrentDictionary<TKey, TValue>(MessagePackSecurity.Active.GetEqualityComparer<TKey>());
}
}
#else
internal abstract class DictionaryFormatterBase<TKey, TValue, TIntermediate, TDictionary> : IMessagePackFormatter<TDictionary>
where TDictionary : IDictionary<TKey, TValue>
{
public int Serialize(ref byte[] bytes, int offset, TDictionary value, IFormatterResolver formatterResolver)
{
if (value == null)
{
return MessagePackBinary.WriteNil(ref bytes, offset);
}
else
{
var startOffset = offset;
var keyFormatter = formatterResolver.GetFormatterWithVerify<TKey>();
var valueFormatter = formatterResolver.GetFormatterWithVerify<TValue>();
var count = value.Count;
offset += MessagePackBinary.WriteMapHeader(ref bytes, offset, count);
var e = value.GetEnumerator();
try
{
while (e.MoveNext())
{
var item = e.Current;
offset += keyFormatter.Serialize(ref bytes, offset, item.Key, formatterResolver);
offset += valueFormatter.Serialize(ref bytes, offset, item.Value, formatterResolver);
}
}
finally
{
e.Dispose();
}
return offset - startOffset;
}
}
public TDictionary Deserialize(byte[] bytes, int offset, IFormatterResolver formatterResolver, out int readSize)
{
if (MessagePackBinary.IsNil(bytes, offset))
{
readSize = 1;
return default(TDictionary);
}
else
{
var startOffset = offset;
var keyFormatter = formatterResolver.GetFormatterWithVerify<TKey>();
var valueFormatter = formatterResolver.GetFormatterWithVerify<TValue>();
var len = MessagePackBinary.ReadMapHeader(bytes, offset, out readSize);
offset += readSize;
var dict = Create(len);
for (int i = 0; i < len; i++)
{
var key = keyFormatter.Deserialize(bytes, offset, formatterResolver, out readSize);
offset += readSize;
var value = valueFormatter.Deserialize(bytes, offset, formatterResolver, out readSize);
offset += readSize;
Add(dict, i, key, value);
}
readSize = offset - startOffset;
return Complete(dict);
}
}
// abstraction for deserialize
protected abstract TIntermediate Create(int count);
protected abstract void Add(TIntermediate collection, int index, TKey key, TValue value);
protected abstract TDictionary Complete(TIntermediate intermediateCollection);
}
internal sealed class DictionaryFormatter<TKey, TValue> : DictionaryFormatterBase<TKey, TValue, Dictionary<TKey, TValue>, Dictionary<TKey, TValue>>
{
protected override void Add(Dictionary<TKey, TValue> collection, int index, TKey key, TValue value)
{
collection.Add(key, value);
}
protected override Dictionary<TKey, TValue> Complete(Dictionary<TKey, TValue> intermediateCollection)
{
return intermediateCollection;
}
protected override Dictionary<TKey, TValue> Create(int count)
{
return new Dictionary<TKey, TValue>(count);
}
}
internal sealed class GenericDictionaryFormatter<TKey, TValue, TDictionary> : DictionaryFormatterBase<TKey, TValue, TDictionary, TDictionary>
where TDictionary : IDictionary<TKey, TValue>, new()
{
protected override void Add(TDictionary collection, int index, TKey key, TValue value)
{
collection.Add(key, value);
}
protected override TDictionary Complete(TDictionary intermediateCollection)
{
return intermediateCollection;
}
protected override TDictionary Create(int count)
{
return new TDictionary();
}
}
internal sealed class InterfaceDictionaryFormatter<TKey, TValue> : DictionaryFormatterBase<TKey, TValue, Dictionary<TKey, TValue>, IDictionary<TKey, TValue>>
{
protected override void Add(Dictionary<TKey, TValue> collection, int index, TKey key, TValue value)
{
collection.Add(key, value);
}
protected override Dictionary<TKey, TValue> Create(int count)
{
return new Dictionary<TKey, TValue>(count);
}
protected override IDictionary<TKey, TValue> Complete(Dictionary<TKey, TValue> intermediateCollection)
{
return intermediateCollection;
}
}
internal sealed class SortedListFormatter<TKey, TValue> : DictionaryFormatterBase<TKey, TValue, SortedList<TKey, TValue>, SortedList<TKey, TValue>>
{
protected override void Add(SortedList<TKey, TValue> collection, int index, TKey key, TValue value)
{
collection.Add(key, value);
}
protected override SortedList<TKey, TValue> Complete(SortedList<TKey, TValue> intermediateCollection)
{
return intermediateCollection;
}
protected override SortedList<TKey, TValue> Create(int count)
{
return new SortedList<TKey, TValue>(count);
}
}
internal sealed class SortedDictionaryFormatter<TKey, TValue> : DictionaryFormatterBase<TKey, TValue, SortedDictionary<TKey, TValue>, SortedDictionary<TKey, TValue>>
{
protected override void Add(SortedDictionary<TKey, TValue> collection, int index, TKey key, TValue value)
{
collection.Add(key, value);
}
protected override SortedDictionary<TKey, TValue> Complete(SortedDictionary<TKey, TValue> intermediateCollection)
{
return intermediateCollection;
}
protected override SortedDictionary<TKey, TValue> Create(int count)
{
return new SortedDictionary<TKey, TValue>();
}
}
#endif
}
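
A hedged round-trip sketch, assuming the vendored copy keeps the 1.9 `MessagePackSerializer` and `StandardResolver` entry points (which route `Dictionary<TKey, TValue>` through `DictionaryFormatter<TKey, TValue>`); the dictionary contents are illustrative.

```csharp
using System.Collections.Generic;
using Datadog.Trace.Vendors.MessagePack;
using Datadog.Trace.Vendors.MessagePack.Resolvers;

// Serialize and deserialize a dictionary; StandardResolver selects DictionaryFormatter<string, int>.
var tags = new Dictionary<string, int> { ["retries"] = 2, ["shard"] = 7 };
byte[] bytes = MessagePackSerializer.Serialize(tags, StandardResolver.Instance);
var roundTripped = MessagePackSerializer.Deserialize<Dictionary<string, int>>(bytes, StandardResolver.Instance);
```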

View File

@ -0,0 +1,102 @@
//------------------------------------------------------------------------------
// <auto-generated />
// This file was automatically generated by the UpdateVendors tool.
//------------------------------------------------------------------------------
#if NETSTANDARD || NETFRAMEWORK
using Datadog.Trace.Vendors.MessagePack.Resolvers;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Linq.Expressions;
using System.Reflection;
namespace Datadog.Trace.Vendors.MessagePack.Formatters
{
internal sealed class DynamicObjectTypeFallbackFormatter : IMessagePackFormatter<object>
{
delegate int SerializeMethod(object dynamicFormatter, ref byte[] bytes, int offset, object value, IFormatterResolver formatterResolver);
readonly MessagePack.Internal.ThreadsafeTypeKeyHashTable<KeyValuePair<object, SerializeMethod>> serializers = new Internal.ThreadsafeTypeKeyHashTable<KeyValuePair<object, SerializeMethod>>();
readonly IFormatterResolver[] innerResolvers;
public DynamicObjectTypeFallbackFormatter(params IFormatterResolver[] innerResolvers)
{
this.innerResolvers = innerResolvers;
}
public int Serialize(ref byte[] bytes, int offset, object value, IFormatterResolver formatterResolver)
{
if (value == null)
{
return MessagePackBinary.WriteNil(ref bytes, offset);
}
var type = value.GetType();
var ti = type.GetTypeInfo();
if (type == typeof(object))
{
// serialize to empty map
return MessagePackBinary.WriteMapHeader(ref bytes, offset, 0);
}
KeyValuePair<object, SerializeMethod> formatterAndDelegate;
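// Look up a cached (formatter, compiled delegate) pair for the runtime type; build it once under a lock if it is missing.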
if (!serializers.TryGetValue(type, out formatterAndDelegate))
{
lock (serializers)
{
if (!serializers.TryGetValue(type, out formatterAndDelegate))
{
object formatter = null;
foreach (var innerResolver in innerResolvers)
{
formatter = innerResolver.GetFormatterDynamic(type);
if (formatter != null) break;
}
if (formatter == null)
{
throw new FormatterNotRegisteredException(type.FullName + " is not registered in this resolver. resolvers:" + string.Join(", ", innerResolvers.Select(x => x.GetType().Name).ToArray()));
}
var t = type;
{
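// Build an expression tree that casts the cached formatter and boxed value to their concrete types and calls IMessagePackFormatter<T>.Serialize, then compile it into a reusable delegate.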
var formatterType = typeof(IMessagePackFormatter<>).MakeGenericType(t);
var param0 = Expression.Parameter(typeof(object), "formatter");
var param1 = Expression.Parameter(typeof(byte[]).MakeByRefType(), "bytes");
var param2 = Expression.Parameter(typeof(int), "offset");
var param3 = Expression.Parameter(typeof(object), "value");
var param4 = Expression.Parameter(typeof(IFormatterResolver), "formatterResolver");
var serializeMethodInfo = formatterType.GetRuntimeMethod("Serialize", new[] { typeof(byte[]).MakeByRefType(), typeof(int), t, typeof(IFormatterResolver) });
var body = Expression.Call(
Expression.Convert(param0, formatterType),
serializeMethodInfo,
param1,
param2,
ti.IsValueType ? Expression.Unbox(param3, t) : Expression.Convert(param3, t),
param4);
var lambda = Expression.Lambda<SerializeMethod>(body, param0, param1, param2, param3, param4).Compile();
formatterAndDelegate = new KeyValuePair<object, SerializeMethod>(formatter, lambda);
}
serializers.TryAdd(t, formatterAndDelegate);
}
}
}
return formatterAndDelegate.Value(formatterAndDelegate.Key, ref bytes, offset, value, formatterResolver);
}
public object Deserialize(byte[] bytes, int offset, IFormatterResolver formatterResolver, out int readSize)
{
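// No type information survives serialization, so deserialization always falls back to the primitive object formatter.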
return PrimitiveObjectFormatter.Instance.Deserialize(bytes, offset, formatterResolver, out readSize);
}
}
}
#endif

View File

@ -0,0 +1,54 @@
//------------------------------------------------------------------------------
// <auto-generated />
// This file was automatically generated by the UpdateVendors tool.
//------------------------------------------------------------------------------
using System;
using System.Collections.Generic;
namespace Datadog.Trace.Vendors.MessagePack.Formatters
{
// Note: this implementation is not the fastest and could be improved further.
internal sealed class EnumAsStringFormatter<T> : IMessagePackFormatter<T>
{
readonly Dictionary<string, T> nameValueMapping;
readonly Dictionary<T, string> valueNameMapping;
public EnumAsStringFormatter()
{
var names = Enum.GetNames(typeof(T));
var values = Enum.GetValues(typeof(T));
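// Pre-compute name<->value lookup tables so the hot paths avoid Enum.ToString and Enum.Parse.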
nameValueMapping = new Dictionary<string, T>(names.Length);
valueNameMapping = new Dictionary<T, string>(names.Length);
for (int i = 0; i < names.Length; i++)
{
nameValueMapping[names[i]] = (T)values.GetValue(i);
valueNameMapping[(T)values.GetValue(i)] = names[i];
}
}
public int Serialize(ref byte[] bytes, int offset, T value, IFormatterResolver formatterResolver)
{
string name;
if (!valueNameMapping.TryGetValue(value, out name))
{
name = value.ToString(); // fallback for flags etc.; Enum.ToString is slow, hence the cached mapping above.
}
return MessagePackBinary.WriteString(ref bytes, offset, name);
}
public T Deserialize(byte[] bytes, int offset, IFormatterResolver formatterResolver, out int readSize)
{
var name = MessagePackBinary.ReadString(bytes, offset, out readSize);
T value;
if (!nameValueMapping.TryGetValue(name, out value))
{
value = (T)Enum.Parse(typeof(T), name); // fallback for flags etc.; Enum.Parse is slow, hence the cached mapping above.
}
return value;
}
}
}

View File

@ -0,0 +1,833 @@
//------------------------------------------------------------------------------
// <auto-generated />
// This file was automatically generated by the UpdateVendors tool.
//------------------------------------------------------------------------------
using System;
namespace Datadog.Trace.Vendors.MessagePack.Formatters
{
internal sealed class ForceInt16BlockFormatter : IMessagePackFormatter<Int16>
{
public static readonly ForceInt16BlockFormatter Instance = new ForceInt16BlockFormatter();
ForceInt16BlockFormatter()
{
}
public int Serialize(ref byte[] bytes, int offset, Int16 value, IFormatterResolver formatterResolver)
{
return MessagePackBinary.WriteInt16ForceInt16Block(ref bytes, offset, value);
}
public Int16 Deserialize(byte[] bytes, int offset, IFormatterResolver formatterResolver, out int readSize)
{
return MessagePackBinary.ReadInt16(bytes, offset, out readSize);
}
}
internal sealed class NullableForceInt16BlockFormatter : IMessagePackFormatter<Int16?>
{
public static readonly NullableForceInt16BlockFormatter Instance = new NullableForceInt16BlockFormatter();
NullableForceInt16BlockFormatter()
{
}
public int Serialize(ref byte[] bytes, int offset, Int16? value, IFormatterResolver formatterResolver)
{
if (value == null)
{
return MessagePackBinary.WriteNil(ref bytes, offset);
}
else
{
return MessagePackBinary.WriteInt16ForceInt16Block(ref bytes, offset, value.Value);
}
}
public Int16? Deserialize(byte[] bytes, int offset, IFormatterResolver formatterResolver, out int readSize)
{
if (MessagePackBinary.IsNil(bytes, offset))
{
readSize = 1;
return null;
}
else
{
return MessagePackBinary.ReadInt16(bytes, offset, out readSize);
}
}
}
internal sealed class ForceInt16BlockArrayFormatter : IMessagePackFormatter<Int16[]>
{
public static readonly ForceInt16BlockArrayFormatter Instance = new ForceInt16BlockArrayFormatter();
ForceInt16BlockArrayFormatter()
{
}
public int Serialize(ref byte[] bytes, int offset, Int16[] value, IFormatterResolver formatterResolver)
{
if (value == null)
{
return MessagePackBinary.WriteNil(ref bytes, offset);
}
else
{
var startOffset = offset;
offset += MessagePackBinary.WriteArrayHeader(ref bytes, offset, value.Length);
for (int i = 0; i < value.Length; i++)
{
offset += MessagePackBinary.WriteInt16ForceInt16Block(ref bytes, offset, value[i]);
}
return offset - startOffset;
}
}
public Int16[] Deserialize(byte[] bytes, int offset, IFormatterResolver formatterResolver, out int readSize)
{
if (MessagePackBinary.IsNil(bytes, offset))
{
readSize = 1;
return null;
}
else
{
var startOffset = offset;
var len = MessagePackBinary.ReadArrayHeader(bytes, offset, out readSize);
offset += readSize;
var array = new Int16[len];
using (MessagePackSecurity.DepthStep())
{
for (int i = 0; i < array.Length; i++)
{
array[i] = MessagePackBinary.ReadInt16(bytes, offset, out readSize);
offset += readSize;
}
}
readSize = offset - startOffset;
return array;
}
}
}
internal sealed class ForceInt32BlockFormatter : IMessagePackFormatter<Int32>
{
public static readonly ForceInt32BlockFormatter Instance = new ForceInt32BlockFormatter();
ForceInt32BlockFormatter()
{
}
public int Serialize(ref byte[] bytes, int offset, Int32 value, IFormatterResolver formatterResolver)
{
return MessagePackBinary.WriteInt32ForceInt32Block(ref bytes, offset, value);
}
public Int32 Deserialize(byte[] bytes, int offset, IFormatterResolver formatterResolver, out int readSize)
{
return MessagePackBinary.ReadInt32(bytes, offset, out readSize);
}
}
internal sealed class NullableForceInt32BlockFormatter : IMessagePackFormatter<Int32?>
{
public static readonly NullableForceInt32BlockFormatter Instance = new NullableForceInt32BlockFormatter();
NullableForceInt32BlockFormatter()
{
}
public int Serialize(ref byte[] bytes, int offset, Int32? value, IFormatterResolver formatterResolver)
{
if (value == null)
{
return MessagePackBinary.WriteNil(ref bytes, offset);
}
else
{
return MessagePackBinary.WriteInt32ForceInt32Block(ref bytes, offset, value.Value);
}
}
public Int32? Deserialize(byte[] bytes, int offset, IFormatterResolver formatterResolver, out int readSize)
{
if (MessagePackBinary.IsNil(bytes, offset))
{
readSize = 1;
return null;
}
else
{
return MessagePackBinary.ReadInt32(bytes, offset, out readSize);
}
}
}
internal sealed class ForceInt32BlockArrayFormatter : IMessagePackFormatter<Int32[]>
{
public static readonly ForceInt32BlockArrayFormatter Instance = new ForceInt32BlockArrayFormatter();
ForceInt32BlockArrayFormatter()
{
}
public int Serialize(ref byte[] bytes, int offset, Int32[] value, IFormatterResolver formatterResolver)
{
if (value == null)
{
return MessagePackBinary.WriteNil(ref bytes, offset);
}
else
{
var startOffset = offset;
offset += MessagePackBinary.WriteArrayHeader(ref bytes, offset, value.Length);
for (int i = 0; i < value.Length; i++)
{
offset += MessagePackBinary.WriteInt32ForceInt32Block(ref bytes, offset, value[i]);
}
return offset - startOffset;
}
}
public Int32[] Deserialize(byte[] bytes, int offset, IFormatterResolver formatterResolver, out int readSize)
{
if (MessagePackBinary.IsNil(bytes, offset))
{
readSize = 1;
return null;
}
else
{
var startOffset = offset;
var len = MessagePackBinary.ReadArrayHeader(bytes, offset, out readSize);
offset += readSize;
var array = new Int32[len];
using (MessagePackSecurity.DepthStep())
{
for (int i = 0; i < array.Length; i++)
{
array[i] = MessagePackBinary.ReadInt32(bytes, offset, out readSize);
offset += readSize;
}
}
readSize = offset - startOffset;
return array;
}
}
}
internal sealed class ForceInt64BlockFormatter : IMessagePackFormatter<Int64>
{
public static readonly ForceInt64BlockFormatter Instance = new ForceInt64BlockFormatter();
ForceInt64BlockFormatter()
{
}
public int Serialize(ref byte[] bytes, int offset, Int64 value, IFormatterResolver formatterResolver)
{
return MessagePackBinary.WriteInt64ForceInt64Block(ref bytes, offset, value);
}
public Int64 Deserialize(byte[] bytes, int offset, IFormatterResolver formatterResolver, out int readSize)
{
return MessagePackBinary.ReadInt64(bytes, offset, out readSize);
}
}
internal sealed class NullableForceInt64BlockFormatter : IMessagePackFormatter<Int64?>
{
public static readonly NullableForceInt64BlockFormatter Instance = new NullableForceInt64BlockFormatter();
NullableForceInt64BlockFormatter()
{
}
public int Serialize(ref byte[] bytes, int offset, Int64? value, IFormatterResolver formatterResolver)
{
if (value == null)
{
return MessagePackBinary.WriteNil(ref bytes, offset);
}
else
{
return MessagePackBinary.WriteInt64ForceInt64Block(ref bytes, offset, value.Value);
}
}
public Int64? Deserialize(byte[] bytes, int offset, IFormatterResolver formatterResolver, out int readSize)
{
if (MessagePackBinary.IsNil(bytes, offset))
{
readSize = 1;
return null;
}
else
{
return MessagePackBinary.ReadInt64(bytes, offset, out readSize);
}
}
}
internal sealed class ForceInt64BlockArrayFormatter : IMessagePackFormatter<Int64[]>
{
public static readonly ForceInt64BlockArrayFormatter Instance = new ForceInt64BlockArrayFormatter();
ForceInt64BlockArrayFormatter()
{
}
public int Serialize(ref byte[] bytes, int offset, Int64[] value, IFormatterResolver formatterResolver)
{
if (value == null)
{
return MessagePackBinary.WriteNil(ref bytes, offset);
}
else
{
var startOffset = offset;
offset += MessagePackBinary.WriteArrayHeader(ref bytes, offset, value.Length);
for (int i = 0; i < value.Length; i++)
{
offset += MessagePackBinary.WriteInt64ForceInt64Block(ref bytes, offset, value[i]);
}
return offset - startOffset;
}
}
public Int64[] Deserialize(byte[] bytes, int offset, IFormatterResolver formatterResolver, out int readSize)
{
if (MessagePackBinary.IsNil(bytes, offset))
{
readSize = 1;
return null;
}
else
{
var startOffset = offset;
var len = MessagePackBinary.ReadArrayHeader(bytes, offset, out readSize);
offset += readSize;
var array = new Int64[len];
using (MessagePackSecurity.DepthStep())
{
for (int i = 0; i < array.Length; i++)
{
array[i] = MessagePackBinary.ReadInt64(bytes, offset, out readSize);
offset += readSize;
}
}
readSize = offset - startOffset;
return array;
}
}
}
internal sealed class ForceUInt16BlockFormatter : IMessagePackFormatter<UInt16>
{
public static readonly ForceUInt16BlockFormatter Instance = new ForceUInt16BlockFormatter();
ForceUInt16BlockFormatter()
{
}
public int Serialize(ref byte[] bytes, int offset, UInt16 value, IFormatterResolver formatterResolver)
{
return MessagePackBinary.WriteUInt16ForceUInt16Block(ref bytes, offset, value);
}
public UInt16 Deserialize(byte[] bytes, int offset, IFormatterResolver formatterResolver, out int readSize)
{
return MessagePackBinary.ReadUInt16(bytes, offset, out readSize);
}
}
internal sealed class NullableForceUInt16BlockFormatter : IMessagePackFormatter<UInt16?>
{
public static readonly NullableForceUInt16BlockFormatter Instance = new NullableForceUInt16BlockFormatter();
NullableForceUInt16BlockFormatter()
{
}
public int Serialize(ref byte[] bytes, int offset, UInt16? value, IFormatterResolver formatterResolver)
{
if (value == null)
{
return MessagePackBinary.WriteNil(ref bytes, offset);
}
else
{
return MessagePackBinary.WriteUInt16ForceUInt16Block(ref bytes, offset, value.Value);
}
}
public UInt16? Deserialize(byte[] bytes, int offset, IFormatterResolver formatterResolver, out int readSize)
{
if (MessagePackBinary.IsNil(bytes, offset))
{
readSize = 1;
return null;
}
else
{
return MessagePackBinary.ReadUInt16(bytes, offset, out readSize);
}
}
}
internal sealed class ForceUInt16BlockArrayFormatter : IMessagePackFormatter<UInt16[]>
{
public static readonly ForceUInt16BlockArrayFormatter Instance = new ForceUInt16BlockArrayFormatter();
ForceUInt16BlockArrayFormatter()
{
}
public int Serialize(ref byte[] bytes, int offset, UInt16[] value, IFormatterResolver formatterResolver)
{
if (value == null)
{
return MessagePackBinary.WriteNil(ref bytes, offset);
}
else
{
var startOffset = offset;
offset += MessagePackBinary.WriteArrayHeader(ref bytes, offset, value.Length);
for (int i = 0; i < value.Length; i++)
{
offset += MessagePackBinary.WriteUInt16ForceUInt16Block(ref bytes, offset, value[i]);
}
return offset - startOffset;
}
}
public UInt16[] Deserialize(byte[] bytes, int offset, IFormatterResolver formatterResolver, out int readSize)
{
if (MessagePackBinary.IsNil(bytes, offset))
{
readSize = 1;
return null;
}
else
{
var startOffset = offset;
var len = MessagePackBinary.ReadArrayHeader(bytes, offset, out readSize);
offset += readSize;
var array = new UInt16[len];
using (MessagePackSecurity.DepthStep())
{
for (int i = 0; i < array.Length; i++)
{
array[i] = MessagePackBinary.ReadUInt16(bytes, offset, out readSize);
offset += readSize;
}
}
readSize = offset - startOffset;
return array;
}
}
}
internal sealed class ForceUInt32BlockFormatter : IMessagePackFormatter<UInt32>
{
public static readonly ForceUInt32BlockFormatter Instance = new ForceUInt32BlockFormatter();
ForceUInt32BlockFormatter()
{
}
public int Serialize(ref byte[] bytes, int offset, UInt32 value, IFormatterResolver formatterResolver)
{
return MessagePackBinary.WriteUInt32ForceUInt32Block(ref bytes, offset, value);
}
public UInt32 Deserialize(byte[] bytes, int offset, IFormatterResolver formatterResolver, out int readSize)
{
return MessagePackBinary.ReadUInt32(bytes, offset, out readSize);
}
}
internal sealed class NullableForceUInt32BlockFormatter : IMessagePackFormatter<UInt32?>
{
public static readonly NullableForceUInt32BlockFormatter Instance = new NullableForceUInt32BlockFormatter();
NullableForceUInt32BlockFormatter()
{
}
public int Serialize(ref byte[] bytes, int offset, UInt32? value, IFormatterResolver formatterResolver)
{
if (value == null)
{
return MessagePackBinary.WriteNil(ref bytes, offset);
}
else
{
return MessagePackBinary.WriteUInt32ForceUInt32Block(ref bytes, offset, value.Value);
}
}
public UInt32? Deserialize(byte[] bytes, int offset, IFormatterResolver formatterResolver, out int readSize)
{
if (MessagePackBinary.IsNil(bytes, offset))
{
readSize = 1;
return null;
}
else
{
return MessagePackBinary.ReadUInt32(bytes, offset, out readSize);
}
}
}
internal sealed class ForceUInt32BlockArrayFormatter : IMessagePackFormatter<UInt32[]>
{
public static readonly ForceUInt32BlockArrayFormatter Instance = new ForceUInt32BlockArrayFormatter();
ForceUInt32BlockArrayFormatter()
{
}
public int Serialize(ref byte[] bytes, int offset, UInt32[] value, IFormatterResolver formatterResolver)
{
if (value == null)
{
return MessagePackBinary.WriteNil(ref bytes, offset);
}
else
{
var startOffset = offset;
offset += MessagePackBinary.WriteArrayHeader(ref bytes, offset, value.Length);
for (int i = 0; i < value.Length; i++)
{
offset += MessagePackBinary.WriteUInt32ForceUInt32Block(ref bytes, offset, value[i]);
}
return offset - startOffset;
}
}
public UInt32[] Deserialize(byte[] bytes, int offset, IFormatterResolver formatterResolver, out int readSize)
{
if (MessagePackBinary.IsNil(bytes, offset))
{
readSize = 1;
return null;
}
else
{
var startOffset = offset;
var len = MessagePackBinary.ReadArrayHeader(bytes, offset, out readSize);
offset += readSize;
var array = new UInt32[len];
using (MessagePackSecurity.DepthStep())
{
for (int i = 0; i < array.Length; i++)
{
array[i] = MessagePackBinary.ReadUInt32(bytes, offset, out readSize);
offset += readSize;
}
}
readSize = offset - startOffset;
return array;
}
}
}
internal sealed class ForceUInt64BlockFormatter : IMessagePackFormatter<UInt64>
{
public static readonly ForceUInt64BlockFormatter Instance = new ForceUInt64BlockFormatter();
ForceUInt64BlockFormatter()
{
}
public int Serialize(ref byte[] bytes, int offset, UInt64 value, IFormatterResolver formatterResolver)
{
return MessagePackBinary.WriteUInt64ForceUInt64Block(ref bytes, offset, value);
}
public UInt64 Deserialize(byte[] bytes, int offset, IFormatterResolver formatterResolver, out int readSize)
{
return MessagePackBinary.ReadUInt64(bytes, offset, out readSize);
}
}
internal sealed class NullableForceUInt64BlockFormatter : IMessagePackFormatter<UInt64?>
{
public static readonly NullableForceUInt64BlockFormatter Instance = new NullableForceUInt64BlockFormatter();
NullableForceUInt64BlockFormatter()
{
}
public int Serialize(ref byte[] bytes, int offset, UInt64? value, IFormatterResolver formatterResolver)
{
if (value == null)
{
return MessagePackBinary.WriteNil(ref bytes, offset);
}
else
{
return MessagePackBinary.WriteUInt64ForceUInt64Block(ref bytes, offset, value.Value);
}
}
public UInt64? Deserialize(byte[] bytes, int offset, IFormatterResolver formatterResolver, out int readSize)
{
if (MessagePackBinary.IsNil(bytes, offset))
{
readSize = 1;
return null;
}
else
{
return MessagePackBinary.ReadUInt64(bytes, offset, out readSize);
}
}
}
internal sealed class ForceUInt64BlockArrayFormatter : IMessagePackFormatter<UInt64[]>
{
public static readonly ForceUInt64BlockArrayFormatter Instance = new ForceUInt64BlockArrayFormatter();
ForceUInt64BlockArrayFormatter()
{
}
public int Serialize(ref byte[] bytes, int offset, UInt64[] value, IFormatterResolver formatterResolver)
{
if (value == null)
{
return MessagePackBinary.WriteNil(ref bytes, offset);
}
else
{
var startOffset = offset;
offset += MessagePackBinary.WriteArrayHeader(ref bytes, offset, value.Length);
for (int i = 0; i < value.Length; i++)
{
offset += MessagePackBinary.WriteUInt64ForceUInt64Block(ref bytes, offset, value[i]);
}
return offset - startOffset;
}
}
public UInt64[] Deserialize(byte[] bytes, int offset, IFormatterResolver formatterResolver, out int readSize)
{
if (MessagePackBinary.IsNil(bytes, offset))
{
readSize = 1;
return null;
}
else
{
var startOffset = offset;
var len = MessagePackBinary.ReadArrayHeader(bytes, offset, out readSize);
offset += readSize;
var array = new UInt64[len];
using (MessagePackSecurity.DepthStep())
{
for (int i = 0; i < array.Length; i++)
{
array[i] = MessagePackBinary.ReadUInt64(bytes, offset, out readSize);
offset += readSize;
}
}
readSize = offset - startOffset;
return array;
}
}
}
internal sealed class ForceByteBlockFormatter : IMessagePackFormatter<Byte>
{
public static readonly ForceByteBlockFormatter Instance = new ForceByteBlockFormatter();
ForceByteBlockFormatter()
{
}
public int Serialize(ref byte[] bytes, int offset, Byte value, IFormatterResolver formatterResolver)
{
return MessagePackBinary.WriteByteForceByteBlock(ref bytes, offset, value);
}
public Byte Deserialize(byte[] bytes, int offset, IFormatterResolver formatterResolver, out int readSize)
{
return MessagePackBinary.ReadByte(bytes, offset, out readSize);
}
}
internal sealed class NullableForceByteBlockFormatter : IMessagePackFormatter<Byte?>
{
public static readonly NullableForceByteBlockFormatter Instance = new NullableForceByteBlockFormatter();
NullableForceByteBlockFormatter()
{
}
public int Serialize(ref byte[] bytes, int offset, Byte? value, IFormatterResolver formatterResolver)
{
if (value == null)
{
return MessagePackBinary.WriteNil(ref bytes, offset);
}
else
{
return MessagePackBinary.WriteByteForceByteBlock(ref bytes, offset, value.Value);
}
}
public Byte? Deserialize(byte[] bytes, int offset, IFormatterResolver formatterResolver, out int readSize)
{
if (MessagePackBinary.IsNil(bytes, offset))
{
readSize = 1;
return null;
}
else
{
return MessagePackBinary.ReadByte(bytes, offset, out readSize);
}
}
}
internal sealed class ForceSByteBlockFormatter : IMessagePackFormatter<SByte>
{
public static readonly ForceSByteBlockFormatter Instance = new ForceSByteBlockFormatter();
ForceSByteBlockFormatter()
{
}
public int Serialize(ref byte[] bytes, int offset, SByte value, IFormatterResolver formatterResolver)
{
return MessagePackBinary.WriteSByteForceSByteBlock(ref bytes, offset, value);
}
public SByte Deserialize(byte[] bytes, int offset, IFormatterResolver formatterResolver, out int readSize)
{
return MessagePackBinary.ReadSByte(bytes, offset, out readSize);
}
}
internal sealed class NullableForceSByteBlockFormatter : IMessagePackFormatter<SByte?>
{
public static readonly NullableForceSByteBlockFormatter Instance = new NullableForceSByteBlockFormatter();
NullableForceSByteBlockFormatter()
{
}
public int Serialize(ref byte[] bytes, int offset, SByte? value, IFormatterResolver formatterResolver)
{
if (value == null)
{
return MessagePackBinary.WriteNil(ref bytes, offset);
}
else
{
return MessagePackBinary.WriteSByteForceSByteBlock(ref bytes, offset, value.Value);
}
}
public SByte? Deserialize(byte[] bytes, int offset, IFormatterResolver formatterResolver, out int readSize)
{
if (MessagePackBinary.IsNil(bytes, offset))
{
readSize = 1;
return null;
}
else
{
return MessagePackBinary.ReadSByte(bytes, offset, out readSize);
}
}
}
internal sealed class ForceSByteBlockArrayFormatter : IMessagePackFormatter<SByte[]>
{
public static readonly ForceSByteBlockArrayFormatter Instance = new ForceSByteBlockArrayFormatter();
ForceSByteBlockArrayFormatter()
{
}
public int Serialize(ref byte[] bytes, int offset, SByte[] value, IFormatterResolver formatterResolver)
{
if (value == null)
{
return MessagePackBinary.WriteNil(ref bytes, offset);
}
else
{
var startOffset = offset;
offset += MessagePackBinary.WriteArrayHeader(ref bytes, offset, value.Length);
for (int i = 0; i < value.Length; i++)
{
offset += MessagePackBinary.WriteSByteForceSByteBlock(ref bytes, offset, value[i]);
}
return offset - startOffset;
}
}
public SByte[] Deserialize(byte[] bytes, int offset, IFormatterResolver formatterResolver, out int readSize)
{
if (MessagePackBinary.IsNil(bytes, offset))
{
readSize = 1;
return null;
}
else
{
var startOffset = offset;
var len = MessagePackBinary.ReadArrayHeader(bytes, offset, out readSize);
offset += readSize;
var array = new SByte[len];
using (MessagePackSecurity.DepthStep())
{
for (int i = 0; i < array.Length; i++)
{
array[i] = MessagePackBinary.ReadSByte(bytes, offset, out readSize);
offset += readSize;
}
}
readSize = offset - startOffset;
return array;
}
}
}
}

View File

@ -0,0 +1,138 @@
<#@ template debug="false" hostspecific="false" language="C#" #>
<#@ assembly name="System.Core" #>
<#@ import namespace="System.Linq" #>
<#@ import namespace="System.Text" #>
<#@ import namespace="System.Collections.Generic" #>
<#@ output extension=".cs" #>
<#
var types = new[]
{
typeof(Int16),
typeof(Int32),
typeof(Int64),
typeof(UInt16),
typeof(UInt32),
typeof(UInt64),
typeof(byte),
typeof(sbyte),
};
#>
using System;
namespace MessagePack.Formatters
{
<# foreach(var t in types) { #>
public sealed class Force<#= t.Name #>BlockFormatter : IMessagePackFormatter<<#= t.Name #>>
{
public static readonly Force<#= t.Name #>BlockFormatter Instance = new Force<#= t.Name #>BlockFormatter();
Force<#= t.Name #>BlockFormatter()
{
}
public int Serialize(ref byte[] bytes, int offset, <#= t.Name #> value, IFormatterResolver formatterResolver)
{
return MessagePackBinary.Write<#= t.Name #>Force<#= t.Name #>Block(ref bytes, offset, value);
}
public <#= t.Name #> Deserialize(byte[] bytes, int offset, IFormatterResolver formatterResolver, out int readSize)
{
return MessagePackBinary.Read<#= t.Name #>(bytes, offset, out readSize);
}
}
public sealed class NullableForce<#= t.Name #>BlockFormatter : IMessagePackFormatter<<#= t.Name #>?>
{
public static readonly NullableForce<#= t.Name #>BlockFormatter Instance = new NullableForce<#= t.Name #>BlockFormatter();
NullableForce<#= t.Name #>BlockFormatter()
{
}
public int Serialize(ref byte[] bytes, int offset, <#= t.Name #>? value, IFormatterResolver formatterResolver)
{
if (value == null)
{
return MessagePackBinary.WriteNil(ref bytes, offset);
}
else
{
return MessagePackBinary.Write<#= t.Name #>Force<#= t.Name #>Block(ref bytes, offset, value.Value);
}
}
public <#= t.Name #>? Deserialize(byte[] bytes, int offset, IFormatterResolver formatterResolver, out int readSize)
{
if (MessagePackBinary.IsNil(bytes, offset))
{
readSize = 1;
return null;
}
else
{
return MessagePackBinary.Read<#= t.Name #>(bytes, offset, out readSize);
}
}
}
<# if(t.Name != "Byte") { #>
public sealed class Force<#= t.Name #>BlockArrayFormatter : IMessagePackFormatter<<#= t.Name #>[]>
{
public static readonly Force<#= t.Name #>BlockArrayFormatter Instance = new Force<#= t.Name #>BlockArrayFormatter();
Force<#= t.Name #>BlockArrayFormatter()
{
}
public int Serialize(ref byte[] bytes, int offset, <#= t.Name #>[] value, IFormatterResolver formatterResolver)
{
if (value == null)
{
return MessagePackBinary.WriteNil(ref bytes, offset);
}
else
{
var startOffset = offset;
offset += MessagePackBinary.WriteArrayHeader(ref bytes, offset, value.Length);
for (int i = 0; i < value.Length; i++)
{
offset += MessagePackBinary.Write<#= t.Name #>Force<#= t.Name #>Block(ref bytes, offset, value[i]);
}
return offset - startOffset;
}
}
public <#= t.Name #>[] Deserialize(byte[] bytes, int offset, IFormatterResolver formatterResolver, out int readSize)
{
if (MessagePackBinary.IsNil(bytes, offset))
{
readSize = 1;
return null;
}
else
{
var startOffset = offset;
var len = MessagePackBinary.ReadArrayHeader(bytes, offset, out readSize);
offset += readSize;
var array = new <#= t.Name #>[len];
using (MessagePackSecurity.DepthStep())
{
for (int i = 0; i < array.Length; i++)
{
array[i] = MessagePackBinary.Read<#= t.Name #>(bytes, offset, out readSize);
offset += readSize;
}
}
readSize = offset - startOffset;
return array;
}
}
}
<# } #>
<# } #>
}

View File

@ -0,0 +1,19 @@
//------------------------------------------------------------------------------
// <auto-generated />
// This file was automatically generated by the UpdateVendors tool.
//------------------------------------------------------------------------------
namespace Datadog.Trace.Vendors.MessagePack.Formatters
{
// Non-generic marker interface.
internal interface IMessagePackFormatter
{
}
internal interface IMessagePackFormatter<T> : IMessagePackFormatter
{
int Serialize(ref byte[] bytes, int offset, T value, IFormatterResolver formatterResolver);
T Deserialize(byte[] bytes, int offset, IFormatterResolver formatterResolver, out int readSize);
}
}

View File

@ -0,0 +1,20 @@
//------------------------------------------------------------------------------
// <auto-generated />
// This file was automatically generated by the UpdateVendors tool.
//------------------------------------------------------------------------------
namespace Datadog.Trace.Vendors.MessagePack.Formatters
{
internal sealed class IgnoreFormatter<T> : IMessagePackFormatter<T>
{
public int Serialize(ref byte[] bytes, int offset, T value, IFormatterResolver formatterResolver)
{
return MessagePackBinary.WriteNil(ref bytes, offset);
}
public T Deserialize(byte[] bytes, int offset, IFormatterResolver formatterResolver, out int readSize)
{
readSize = MessagePackBinary.ReadNextBlock(bytes, offset);
return default(T);
}
}
}

View File

@ -0,0 +1,307 @@
//------------------------------------------------------------------------------
// <auto-generated />
// This file was automatically generated by the UpdateVendors tool.
//------------------------------------------------------------------------------
using System;
using System.Collections.Generic;
using System.Text;
namespace Datadog.Trace.Vendors.MessagePack.Formatters
{
// Multi-dimensional arrays serialize to [i, j, [seq]].
internal sealed class TwoDimentionalArrayFormatter<T> : IMessagePackFormatter<T[,]>
{
const int ArrayLength = 3;
public int Serialize(ref byte[] bytes, int offset, T[,] value, IFormatterResolver formatterResolver)
{
if (value == null)
{
return MessagePackBinary.WriteNil(ref bytes, offset);
}
else
{
var i = value.GetLength(0);
var j = value.GetLength(1);
var startOffset = offset;
var formatter = formatterResolver.GetFormatterWithVerify<T>();
offset += MessagePackBinary.WriteArrayHeader(ref bytes, offset, ArrayLength);
offset += MessagePackBinary.WriteInt32(ref bytes, offset, i);
offset += MessagePackBinary.WriteInt32(ref bytes, offset, j);
offset += MessagePackBinary.WriteArrayHeader(ref bytes, offset, value.Length);
foreach (var item in value)
{
offset += formatter.Serialize(ref bytes, offset, item, formatterResolver);
}
return offset - startOffset;
}
}
public T[,] Deserialize(byte[] bytes, int offset, IFormatterResolver formatterResolver, out int readSize)
{
if (MessagePackBinary.IsNil(bytes, offset))
{
readSize = 1;
return null;
}
else
{
var startOffset = offset;
var formatter = formatterResolver.GetFormatterWithVerify<T>();
var len = MessagePackBinary.ReadArrayHeader(bytes, offset, out readSize);
offset += readSize;
if (len != ArrayLength) throw new InvalidOperationException("Invalid T[,] format");
var iLength = MessagePackBinary.ReadInt32(bytes, offset, out readSize);
offset += readSize;
var jLength = MessagePackBinary.ReadInt32(bytes, offset, out readSize);
offset += readSize;
var maxLen = MessagePackBinary.ReadArrayHeader(bytes, offset, out readSize);
offset += readSize;
var array = new T[iLength, jLength];
var i = 0;
var j = -1;
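// Elements were written in row-major order: advance j within the current row and wrap to the next i when the row is full.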
using (MessagePackSecurity.DepthStep())
{
for (int loop = 0; loop < maxLen; loop++)
{
if (j < jLength - 1)
{
j++;
}
else
{
j = 0;
i++;
}
array[i, j] = formatter.Deserialize(bytes, offset, formatterResolver, out readSize);
offset += readSize;
}
}
readSize = offset - startOffset;
return array;
}
}
}
internal sealed class ThreeDimentionalArrayFormatter<T> : IMessagePackFormatter<T[,,]>
{
const int ArrayLength = 4;
public int Serialize(ref byte[] bytes, int offset, T[,,] value, IFormatterResolver formatterResolver)
{
if (value == null)
{
return MessagePackBinary.WriteNil(ref bytes, offset);
}
else
{
var i = value.GetLength(0);
var j = value.GetLength(1);
var k = value.GetLength(2);
var startOffset = offset;
var formatter = formatterResolver.GetFormatterWithVerify<T>();
offset += MessagePackBinary.WriteArrayHeader(ref bytes, offset, ArrayLength);
offset += MessagePackBinary.WriteInt32(ref bytes, offset, i);
offset += MessagePackBinary.WriteInt32(ref bytes, offset, j);
offset += MessagePackBinary.WriteInt32(ref bytes, offset, k);
offset += MessagePackBinary.WriteArrayHeader(ref bytes, offset, value.Length);
foreach (var item in value)
{
offset += formatter.Serialize(ref bytes, offset, item, formatterResolver);
}
return offset - startOffset;
}
}
public T[,,] Deserialize(byte[] bytes, int offset, IFormatterResolver formatterResolver, out int readSize)
{
if (MessagePackBinary.IsNil(bytes, offset))
{
readSize = 1;
return null;
}
else
{
var startOffset = offset;
var formatter = formatterResolver.GetFormatterWithVerify<T>();
var len = MessagePackBinary.ReadArrayHeader(bytes, offset, out readSize);
offset += readSize;
if (len != ArrayLength) throw new InvalidOperationException("Invalid T[,,] format");
var iLength = MessagePackBinary.ReadInt32(bytes, offset, out readSize);
offset += readSize;
var jLength = MessagePackBinary.ReadInt32(bytes, offset, out readSize);
offset += readSize;
var kLength = MessagePackBinary.ReadInt32(bytes, offset, out readSize);
offset += readSize;
var maxLen = MessagePackBinary.ReadArrayHeader(bytes, offset, out readSize);
offset += readSize;
var array = new T[iLength, jLength, kLength];
var i = 0;
var j = 0;
var k = -1;
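// Same row-major flattening as the 2D case, extended to three indices: k varies fastest, then j, then i.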
using (MessagePackSecurity.DepthStep())
{
for (int loop = 0; loop < maxLen; loop++)
{
if (k < kLength - 1)
{
k++;
}
else if (j < jLength - 1)
{
k = 0;
j++;
}
else
{
k = 0;
j = 0;
i++;
}
array[i, j, k] = formatter.Deserialize(bytes, offset, formatterResolver, out readSize);
offset += readSize;
}
}
readSize = offset - startOffset;
return array;
}
}
}
internal sealed class FourDimentionalArrayFormatter<T> : IMessagePackFormatter<T[,,,]>
{
const int ArrayLength = 5;
public int Serialize(ref byte[] bytes, int offset, T[,,,] value, IFormatterResolver formatterResolver)
{
if (value == null)
{
return MessagePackBinary.WriteNil(ref bytes, offset);
}
else
{
var i = value.GetLength(0);
var j = value.GetLength(1);
var k = value.GetLength(2);
var l = value.GetLength(3);
var startOffset = offset;
var formatter = formatterResolver.GetFormatterWithVerify<T>();
offset += MessagePackBinary.WriteArrayHeader(ref bytes, offset, ArrayLength);
offset += MessagePackBinary.WriteInt32(ref bytes, offset, i);
offset += MessagePackBinary.WriteInt32(ref bytes, offset, j);
offset += MessagePackBinary.WriteInt32(ref bytes, offset, k);
offset += MessagePackBinary.WriteInt32(ref bytes, offset, l);
offset += MessagePackBinary.WriteArrayHeader(ref bytes, offset, value.Length);
foreach (var item in value)
{
offset += formatter.Serialize(ref bytes, offset, item, formatterResolver);
}
return offset - startOffset;
}
}
public T[,,,] Deserialize(byte[] bytes, int offset, IFormatterResolver formatterResolver, out int readSize)
{
if (MessagePackBinary.IsNil(bytes, offset))
{
readSize = 1;
return null;
}
else
{
var startOffset = offset;
var formatter = formatterResolver.GetFormatterWithVerify<T>();
var len = MessagePackBinary.ReadArrayHeader(bytes, offset, out readSize);
offset += readSize;
if (len != ArrayLength) throw new InvalidOperationException("Invalid T[,,,] format");
var iLength = MessagePackBinary.ReadInt32(bytes, offset, out readSize);
offset += readSize;
var jLength = MessagePackBinary.ReadInt32(bytes, offset, out readSize);
offset += readSize;
var kLength = MessagePackBinary.ReadInt32(bytes, offset, out readSize);
offset += readSize;
var lLength = MessagePackBinary.ReadInt32(bytes, offset, out readSize);
offset += readSize;
var maxLen = MessagePackBinary.ReadArrayHeader(bytes, offset, out readSize);
offset += readSize;
var array = new T[iLength, jLength, kLength, lLength];
var i = 0;
var j = 0;
var k = 0;
var l = -1;
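// Four-index version of the same flattening: l varies fastest, then k, then j, then i.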
using (MessagePackSecurity.DepthStep())
{
for (int loop = 0; loop < maxLen; loop++)
{
if (l < lLength - 1)
{
l++;
}
else if (k < kLength - 1)
{
l = 0;
k++;
}
else if (j < jLength - 1)
{
l = 0;
k = 0;
j++;
}
else
{
l = 0;
k = 0;
j = 0;
i++;
}
array[i, j, k, l] = formatter.Deserialize(bytes, offset, formatterResolver, out readSize);
offset += readSize;
}
}
readSize = offset - startOffset;
return array;
}
}
}
}

View File

@ -0,0 +1,75 @@
//------------------------------------------------------------------------------
// <auto-generated />
// This file was automatically generated by the UpdateVendors tool.
//------------------------------------------------------------------------------
using System;
using System.Collections.Generic;
using System.Text;
namespace Datadog.Trace.Vendors.MessagePack.Formatters
{
internal sealed class NullableFormatter<T> : IMessagePackFormatter<T?>
where T : struct
{
public int Serialize(ref byte[] bytes, int offset, T? value, IFormatterResolver formatterResolver)
{
if (value == null)
{
return MessagePackBinary.WriteNil(ref bytes, offset);
}
else
{
return formatterResolver.GetFormatterWithVerify<T>().Serialize(ref bytes, offset, value.Value, formatterResolver);
}
}
public T? Deserialize(byte[] bytes, int offset, IFormatterResolver formatterResolver, out int readSize)
{
if (MessagePackBinary.IsNil(bytes, offset))
{
readSize = 1;
return null;
}
else
{
return formatterResolver.GetFormatterWithVerify<T>().Deserialize(bytes, offset, formatterResolver, out readSize);
}
}
}
internal sealed class StaticNullableFormatter<T> : IMessagePackFormatter<T?>
where T : struct
{
readonly IMessagePackFormatter<T> underlyingFormatter;
public StaticNullableFormatter(IMessagePackFormatter<T> underlyingFormatter)
{
this.underlyingFormatter = underlyingFormatter;
}
public int Serialize(ref byte[] bytes, int offset, T? value, IFormatterResolver formatterResolver)
{
if (value == null)
{
return MessagePackBinary.WriteNil(ref bytes, offset);
}
else
{
return underlyingFormatter.Serialize(ref bytes, offset, value.Value, formatterResolver);
}
}
public T? Deserialize(byte[] bytes, int offset, IFormatterResolver formatterResolver, out int readSize)
{
if (MessagePackBinary.IsNil(bytes, offset))
{
readSize = 1;
return null;
}
else
{
return underlyingFormatter.Deserialize(bytes, offset, formatterResolver, out readSize);
}
}
}
}

View File

@ -0,0 +1,264 @@
//------------------------------------------------------------------------------
// <auto-generated />
// This file was automatically generated by the UpdateVendors tool.
//------------------------------------------------------------------------------
using System;
namespace Datadog.Trace.Vendors.MessagePack.Formatters
{
/// <summary>
/// Serializes using the .NET-native DateTime binary format.
/// </summary>
internal sealed class NativeDateTimeFormatter : IMessagePackFormatter<DateTime>
{
public static readonly NativeDateTimeFormatter Instance = new NativeDateTimeFormatter();
public int Serialize(ref byte[] bytes, int offset, DateTime value, IFormatterResolver formatterResolver)
{
var dateData = value.ToBinary();
return MessagePackBinary.WriteInt64(ref bytes, offset, dateData);
}
public DateTime Deserialize(byte[] bytes, int offset, IFormatterResolver formatterResolver, out int readSize)
{
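// Data written with the standard (msgpack extension) DateTime format is still accepted and routed to DateTimeFormatter.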
if (MessagePackBinary.GetMessagePackType(bytes, offset) == MessagePackType.Extension)
{
return DateTimeFormatter.Instance.Deserialize(bytes, offset, formatterResolver, out readSize);
}
var dateData = MessagePackBinary.ReadInt64(bytes, offset, out readSize);
return DateTime.FromBinary(dateData);
}
}
internal sealed class NativeDateTimeArrayFormatter : IMessagePackFormatter<DateTime[]>
{
public static readonly NativeDateTimeArrayFormatter Instance = new NativeDateTimeArrayFormatter();
public int Serialize(ref byte[] bytes, int offset, DateTime[] value, IFormatterResolver formatterResolver)
{
if (value == null)
{
return MessagePackBinary.WriteNil(ref bytes, offset);
}
else
{
var startOffset = offset;
offset += MessagePackBinary.WriteArrayHeader(ref bytes, offset, value.Length);
for (int i = 0; i < value.Length; i++)
{
offset += MessagePackBinary.WriteInt64(ref bytes, offset, value[i].ToBinary());
}
return offset - startOffset;
}
}
public DateTime[] Deserialize(byte[] bytes, int offset, IFormatterResolver formatterResolver, out int readSize)
{
if (MessagePackBinary.IsNil(bytes, offset))
{
readSize = 1;
return null;
}
else
{
var startOffset = offset;
var len = MessagePackBinary.ReadArrayHeader(bytes, offset, out readSize);
offset += readSize;
var array = new DateTime[len];
for (int i = 0; i < array.Length; i++)
{
var dateData = MessagePackBinary.ReadInt64(bytes, offset, out readSize);
array[i] = DateTime.FromBinary(dateData);
offset += readSize;
}
readSize = offset - startOffset;
return array;
}
}
}
// Old-Spec
// bin 8, bin 16, bin 32, str 8, str 16, str 32 -> fixraw or raw 16 or raw 32
// fixraw -> fixstr, raw16 -> str16, raw32 -> str32
// https://github.com/msgpack/msgpack/blob/master/spec-old.md
/// <summary>
/// Old-MessagePack spec's string formatter.
/// </summary>
internal sealed class OldSpecStringFormatter : IMessagePackFormatter<string>
{
public static readonly OldSpecStringFormatter Instance = new OldSpecStringFormatter();
// The old spec does not have the str 8 format.
public int Serialize(ref byte[] bytes, int offset, string value, IFormatterResolver formatterResolver)
{
if (value == null) return MessagePackBinary.WriteNil(ref bytes, offset);
MessagePackBinary.EnsureCapacity(ref bytes, offset, StringEncoding.UTF8.GetMaxByteCount(value.Length) + 5);
int useOffset;
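// Estimate the header size (1, 3, or 5 bytes) from the character count, encode the UTF-8 body after that gap, then write the real prefix; the body is shifted if the byte count needs a larger prefix than estimated.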
if (value.Length <= MessagePackRange.MaxFixStringLength)
{
useOffset = 1;
}
else if (value.Length <= ushort.MaxValue)
{
useOffset = 3;
}
else
{
useOffset = 5;
}
// skip length area
var writeBeginOffset = offset + useOffset;
var byteCount = StringEncoding.UTF8.GetBytes(value, 0, value.Length, bytes, writeBeginOffset);
// move body and write prefix
if (byteCount <= MessagePackRange.MaxFixStringLength)
{
if (useOffset != 1)
{
Buffer.BlockCopy(bytes, writeBeginOffset, bytes, offset + 1, byteCount);
}
bytes[offset] = (byte)(MessagePackCode.MinFixStr | byteCount);
return byteCount + 1;
}
else if (byteCount <= ushort.MaxValue)
{
if (useOffset != 3)
{
Buffer.BlockCopy(bytes, writeBeginOffset, bytes, offset + 3, byteCount);
}
bytes[offset] = MessagePackCode.Str16;
bytes[offset + 1] = unchecked((byte)(byteCount >> 8));
bytes[offset + 2] = unchecked((byte)byteCount);
return byteCount + 3;
}
else
{
if (useOffset != 5)
{
Buffer.BlockCopy(bytes, writeBeginOffset, bytes, offset + 5, byteCount);
}
bytes[offset] = MessagePackCode.Str32;
bytes[offset + 1] = unchecked((byte)(byteCount >> 24));
bytes[offset + 2] = unchecked((byte)(byteCount >> 16));
bytes[offset + 3] = unchecked((byte)(byteCount >> 8));
bytes[offset + 4] = unchecked((byte)byteCount);
return byteCount + 5;
}
}
public string Deserialize(byte[] bytes, int offset, IFormatterResolver formatterResolver, out int readSize)
{
return MessagePackBinary.ReadString(bytes, offset, out readSize);
}
}
/// <summary>
/// Old-MessagePack spec's binary formatter.
/// </summary>
internal sealed class OldSpecBinaryFormatter : IMessagePackFormatter<byte[]>
{
public static readonly OldSpecBinaryFormatter Instance = new OldSpecBinaryFormatter();
public int Serialize(ref byte[] bytes, int offset, byte[] value, IFormatterResolver formatterResolver)
{
if (value == null) return MessagePackBinary.WriteNil(ref bytes, offset);
var byteCount = value.Length;
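// The old spec has no bin family, so binary payloads are written with the raw/str headers instead.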
if (byteCount <= MessagePackRange.MaxFixStringLength)
{
MessagePackBinary.EnsureCapacity(ref bytes, offset, byteCount + 1);
bytes[offset] = (byte)(MessagePackCode.MinFixStr | byteCount);
Buffer.BlockCopy(value, 0, bytes, offset + 1, byteCount);
return byteCount + 1;
}
else if (byteCount <= ushort.MaxValue)
{
MessagePackBinary.EnsureCapacity(ref bytes, offset, byteCount + 3);
bytes[offset] = MessagePackCode.Str16;
bytes[offset + 1] = unchecked((byte)(byteCount >> 8));
bytes[offset + 2] = unchecked((byte)byteCount);
Buffer.BlockCopy(value, 0, bytes, offset + 3, byteCount);
return byteCount + 3;
}
else
{
MessagePackBinary.EnsureCapacity(ref bytes, offset, byteCount + 5);
bytes[offset] = MessagePackCode.Str32;
bytes[offset + 1] = unchecked((byte)(byteCount >> 24));
bytes[offset + 2] = unchecked((byte)(byteCount >> 16));
bytes[offset + 3] = unchecked((byte)(byteCount >> 8));
bytes[offset + 4] = unchecked((byte)byteCount);
Buffer.BlockCopy(value, 0, bytes, offset + 5, byteCount);
return byteCount + 5;
}
}
public byte[] Deserialize(byte[] bytes, int offset, IFormatterResolver formatterResolver, out int readSize)
{
var type = MessagePackBinary.GetMessagePackType(bytes, offset);
if (type == MessagePackType.Nil)
{
readSize = 1;
return null;
}
else if (type == MessagePackType.Binary)
{
return MessagePackBinary.ReadBytes(bytes, offset, out readSize);
}
else if (type == MessagePackType.String)
{
var code = bytes[offset];
unchecked
{
if (MessagePackCode.MinFixStr <= code && code <= MessagePackCode.MaxFixStr)
{
var length = bytes[offset] & 0x1F;
readSize = length + 1;
var result = new byte[length];
Buffer.BlockCopy(bytes, offset + 1, result, 0, result.Length);
return result;
}
else if (code == MessagePackCode.Str8)
{
var length = (int)bytes[offset + 1];
readSize = length + 2;
var result = new byte[length];
Buffer.BlockCopy(bytes, offset + 2, result, 0, result.Length);
return result;
}
else if (code == MessagePackCode.Str16)
{
var length = (bytes[offset + 1] << 8) + (bytes[offset + 2]);
readSize = length + 3;
var result = new byte[length];
Buffer.BlockCopy(bytes, offset + 3, result, 0, result.Length);
return result;
}
else if (code == MessagePackCode.Str32)
{
var length = (int)((uint)(bytes[offset + 1] << 24) | (uint)(bytes[offset + 2] << 16) | (uint)(bytes[offset + 3] << 8) | (uint)bytes[offset + 4]);
readSize = length + 5;
var result = new byte[length];
Buffer.BlockCopy(bytes, offset + 5, result, 0, result.Length);
return result;
}
}
}
throw new InvalidOperationException(string.Format("code is invalid. code:{0} format:{1}", bytes[offset], MessagePackCode.ToFormatName(bytes[offset])));
}
}
}

File diff suppressed because it is too large.

View File

@ -0,0 +1,139 @@
<#@ template debug="false" hostspecific="false" language="C#" #>
<#@ assembly name="System.Core" #>
<#@ import namespace="System.Linq" #>
<#@ import namespace="System.Text" #>
<#@ import namespace="System.Collections.Generic" #>
<#@ output extension=".cs" #>
<#
var types = new[]
{
typeof(Int16),
typeof(Int32),
typeof(Int64),
typeof(UInt16),
typeof(UInt32),
typeof(UInt64),
typeof(Single),
typeof(Double),
typeof(bool),
typeof(byte),
typeof(sbyte),
typeof(char),
typeof(DateTime)
};
#>
using System;
namespace MessagePack.Formatters
{
<# foreach(var t in types) { #>
public sealed class <#= t.Name #>Formatter : IMessagePackFormatter<<#= t.Name #>>
{
public static readonly <#= t.Name #>Formatter Instance = new <#= t.Name #>Formatter();
<#= t.Name #>Formatter()
{
}
public int Serialize(ref byte[] bytes, int offset, <#= t.Name #> value, IFormatterResolver formatterResolver)
{
return MessagePackBinary.Write<#= t.Name #>(ref bytes, offset, value);
}
public <#= t.Name #> Deserialize(byte[] bytes, int offset, IFormatterResolver formatterResolver, out int readSize)
{
return MessagePackBinary.Read<#= t.Name #>(bytes, offset, out readSize);
}
}
public sealed class Nullable<#= t.Name #>Formatter : IMessagePackFormatter<<#= t.Name #>?>
{
public static readonly Nullable<#= t.Name #>Formatter Instance = new Nullable<#= t.Name #>Formatter();
Nullable<#= t.Name #>Formatter()
{
}
public int Serialize(ref byte[] bytes, int offset, <#= t.Name #>? value, IFormatterResolver formatterResolver)
{
if (value == null)
{
return MessagePackBinary.WriteNil(ref bytes, offset);
}
else
{
return MessagePackBinary.Write<#= t.Name #>(ref bytes, offset, value.Value);
}
}
public <#= t.Name #>? Deserialize(byte[] bytes, int offset, IFormatterResolver formatterResolver, out int readSize)
{
if (MessagePackBinary.IsNil(bytes, offset))
{
readSize = 1;
return null;
}
else
{
return MessagePackBinary.Read<#= t.Name #>(bytes, offset, out readSize);
}
}
}
<# if(t.Name != "Byte") { #>
public sealed class <#= t.Name #>ArrayFormatter : IMessagePackFormatter<<#= t.Name #>[]>
{
public static readonly <#= t.Name #>ArrayFormatter Instance = new <#= t.Name #>ArrayFormatter();
<#= t.Name #>ArrayFormatter()
{
}
public int Serialize(ref byte[] bytes, int offset, <#= t.Name #>[] value, IFormatterResolver formatterResolver)
{
if (value == null)
{
return MessagePackBinary.WriteNil(ref bytes, offset);
}
else
{
var startOffset = offset;
offset += MessagePackBinary.WriteArrayHeader(ref bytes, offset, value.Length);
for (int i = 0; i < value.Length; i++)
{
offset += MessagePackBinary.Write<#= t.Name #>(ref bytes, offset, value[i]);
}
return offset - startOffset;
}
}
public <#= t.Name #>[] Deserialize(byte[] bytes, int offset, IFormatterResolver formatterResolver, out int readSize)
{
if (MessagePackBinary.IsNil(bytes, offset))
{
readSize = 1;
return null;
}
else
{
var startOffset = offset;
var len = MessagePackBinary.ReadArrayHeader(bytes, offset, out readSize);
offset += readSize;
var array = new <#= t.Name #>[len];
for (int i = 0; i < array.Length; i++)
{
array[i] = MessagePackBinary.Read<#= t.Name #>(bytes, offset, out readSize);
offset += readSize;
}
readSize = offset - startOffset;
return array;
}
}
}
<# } #>
<# } #>
}

View File

@ -0,0 +1,257 @@
//------------------------------------------------------------------------------
// <auto-generated />
// This file was automatically generated by the UpdateVendors tool.
//------------------------------------------------------------------------------
using System;
using System.Reflection;
using System.Collections.Generic;
namespace Datadog.Trace.Vendors.MessagePack.Formatters
{
internal sealed class PrimitiveObjectFormatter : IMessagePackFormatter<object>
{
public static readonly IMessagePackFormatter<object> Instance = new PrimitiveObjectFormatter();
static readonly Dictionary<Type, int> typeToJumpCode = new Dictionary<Type, int>()
{
// When adding types whose size exceeds 32 bits, add support in MessagePackSecurity.GetHashCollisionResistantEqualityComparer<T>()
{ typeof(Boolean), 0 },
{ typeof(Char), 1 },
{ typeof(SByte), 2 },
{ typeof(Byte), 3 },
{ typeof(Int16), 4 },
{ typeof(UInt16), 5 },
{ typeof(Int32), 6 },
{ typeof(UInt32), 7 },
{ typeof(Int64), 8 },
{ typeof(UInt64),9 },
{ typeof(Single), 10 },
{ typeof(Double), 11 },
{ typeof(DateTime), 12 },
{ typeof(string), 13 },
{ typeof(byte[]), 14 }
};
PrimitiveObjectFormatter()
{
}
#if !UNITY_WSA
public static bool IsSupportedType(Type type, TypeInfo typeInfo, object value)
{
if (value == null) return true;
if (typeToJumpCode.ContainsKey(type)) return true;
if (typeInfo.IsEnum) return true;
if (value is System.Collections.IDictionary) return true;
if (value is System.Collections.ICollection) return true;
return false;
}
#endif
public int Serialize(ref byte[] bytes, int offset, object value, IFormatterResolver formatterResolver)
{
if (value == null)
{
return MessagePackBinary.WriteNil(ref bytes, offset);
}
var t = value.GetType();
int code;
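// Fast path: dispatch on a precomputed jump code for the closed set of supported primitive types.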
if (typeToJumpCode.TryGetValue(t, out code))
{
switch (code)
{
case 0:
return MessagePackBinary.WriteBoolean(ref bytes, offset, (bool)value);
case 1:
return MessagePackBinary.WriteChar(ref bytes, offset, (char)value);
case 2:
return MessagePackBinary.WriteSByteForceSByteBlock(ref bytes, offset, (sbyte)value);
case 3:
return MessagePackBinary.WriteByteForceByteBlock(ref bytes, offset, (byte)value);
case 4:
return MessagePackBinary.WriteInt16ForceInt16Block(ref bytes, offset, (Int16)value);
case 5:
return MessagePackBinary.WriteUInt16ForceUInt16Block(ref bytes, offset, (UInt16)value);
case 6:
return MessagePackBinary.WriteInt32ForceInt32Block(ref bytes, offset, (Int32)value);
case 7:
return MessagePackBinary.WriteUInt32ForceUInt32Block(ref bytes, offset, (UInt32)value);
case 8:
return MessagePackBinary.WriteInt64ForceInt64Block(ref bytes, offset, (Int64)value);
case 9:
return MessagePackBinary.WriteUInt64ForceUInt64Block(ref bytes, offset, (UInt64)value);
case 10:
return MessagePackBinary.WriteSingle(ref bytes, offset, (Single)value);
case 11:
return MessagePackBinary.WriteDouble(ref bytes, offset, (double)value);
case 12:
return MessagePackBinary.WriteDateTime(ref bytes, offset, (DateTime)value);
case 13:
return MessagePackBinary.WriteString(ref bytes, offset, (string)value);
case 14:
return MessagePackBinary.WriteBytes(ref bytes, offset, (byte[])value);
default:
throw new InvalidOperationException("Not supported primitive object resolver. type:" + t.Name);
}
}
else
{
#if UNITY_WSA && !NETFX_CORE
if (t.IsEnum)
#else
if (t.GetTypeInfo().IsEnum)
#endif
{
var underlyingType = Enum.GetUnderlyingType(t);
var code2 = typeToJumpCode[underlyingType];
switch (code2)
{
case 2:
return MessagePackBinary.WriteSByteForceSByteBlock(ref bytes, offset, (sbyte)value);
case 3:
return MessagePackBinary.WriteByteForceByteBlock(ref bytes, offset, (byte)value);
case 4:
return MessagePackBinary.WriteInt16ForceInt16Block(ref bytes, offset, (Int16)value);
case 5:
return MessagePackBinary.WriteUInt16ForceUInt16Block(ref bytes, offset, (UInt16)value);
case 6:
return MessagePackBinary.WriteInt32ForceInt32Block(ref bytes, offset, (Int32)value);
case 7:
return MessagePackBinary.WriteUInt32ForceUInt32Block(ref bytes, offset, (UInt32)value);
case 8:
return MessagePackBinary.WriteInt64ForceInt64Block(ref bytes, offset, (Int64)value);
case 9:
return MessagePackBinary.WriteUInt64ForceUInt64Block(ref bytes, offset, (UInt64)value);
default:
break;
}
}
else if (value is System.Collections.IDictionary) // check IDictionary first
{
var d = value as System.Collections.IDictionary;
var startOffset = offset;
offset += MessagePackBinary.WriteMapHeader(ref bytes, offset, d.Count);
foreach (System.Collections.DictionaryEntry item in d)
{
offset += Serialize(ref bytes, offset, item.Key, formatterResolver);
offset += Serialize(ref bytes, offset, item.Value, formatterResolver);
}
return offset - startOffset;
}
else if (value is System.Collections.ICollection)
{
var c = value as System.Collections.ICollection;
var startOffset = offset;
offset += MessagePackBinary.WriteArrayHeader(ref bytes, offset, c.Count);
foreach (var item in c)
{
offset += Serialize(ref bytes, offset, item, formatterResolver);
}
return offset - startOffset;
}
}
throw new InvalidOperationException("Not supported primitive object resolver. type:" + t.Name);
}
public object Deserialize(byte[] bytes, int offset, IFormatterResolver formatterResolver, out int readSize)
{
var type = MessagePackBinary.GetMessagePackType(bytes, offset);
switch (type)
{
case MessagePackType.Integer:
var code = bytes[offset];
if (MessagePackCode.MinNegativeFixInt <= code && code <= MessagePackCode.MaxNegativeFixInt) return MessagePackBinary.ReadSByte(bytes, offset, out readSize);
else if (MessagePackCode.MinFixInt <= code && code <= MessagePackCode.MaxFixInt) return MessagePackBinary.ReadByte(bytes, offset, out readSize);
else if (code == MessagePackCode.Int8) return MessagePackBinary.ReadSByte(bytes, offset, out readSize);
else if (code == MessagePackCode.Int16) return MessagePackBinary.ReadInt16(bytes, offset, out readSize);
else if (code == MessagePackCode.Int32) return MessagePackBinary.ReadInt32(bytes, offset, out readSize);
else if (code == MessagePackCode.Int64) return MessagePackBinary.ReadInt64(bytes, offset, out readSize);
else if (code == MessagePackCode.UInt8) return MessagePackBinary.ReadByte(bytes, offset, out readSize);
else if (code == MessagePackCode.UInt16) return MessagePackBinary.ReadUInt16(bytes, offset, out readSize);
else if (code == MessagePackCode.UInt32) return MessagePackBinary.ReadUInt32(bytes, offset, out readSize);
else if (code == MessagePackCode.UInt64) return MessagePackBinary.ReadUInt64(bytes, offset, out readSize);
throw new InvalidOperationException("Invalid primitive bytes.");
case MessagePackType.Boolean:
return MessagePackBinary.ReadBoolean(bytes, offset, out readSize);
case MessagePackType.Float:
if (MessagePackCode.Float32 == bytes[offset])
{
return MessagePackBinary.ReadSingle(bytes, offset, out readSize);
}
else
{
return MessagePackBinary.ReadDouble(bytes, offset, out readSize);
}
case MessagePackType.String:
return MessagePackBinary.ReadString(bytes, offset, out readSize);
case MessagePackType.Binary:
return MessagePackBinary.ReadBytes(bytes, offset, out readSize);
case MessagePackType.Extension:
var ext = MessagePackBinary.ReadExtensionFormatHeader(bytes, offset, out readSize);
if (ext.TypeCode == ReservedMessagePackExtensionTypeCode.DateTime)
{
return MessagePackBinary.ReadDateTime(bytes, offset, out readSize);
}
throw new InvalidOperationException("Invalid primitive bytes.");
case MessagePackType.Array:
{
var length = MessagePackBinary.ReadArrayHeader(bytes, offset, out readSize);
var startOffset = offset;
offset += readSize;
var objectFormatter = formatterResolver.GetFormatter<object>();
var array = new object[length];
using (MessagePackSecurity.DepthStep())
{
for (int i = 0; i < length; i++)
{
array[i] = objectFormatter.Deserialize(bytes, offset, formatterResolver, out readSize);
offset += readSize;
}
}
readSize = offset - startOffset;
return array;
}
case MessagePackType.Map:
{
var length = MessagePackBinary.ReadMapHeader(bytes, offset, out readSize);
var startOffset = offset;
offset += readSize;
var objectFormatter = formatterResolver.GetFormatter<object>();
var hash = new Dictionary<object, object>(length, MessagePackSecurity.Active.GetEqualityComparer<object>());
using (MessagePackSecurity.DepthStep())
{
for (int i = 0; i < length; i++)
{
var key = objectFormatter.Deserialize(bytes, offset, formatterResolver, out readSize);
offset += readSize;
var value = objectFormatter.Deserialize(bytes, offset, formatterResolver, out readSize);
offset += readSize;
hash.Add(key, value);
}
}
readSize = offset - startOffset;
return hash;
}
case MessagePackType.Nil:
readSize = 1;
return null;
default:
throw new InvalidOperationException("Invalid primitive bytes.");
}
}
}
}
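For reference, the IDictionary and ICollection branches above mean that a plain dictionary or collection round-trips through this primitive object formatter without a dedicated formatter being registered. A minimal sketch, assuming the vendored copy keeps the upstream MessagePack 1.x names PrimitiveObjectFormatter and StandardResolver (neither appears in this hunk), and noting that the vendored types are internal, so this only compiles from inside Datadog.Trace:

using System.Collections.Generic;
using Datadog.Trace.Vendors.MessagePack.Formatters;
using Datadog.Trace.Vendors.MessagePack.Resolvers;

var tags = new Dictionary<string, object>
{
    ["http.method"] = "GET",
    ["http.status_code"] = 200,
};

byte[] buffer = new byte[256];
var formatter = PrimitiveObjectFormatter.Instance; // assumed upstream name

// The IDictionary branch above writes a map header plus one serialized key/value pair per entry.
int written = formatter.Serialize(ref buffer, 0, tags, StandardResolver.Instance);

// The Map case of Deserialize rebuilds the payload as Dictionary<object, object>.
var restored = (Dictionary<object, object>)formatter.Deserialize(
    buffer, 0, StandardResolver.Instance, out int readSize);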


@ -0,0 +1,574 @@
//------------------------------------------------------------------------------
// <auto-generated />
// This file was automatically generated by the UpdateVendors tool.
//------------------------------------------------------------------------------
using Datadog.Trace.Vendors.MessagePack.Internal;
using System;
using System.Collections;
using System.Collections.Generic;
using System.Globalization;
using System.Text;
#if NETSTANDARD || NETFRAMEWORK
using System.Threading.Tasks;
#endif
namespace Datadog.Trace.Vendors.MessagePack.Formatters
{
// NET40 -> BigInteger, Complex, Tuple
// byte[] is special. represents bin type.
internal sealed class ByteArrayFormatter : IMessagePackFormatter<byte[]>
{
public static readonly ByteArrayFormatter Instance = new ByteArrayFormatter();
ByteArrayFormatter()
{
}
public int Serialize(ref byte[] bytes, int offset, byte[] value, IFormatterResolver formatterResolver)
{
return MessagePackBinary.WriteBytes(ref bytes, offset, value);
}
public byte[] Deserialize(byte[] bytes, int offset, IFormatterResolver formatterResolver, out int readSize)
{
return MessagePackBinary.ReadBytes(bytes, offset, out readSize);
}
}
internal sealed class NullableStringFormatter : IMessagePackFormatter<String>
{
public static readonly NullableStringFormatter Instance = new NullableStringFormatter();
NullableStringFormatter()
{
}
public int Serialize(ref byte[] bytes, int offset, String value, IFormatterResolver typeResolver)
{
return MessagePackBinary.WriteString(ref bytes, offset, value);
}
public String Deserialize(byte[] bytes, int offset, IFormatterResolver typeResolver, out int readSize)
{
return MessagePackBinary.ReadString(bytes, offset, out readSize);
}
}
internal sealed class NullableStringArrayFormatter : IMessagePackFormatter<String[]>
{
public static readonly NullableStringArrayFormatter Instance = new NullableStringArrayFormatter();
NullableStringArrayFormatter()
{
}
public int Serialize(ref byte[] bytes, int offset, String[] value, IFormatterResolver typeResolver)
{
if (value == null)
{
return MessagePackBinary.WriteNil(ref bytes, offset);
}
else
{
var startOffset = offset;
offset += MessagePackBinary.WriteArrayHeader(ref bytes, offset, value.Length);
for (int i = 0; i < value.Length; i++)
{
offset += MessagePackBinary.WriteString(ref bytes, offset, value[i]);
}
return offset - startOffset;
}
}
public String[] Deserialize(byte[] bytes, int offset, IFormatterResolver typeResolver, out int readSize)
{
if (MessagePackBinary.IsNil(bytes, offset))
{
readSize = 1;
return null;
}
else
{
var startOffset = offset;
var len = MessagePackBinary.ReadArrayHeader(bytes, offset, out readSize);
offset += readSize;
var array = new String[len];
for (int i = 0; i < array.Length; i++)
{
array[i] = MessagePackBinary.ReadString(bytes, offset, out readSize);
offset += readSize;
}
readSize = offset - startOffset;
return array;
}
}
}
internal sealed class DecimalFormatter : IMessagePackFormatter<Decimal>
{
public static readonly DecimalFormatter Instance = new DecimalFormatter();
DecimalFormatter()
{
}
public int Serialize(ref byte[] bytes, int offset, decimal value, IFormatterResolver formatterResolver)
{
return MessagePackBinary.WriteString(ref bytes, offset, value.ToString(CultureInfo.InvariantCulture));
}
public decimal Deserialize(byte[] bytes, int offset, IFormatterResolver formatterResolver, out int readSize)
{
return decimal.Parse(MessagePackBinary.ReadString(bytes, offset, out readSize), CultureInfo.InvariantCulture);
}
}
internal sealed class TimeSpanFormatter : IMessagePackFormatter<TimeSpan>
{
public static readonly IMessagePackFormatter<TimeSpan> Instance = new TimeSpanFormatter();
TimeSpanFormatter()
{
}
public int Serialize(ref byte[] bytes, int offset, TimeSpan value, IFormatterResolver formatterResolver)
{
return MessagePackBinary.WriteInt64(ref bytes, offset, value.Ticks);
}
public TimeSpan Deserialize(byte[] bytes, int offset, IFormatterResolver formatterResolver, out int readSize)
{
return new TimeSpan(MessagePackBinary.ReadInt64(bytes, offset, out readSize));
}
}
internal sealed class DateTimeOffsetFormatter : IMessagePackFormatter<DateTimeOffset>
{
public static readonly IMessagePackFormatter<DateTimeOffset> Instance = new DateTimeOffsetFormatter();
DateTimeOffsetFormatter()
{
}
public int Serialize(ref byte[] bytes, int offset, DateTimeOffset value, IFormatterResolver formatterResolver)
{
var startOffset = offset;
offset += MessagePackBinary.WriteArrayHeader(ref bytes, offset, 2);
offset += MessagePackBinary.WriteDateTime(ref bytes, offset, new DateTime(value.Ticks, DateTimeKind.Utc)); // current ticks as is
offset += MessagePackBinary.WriteInt16(ref bytes, offset, (short)value.Offset.TotalMinutes); // offset is normalized in minutes
return offset - startOffset;
}
public DateTimeOffset Deserialize(byte[] bytes, int offset, IFormatterResolver formatterResolver, out int readSize)
{
var startOffset = offset;
var count = MessagePackBinary.ReadArrayHeader(bytes, offset, out readSize);
offset += readSize;
if (count != 2) throw new InvalidOperationException("Invalid DateTimeOffset format.");
var utc = MessagePackBinary.ReadDateTime(bytes, offset, out readSize);
offset += readSize;
var dtOffsetMinutes = MessagePackBinary.ReadInt16(bytes, offset, out readSize);
offset += readSize;
readSize = offset - startOffset;
return new DateTimeOffset(utc.Ticks, TimeSpan.FromMinutes(dtOffsetMinutes));
}
}
internal sealed class GuidFormatter : IMessagePackFormatter<Guid>
{
public static readonly IMessagePackFormatter<Guid> Instance = new GuidFormatter();
GuidFormatter()
{
}
public int Serialize(ref byte[] bytes, int offset, Guid value, IFormatterResolver formatterResolver)
{
MessagePackBinary.EnsureCapacity(ref bytes, offset, 38);
bytes[offset] = MessagePackCode.Str8;
bytes[offset + 1] = unchecked((byte)36);
new GuidBits(ref value).Write(bytes, offset + 2);
return 38;
}
public Guid Deserialize(byte[] bytes, int offset, IFormatterResolver formatterResolver, out int readSize)
{
var segment = MessagePackBinary.ReadStringSegment(bytes, offset, out readSize);
return new GuidBits(segment).Value;
}
}
internal sealed class UriFormatter : IMessagePackFormatter<Uri>
{
public static readonly IMessagePackFormatter<Uri> Instance = new UriFormatter();
UriFormatter()
{
}
public int Serialize(ref byte[] bytes, int offset, Uri value, IFormatterResolver formatterResolver)
{
if (value == null)
{
return MessagePackBinary.WriteNil(ref bytes, offset);
}
else
{
return MessagePackBinary.WriteString(ref bytes, offset, value.ToString());
}
}
public Uri Deserialize(byte[] bytes, int offset, IFormatterResolver formatterResolver, out int readSize)
{
if (MessagePackBinary.IsNil(bytes, offset))
{
readSize = 1;
return null;
}
else
{
return new Uri(MessagePackBinary.ReadString(bytes, offset, out readSize), UriKind.RelativeOrAbsolute);
}
}
}
internal sealed class VersionFormatter : IMessagePackFormatter<Version>
{
public static readonly IMessagePackFormatter<Version> Instance = new VersionFormatter();
VersionFormatter()
{
}
public int Serialize(ref byte[] bytes, int offset, Version value, IFormatterResolver formatterResolver)
{
if (value == null)
{
return MessagePackBinary.WriteNil(ref bytes, offset);
}
else
{
return MessagePackBinary.WriteString(ref bytes, offset, value.ToString());
}
}
public Version Deserialize(byte[] bytes, int offset, IFormatterResolver formatterResolver, out int readSize)
{
if (MessagePackBinary.IsNil(bytes, offset))
{
readSize = 1;
return null;
}
else
{
return new Version(MessagePackBinary.ReadString(bytes, offset, out readSize));
}
}
}
internal sealed class KeyValuePairFormatter<TKey, TValue> : IMessagePackFormatter<KeyValuePair<TKey, TValue>>
{
public int Serialize(ref byte[] bytes, int offset, KeyValuePair<TKey, TValue> value, IFormatterResolver formatterResolver)
{
var startOffset = offset;
offset += MessagePackBinary.WriteArrayHeader(ref bytes, offset, 2);
offset += formatterResolver.GetFormatterWithVerify<TKey>().Serialize(ref bytes, offset, value.Key, formatterResolver);
offset += formatterResolver.GetFormatterWithVerify<TValue>().Serialize(ref bytes, offset, value.Value, formatterResolver);
return offset - startOffset;
}
public KeyValuePair<TKey, TValue> Deserialize(byte[] bytes, int offset, IFormatterResolver formatterResolver, out int readSize)
{
var startOffset = offset;
var count = MessagePackBinary.ReadArrayHeader(bytes, offset, out readSize);
offset += readSize;
if (count != 2) throw new InvalidOperationException("Invalid KeyValuePair format.");
using (MessagePackSecurity.DepthStep())
{
var key = formatterResolver.GetFormatterWithVerify<TKey>().Deserialize(bytes, offset, formatterResolver, out readSize);
offset += readSize;
var value = formatterResolver.GetFormatterWithVerify<TValue>().Deserialize(bytes, offset, formatterResolver, out readSize);
offset += readSize;
readSize = offset - startOffset;
return new KeyValuePair<TKey, TValue>(key, value);
}
}
}
internal sealed class StringBuilderFormatter : IMessagePackFormatter<StringBuilder>
{
public static readonly IMessagePackFormatter<StringBuilder> Instance = new StringBuilderFormatter();
StringBuilderFormatter()
{
}
public int Serialize(ref byte[] bytes, int offset, StringBuilder value, IFormatterResolver formatterResolver)
{
if (value == null)
{
return MessagePackBinary.WriteNil(ref bytes, offset);
}
else
{
return MessagePackBinary.WriteString(ref bytes, offset, value.ToString());
}
}
public StringBuilder Deserialize(byte[] bytes, int offset, IFormatterResolver formatterResolver, out int readSize)
{
if (MessagePackBinary.IsNil(bytes, offset))
{
readSize = 1;
return null;
}
else
{
return new StringBuilder(MessagePackBinary.ReadString(bytes, offset, out readSize));
}
}
}
internal sealed class BitArrayFormatter : IMessagePackFormatter<BitArray>
{
public static readonly IMessagePackFormatter<BitArray> Instance = new BitArrayFormatter();
BitArrayFormatter()
{
}
public int Serialize(ref byte[] bytes, int offset, BitArray value, IFormatterResolver formatterResolver)
{
if (value == null)
{
return MessagePackBinary.WriteNil(ref bytes, offset);
}
else
{
var startOffset = offset;
var len = value.Length;
offset += MessagePackBinary.WriteArrayHeader(ref bytes, offset, len);
for (int i = 0; i < len; i++)
{
offset += MessagePackBinary.WriteBoolean(ref bytes, offset, value.Get(i));
}
return offset - startOffset;
}
}
public BitArray Deserialize(byte[] bytes, int offset, IFormatterResolver formatterResolver, out int readSize)
{
if (MessagePackBinary.IsNil(bytes, offset))
{
readSize = 1;
return null;
}
else
{
var startOffset = offset;
var len = MessagePackBinary.ReadArrayHeader(bytes, offset, out readSize);
offset += readSize;
var array = new BitArray(len);
for (int i = 0; i < len; i++)
{
array[i] = MessagePackBinary.ReadBoolean(bytes, offset, out readSize);
offset += readSize;
}
readSize = offset - startOffset;
return array;
}
}
}
#if NETSTANDARD || NETFRAMEWORK
internal sealed class BigIntegerFormatter : IMessagePackFormatter<System.Numerics.BigInteger>
{
public static readonly IMessagePackFormatter<System.Numerics.BigInteger> Instance = new BigIntegerFormatter();
BigIntegerFormatter()
{
}
public int Serialize(ref byte[] bytes, int offset, System.Numerics.BigInteger value, IFormatterResolver formatterResolver)
{
return MessagePackBinary.WriteBytes(ref bytes, offset, value.ToByteArray());
}
public System.Numerics.BigInteger Deserialize(byte[] bytes, int offset, IFormatterResolver formatterResolver, out int readSize)
{
return new System.Numerics.BigInteger(MessagePackBinary.ReadBytes(bytes, offset, out readSize));
}
}
internal sealed class ComplexFormatter : IMessagePackFormatter<System.Numerics.Complex>
{
public static readonly IMessagePackFormatter<System.Numerics.Complex> Instance = new ComplexFormatter();
ComplexFormatter()
{
}
public int Serialize(ref byte[] bytes, int offset, System.Numerics.Complex value, IFormatterResolver formatterResolver)
{
var startOffset = offset;
offset += MessagePackBinary.WriteArrayHeader(ref bytes, offset, 2);
offset += MessagePackBinary.WriteDouble(ref bytes, offset, value.Real);
offset += MessagePackBinary.WriteDouble(ref bytes, offset, value.Imaginary);
return offset - startOffset;
}
public System.Numerics.Complex Deserialize(byte[] bytes, int offset, IFormatterResolver formatterResolver, out int readSize)
{
var startOffset = offset;
var count = MessagePackBinary.ReadArrayHeader(bytes, offset, out readSize);
offset += readSize;
if (count != 2) throw new InvalidOperationException("Invalid Complex format.");
var real = MessagePackBinary.ReadDouble(bytes, offset, out readSize);
offset += readSize;
var imaginary = MessagePackBinary.ReadDouble(bytes, offset, out readSize);
offset += readSize;
readSize = offset - startOffset;
return new System.Numerics.Complex(real, imaginary);
}
}
internal sealed class LazyFormatter<T> : IMessagePackFormatter<Lazy<T>>
{
public int Serialize(ref byte[] bytes, int offset, Lazy<T> value, IFormatterResolver formatterResolver)
{
if (value == null)
{
return MessagePackBinary.WriteNil(ref bytes, offset);
}
else
{
return formatterResolver.GetFormatterWithVerify<T>().Serialize(ref bytes, offset, value.Value, formatterResolver);
}
}
public Lazy<T> Deserialize(byte[] bytes, int offset, IFormatterResolver formatterResolver, out int readSize)
{
if (MessagePackBinary.IsNil(bytes, offset))
{
readSize = 1;
return null;
}
else
{
using (MessagePackSecurity.DepthStep())
{
// deserialize immediately (no delay); capturing the byte[] in the lambda would cause a memory leak
var v = formatterResolver.GetFormatterWithVerify<T>().Deserialize(bytes, offset, formatterResolver, out readSize);
return new Lazy<T>(() => v);
}
}
}
}
internal sealed class TaskUnitFormatter : IMessagePackFormatter<Task>
{
public static readonly IMessagePackFormatter<Task> Instance = new TaskUnitFormatter();
static readonly Task CompletedTask = Task.FromResult<object>(null);
TaskUnitFormatter()
{
}
public int Serialize(ref byte[] bytes, int offset, Task value, IFormatterResolver formatterResolver)
{
if (value == null)
{
return MessagePackBinary.WriteNil(ref bytes, offset);
}
else
{
value.Wait(); // block synchronously until the task completes, then write nil
return MessagePackBinary.WriteNil(ref bytes, offset);
}
}
public Task Deserialize(byte[] bytes, int offset, IFormatterResolver formatterResolver, out int readSize)
{
if (!MessagePackBinary.IsNil(bytes, offset))
{
throw new InvalidOperationException("Invalid input");
}
else
{
readSize = 1;
return CompletedTask;
}
}
}
internal sealed class TaskValueFormatter<T> : IMessagePackFormatter<Task<T>>
{
public int Serialize(ref byte[] bytes, int offset, Task<T> value, IFormatterResolver formatterResolver)
{
if (value == null)
{
return MessagePackBinary.WriteNil(ref bytes, offset);
}
else
{
// accessing value.Result below blocks until the task completes
return formatterResolver.GetFormatterWithVerify<T>().Serialize(ref bytes, offset, value.Result, formatterResolver);
}
}
public Task<T> Deserialize(byte[] bytes, int offset, IFormatterResolver formatterResolver, out int readSize)
{
if (MessagePackBinary.IsNil(bytes, offset))
{
readSize = 1;
return null;
}
else
{
var v = formatterResolver.GetFormatterWithVerify<T>().Deserialize(bytes, offset, formatterResolver, out readSize);
return Task.FromResult(v);
}
}
}
#endif
}
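As a usage illustration of the DateTimeOffsetFormatter above, which encodes a value as a two-element array of the raw ticks (written as a UTC DateTime) followed by the offset in whole minutes as an Int16. StandardResolver is the assumed upstream resolver name, and the call only compiles from inside Datadog.Trace:

using System;
using Datadog.Trace.Vendors.MessagePack.Formatters;
using Datadog.Trace.Vendors.MessagePack.Resolvers;

var value = new DateTimeOffset(2020, 7, 27, 14, 58, 39, TimeSpan.FromHours(-7));

byte[] buffer = new byte[64];
// Wire format: [array header(2)][local ticks as a UTC DateTime][offset in whole minutes as Int16]
int written = DateTimeOffsetFormatter.Instance.Serialize(
    ref buffer, 0, value, StandardResolver.Instance);

DateTimeOffset restored = DateTimeOffsetFormatter.Instance.Deserialize(
    buffer, 0, StandardResolver.Instance, out int readSize);
// restored equals value tick-for-tick, and restored.Offset is -07:00.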


@ -0,0 +1,457 @@
//------------------------------------------------------------------------------
// <auto-generated />
// This file was automatically generated by the UpdateVendors tool.
//------------------------------------------------------------------------------
#if NETSTANDARD || NETFRAMEWORK
using System;
namespace Datadog.Trace.Vendors.MessagePack.Formatters
{
internal sealed class TupleFormatter<T1> : IMessagePackFormatter<Tuple<T1>>
{
public int Serialize(ref byte[] bytes, int offset, Tuple<T1> value, IFormatterResolver formatterResolver)
{
if (value == null)
{
return MessagePackBinary.WriteNil(ref bytes, offset);
}
else
{
var startOffset = offset;
offset += MessagePackBinary.WriteArrayHeader(ref bytes, offset, 1);
offset += formatterResolver.GetFormatterWithVerify<T1>().Serialize(ref bytes, offset, value.Item1, formatterResolver);
return offset - startOffset;
}
}
public Tuple<T1> Deserialize(byte[] bytes, int offset, IFormatterResolver formatterResolver, out int readSize)
{
if (MessagePackBinary.IsNil(bytes, offset))
{
readSize = 1;
return null;
}
else
{
var startOffset = offset;
var count = MessagePackBinary.ReadArrayHeader(bytes, offset, out readSize);
if (count != 1) throw new InvalidOperationException("Invalid Tuple count");
offset += readSize;
using (MessagePackSecurity.DepthStep())
{
var item1 = formatterResolver.GetFormatterWithVerify<T1>().Deserialize(bytes, offset, formatterResolver, out readSize);
offset += readSize;
readSize = offset - startOffset;
return new Tuple<T1>(item1);
}
}
}
}
internal sealed class TupleFormatter<T1, T2> : IMessagePackFormatter<Tuple<T1, T2>>
{
public int Serialize(ref byte[] bytes, int offset, Tuple<T1, T2> value, IFormatterResolver formatterResolver)
{
if (value == null)
{
return MessagePackBinary.WriteNil(ref bytes, offset);
}
else
{
var startOffset = offset;
offset += MessagePackBinary.WriteArrayHeader(ref bytes, offset, 2);
offset += formatterResolver.GetFormatterWithVerify<T1>().Serialize(ref bytes, offset, value.Item1, formatterResolver);
offset += formatterResolver.GetFormatterWithVerify<T2>().Serialize(ref bytes, offset, value.Item2, formatterResolver);
return offset - startOffset;
}
}
public Tuple<T1, T2> Deserialize(byte[] bytes, int offset, IFormatterResolver formatterResolver, out int readSize)
{
if (MessagePackBinary.IsNil(bytes, offset))
{
readSize = 1;
return null;
}
else
{
var startOffset = offset;
var count = MessagePackBinary.ReadArrayHeader(bytes, offset, out readSize);
if (count != 2) throw new InvalidOperationException("Invalid Tuple count");
offset += readSize;
using (MessagePackSecurity.DepthStep())
{
var item1 = formatterResolver.GetFormatterWithVerify<T1>().Deserialize(bytes, offset, formatterResolver, out readSize);
offset += readSize;
var item2 = formatterResolver.GetFormatterWithVerify<T2>().Deserialize(bytes, offset, formatterResolver, out readSize);
offset += readSize;
readSize = offset - startOffset;
return new Tuple<T1, T2>(item1, item2);
}
}
}
}
internal sealed class TupleFormatter<T1, T2, T3> : IMessagePackFormatter<Tuple<T1, T2, T3>>
{
public int Serialize(ref byte[] bytes, int offset, Tuple<T1, T2, T3> value, IFormatterResolver formatterResolver)
{
if (value == null)
{
return MessagePackBinary.WriteNil(ref bytes, offset);
}
else
{
var startOffset = offset;
offset += MessagePackBinary.WriteArrayHeader(ref bytes, offset, 3);
offset += formatterResolver.GetFormatterWithVerify<T1>().Serialize(ref bytes, offset, value.Item1, formatterResolver);
offset += formatterResolver.GetFormatterWithVerify<T2>().Serialize(ref bytes, offset, value.Item2, formatterResolver);
offset += formatterResolver.GetFormatterWithVerify<T3>().Serialize(ref bytes, offset, value.Item3, formatterResolver);
return offset - startOffset;
}
}
public Tuple<T1, T2, T3> Deserialize(byte[] bytes, int offset, IFormatterResolver formatterResolver, out int readSize)
{
if (MessagePackBinary.IsNil(bytes, offset))
{
readSize = 1;
return null;
}
else
{
var startOffset = offset;
var count = MessagePackBinary.ReadArrayHeader(bytes, offset, out readSize);
if (count != 3) throw new InvalidOperationException("Invalid Tuple count");
offset += readSize;
using (MessagePackSecurity.DepthStep())
{
var item1 = formatterResolver.GetFormatterWithVerify<T1>().Deserialize(bytes, offset, formatterResolver, out readSize);
offset += readSize;
var item2 = formatterResolver.GetFormatterWithVerify<T2>().Deserialize(bytes, offset, formatterResolver, out readSize);
offset += readSize;
var item3 = formatterResolver.GetFormatterWithVerify<T3>().Deserialize(bytes, offset, formatterResolver, out readSize);
offset += readSize;
readSize = offset - startOffset;
return new Tuple<T1, T2, T3>(item1, item2, item3);
}
}
}
}
internal sealed class TupleFormatter<T1, T2, T3, T4> : IMessagePackFormatter<Tuple<T1, T2, T3, T4>>
{
public int Serialize(ref byte[] bytes, int offset, Tuple<T1, T2, T3, T4> value, IFormatterResolver formatterResolver)
{
if (value == null)
{
return MessagePackBinary.WriteNil(ref bytes, offset);
}
else
{
var startOffset = offset;
offset += MessagePackBinary.WriteArrayHeader(ref bytes, offset, 4);
offset += formatterResolver.GetFormatterWithVerify<T1>().Serialize(ref bytes, offset, value.Item1, formatterResolver);
offset += formatterResolver.GetFormatterWithVerify<T2>().Serialize(ref bytes, offset, value.Item2, formatterResolver);
offset += formatterResolver.GetFormatterWithVerify<T3>().Serialize(ref bytes, offset, value.Item3, formatterResolver);
offset += formatterResolver.GetFormatterWithVerify<T4>().Serialize(ref bytes, offset, value.Item4, formatterResolver);
return offset - startOffset;
}
}
public Tuple<T1, T2, T3, T4> Deserialize(byte[] bytes, int offset, IFormatterResolver formatterResolver, out int readSize)
{
if (MessagePackBinary.IsNil(bytes, offset))
{
readSize = 1;
return null;
}
else
{
var startOffset = offset;
var count = MessagePackBinary.ReadArrayHeader(bytes, offset, out readSize);
if (count != 4) throw new InvalidOperationException("Invalid Tuple count");
offset += readSize;
using (MessagePackSecurity.DepthStep())
{
var item1 = formatterResolver.GetFormatterWithVerify<T1>().Deserialize(bytes, offset, formatterResolver, out readSize);
offset += readSize;
var item2 = formatterResolver.GetFormatterWithVerify<T2>().Deserialize(bytes, offset, formatterResolver, out readSize);
offset += readSize;
var item3 = formatterResolver.GetFormatterWithVerify<T3>().Deserialize(bytes, offset, formatterResolver, out readSize);
offset += readSize;
var item4 = formatterResolver.GetFormatterWithVerify<T4>().Deserialize(bytes, offset, formatterResolver, out readSize);
offset += readSize;
readSize = offset - startOffset;
return new Tuple<T1, T2, T3, T4>(item1, item2, item3, item4);
}
}
}
}
internal sealed class TupleFormatter<T1, T2, T3, T4, T5> : IMessagePackFormatter<Tuple<T1, T2, T3, T4, T5>>
{
public int Serialize(ref byte[] bytes, int offset, Tuple<T1, T2, T3, T4, T5> value, IFormatterResolver formatterResolver)
{
if (value == null)
{
return MessagePackBinary.WriteNil(ref bytes, offset);
}
else
{
var startOffset = offset;
offset += MessagePackBinary.WriteArrayHeader(ref bytes, offset, 5);
offset += formatterResolver.GetFormatterWithVerify<T1>().Serialize(ref bytes, offset, value.Item1, formatterResolver);
offset += formatterResolver.GetFormatterWithVerify<T2>().Serialize(ref bytes, offset, value.Item2, formatterResolver);
offset += formatterResolver.GetFormatterWithVerify<T3>().Serialize(ref bytes, offset, value.Item3, formatterResolver);
offset += formatterResolver.GetFormatterWithVerify<T4>().Serialize(ref bytes, offset, value.Item4, formatterResolver);
offset += formatterResolver.GetFormatterWithVerify<T5>().Serialize(ref bytes, offset, value.Item5, formatterResolver);
return offset - startOffset;
}
}
public Tuple<T1, T2, T3, T4, T5> Deserialize(byte[] bytes, int offset, IFormatterResolver formatterResolver, out int readSize)
{
if (MessagePackBinary.IsNil(bytes, offset))
{
readSize = 1;
return null;
}
else
{
var startOffset = offset;
var count = MessagePackBinary.ReadArrayHeader(bytes, offset, out readSize);
if (count != 5) throw new InvalidOperationException("Invalid Tuple count");
offset += readSize;
using (MessagePackSecurity.DepthStep())
{
var item1 = formatterResolver.GetFormatterWithVerify<T1>().Deserialize(bytes, offset, formatterResolver, out readSize);
offset += readSize;
var item2 = formatterResolver.GetFormatterWithVerify<T2>().Deserialize(bytes, offset, formatterResolver, out readSize);
offset += readSize;
var item3 = formatterResolver.GetFormatterWithVerify<T3>().Deserialize(bytes, offset, formatterResolver, out readSize);
offset += readSize;
var item4 = formatterResolver.GetFormatterWithVerify<T4>().Deserialize(bytes, offset, formatterResolver, out readSize);
offset += readSize;
var item5 = formatterResolver.GetFormatterWithVerify<T5>().Deserialize(bytes, offset, formatterResolver, out readSize);
offset += readSize;
readSize = offset - startOffset;
return new Tuple<T1, T2, T3, T4, T5>(item1, item2, item3, item4, item5);
}
}
}
}
internal sealed class TupleFormatter<T1, T2, T3, T4, T5, T6> : IMessagePackFormatter<Tuple<T1, T2, T3, T4, T5, T6>>
{
public int Serialize(ref byte[] bytes, int offset, Tuple<T1, T2, T3, T4, T5, T6> value, IFormatterResolver formatterResolver)
{
if (value == null)
{
return MessagePackBinary.WriteNil(ref bytes, offset);
}
else
{
var startOffset = offset;
offset += MessagePackBinary.WriteArrayHeader(ref bytes, offset, 6);
offset += formatterResolver.GetFormatterWithVerify<T1>().Serialize(ref bytes, offset, value.Item1, formatterResolver);
offset += formatterResolver.GetFormatterWithVerify<T2>().Serialize(ref bytes, offset, value.Item2, formatterResolver);
offset += formatterResolver.GetFormatterWithVerify<T3>().Serialize(ref bytes, offset, value.Item3, formatterResolver);
offset += formatterResolver.GetFormatterWithVerify<T4>().Serialize(ref bytes, offset, value.Item4, formatterResolver);
offset += formatterResolver.GetFormatterWithVerify<T5>().Serialize(ref bytes, offset, value.Item5, formatterResolver);
offset += formatterResolver.GetFormatterWithVerify<T6>().Serialize(ref bytes, offset, value.Item6, formatterResolver);
return offset - startOffset;
}
}
public Tuple<T1, T2, T3, T4, T5, T6> Deserialize(byte[] bytes, int offset, IFormatterResolver formatterResolver, out int readSize)
{
if (MessagePackBinary.IsNil(bytes, offset))
{
readSize = 1;
return null;
}
else
{
var startOffset = offset;
var count = MessagePackBinary.ReadArrayHeader(bytes, offset, out readSize);
if (count != 6) throw new InvalidOperationException("Invalid Tuple count");
offset += readSize;
using (MessagePackSecurity.DepthStep())
{
var item1 = formatterResolver.GetFormatterWithVerify<T1>().Deserialize(bytes, offset, formatterResolver, out readSize);
offset += readSize;
var item2 = formatterResolver.GetFormatterWithVerify<T2>().Deserialize(bytes, offset, formatterResolver, out readSize);
offset += readSize;
var item3 = formatterResolver.GetFormatterWithVerify<T3>().Deserialize(bytes, offset, formatterResolver, out readSize);
offset += readSize;
var item4 = formatterResolver.GetFormatterWithVerify<T4>().Deserialize(bytes, offset, formatterResolver, out readSize);
offset += readSize;
var item5 = formatterResolver.GetFormatterWithVerify<T5>().Deserialize(bytes, offset, formatterResolver, out readSize);
offset += readSize;
var item6 = formatterResolver.GetFormatterWithVerify<T6>().Deserialize(bytes, offset, formatterResolver, out readSize);
offset += readSize;
readSize = offset - startOffset;
return new Tuple<T1, T2, T3, T4, T5, T6>(item1, item2, item3, item4, item5, item6);
}
}
}
}
internal sealed class TupleFormatter<T1, T2, T3, T4, T5, T6, T7> : IMessagePackFormatter<Tuple<T1, T2, T3, T4, T5, T6, T7>>
{
public int Serialize(ref byte[] bytes, int offset, Tuple<T1, T2, T3, T4, T5, T6, T7> value, IFormatterResolver formatterResolver)
{
if (value == null)
{
return MessagePackBinary.WriteNil(ref bytes, offset);
}
else
{
var startOffset = offset;
offset += MessagePackBinary.WriteArrayHeader(ref bytes, offset, 7);
offset += formatterResolver.GetFormatterWithVerify<T1>().Serialize(ref bytes, offset, value.Item1, formatterResolver);
offset += formatterResolver.GetFormatterWithVerify<T2>().Serialize(ref bytes, offset, value.Item2, formatterResolver);
offset += formatterResolver.GetFormatterWithVerify<T3>().Serialize(ref bytes, offset, value.Item3, formatterResolver);
offset += formatterResolver.GetFormatterWithVerify<T4>().Serialize(ref bytes, offset, value.Item4, formatterResolver);
offset += formatterResolver.GetFormatterWithVerify<T5>().Serialize(ref bytes, offset, value.Item5, formatterResolver);
offset += formatterResolver.GetFormatterWithVerify<T6>().Serialize(ref bytes, offset, value.Item6, formatterResolver);
offset += formatterResolver.GetFormatterWithVerify<T7>().Serialize(ref bytes, offset, value.Item7, formatterResolver);
return offset - startOffset;
}
}
public Tuple<T1, T2, T3, T4, T5, T6, T7> Deserialize(byte[] bytes, int offset, IFormatterResolver formatterResolver, out int readSize)
{
if (MessagePackBinary.IsNil(bytes, offset))
{
readSize = 1;
return null;
}
else
{
var startOffset = offset;
var count = MessagePackBinary.ReadArrayHeader(bytes, offset, out readSize);
if (count != 7) throw new InvalidOperationException("Invalid Tuple count");
offset += readSize;
using (MessagePackSecurity.DepthStep())
{
var item1 = formatterResolver.GetFormatterWithVerify<T1>().Deserialize(bytes, offset, formatterResolver, out readSize);
offset += readSize;
var item2 = formatterResolver.GetFormatterWithVerify<T2>().Deserialize(bytes, offset, formatterResolver, out readSize);
offset += readSize;
var item3 = formatterResolver.GetFormatterWithVerify<T3>().Deserialize(bytes, offset, formatterResolver, out readSize);
offset += readSize;
var item4 = formatterResolver.GetFormatterWithVerify<T4>().Deserialize(bytes, offset, formatterResolver, out readSize);
offset += readSize;
var item5 = formatterResolver.GetFormatterWithVerify<T5>().Deserialize(bytes, offset, formatterResolver, out readSize);
offset += readSize;
var item6 = formatterResolver.GetFormatterWithVerify<T6>().Deserialize(bytes, offset, formatterResolver, out readSize);
offset += readSize;
var item7 = formatterResolver.GetFormatterWithVerify<T7>().Deserialize(bytes, offset, formatterResolver, out readSize);
offset += readSize;
readSize = offset - startOffset;
return new Tuple<T1, T2, T3, T4, T5, T6, T7>(item1, item2, item3, item4, item5, item6, item7);
}
}
}
}
internal sealed class TupleFormatter<T1, T2, T3, T4, T5, T6, T7, TRest> : IMessagePackFormatter<Tuple<T1, T2, T3, T4, T5, T6, T7, TRest>>
{
public int Serialize(ref byte[] bytes, int offset, Tuple<T1, T2, T3, T4, T5, T6, T7, TRest> value, IFormatterResolver formatterResolver)
{
if (value == null)
{
return MessagePackBinary.WriteNil(ref bytes, offset);
}
else
{
var startOffset = offset;
offset += MessagePackBinary.WriteArrayHeader(ref bytes, offset, 8);
offset += formatterResolver.GetFormatterWithVerify<T1>().Serialize(ref bytes, offset, value.Item1, formatterResolver);
offset += formatterResolver.GetFormatterWithVerify<T2>().Serialize(ref bytes, offset, value.Item2, formatterResolver);
offset += formatterResolver.GetFormatterWithVerify<T3>().Serialize(ref bytes, offset, value.Item3, formatterResolver);
offset += formatterResolver.GetFormatterWithVerify<T4>().Serialize(ref bytes, offset, value.Item4, formatterResolver);
offset += formatterResolver.GetFormatterWithVerify<T5>().Serialize(ref bytes, offset, value.Item5, formatterResolver);
offset += formatterResolver.GetFormatterWithVerify<T6>().Serialize(ref bytes, offset, value.Item6, formatterResolver);
offset += formatterResolver.GetFormatterWithVerify<T7>().Serialize(ref bytes, offset, value.Item7, formatterResolver);
offset += formatterResolver.GetFormatterWithVerify<TRest>().Serialize(ref bytes, offset, value.Rest, formatterResolver);
return offset - startOffset;
}
}
public Tuple<T1, T2, T3, T4, T5, T6, T7, TRest> Deserialize(byte[] bytes, int offset, IFormatterResolver formatterResolver, out int readSize)
{
if (MessagePackBinary.IsNil(bytes, offset))
{
readSize = 1;
return null;
}
else
{
var startOffset = offset;
var count = MessagePackBinary.ReadArrayHeader(bytes, offset, out readSize);
if (count != 8) throw new InvalidOperationException("Invalid Tuple count");
offset += readSize;
using (MessagePackSecurity.DepthStep())
{
var item1 = formatterResolver.GetFormatterWithVerify<T1>().Deserialize(bytes, offset, formatterResolver, out readSize);
offset += readSize;
var item2 = formatterResolver.GetFormatterWithVerify<T2>().Deserialize(bytes, offset, formatterResolver, out readSize);
offset += readSize;
var item3 = formatterResolver.GetFormatterWithVerify<T3>().Deserialize(bytes, offset, formatterResolver, out readSize);
offset += readSize;
var item4 = formatterResolver.GetFormatterWithVerify<T4>().Deserialize(bytes, offset, formatterResolver, out readSize);
offset += readSize;
var item5 = formatterResolver.GetFormatterWithVerify<T5>().Deserialize(bytes, offset, formatterResolver, out readSize);
offset += readSize;
var item6 = formatterResolver.GetFormatterWithVerify<T6>().Deserialize(bytes, offset, formatterResolver, out readSize);
offset += readSize;
var item7 = formatterResolver.GetFormatterWithVerify<T7>().Deserialize(bytes, offset, formatterResolver, out readSize);
offset += readSize;
var item8 = formatterResolver.GetFormatterWithVerify<TRest>().Deserialize(bytes, offset, formatterResolver, out readSize);
offset += readSize;
readSize = offset - startOffset;
return new Tuple<T1, T2, T3, T4, T5, T6, T7, TRest>(item1, item2, item3, item4, item5, item6, item7, item8);
}
}
}
}
}
#endif
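A short sketch of the tuple formatters above: each arity is written as a fixed-length array, and the deserializer rejects any other length. StandardResolver is again an assumed name carried over from upstream MessagePack 1.x:

using System;
using Datadog.Trace.Vendors.MessagePack.Formatters;
using Datadog.Trace.Vendors.MessagePack.Resolvers;

var formatter = new TupleFormatter<string, long>();
var pair = Tuple.Create("operation.name", 123456789L);

byte[] buffer = new byte[64];
// Non-null tuples become a fixed-length array ([Item1, Item2]); a null tuple becomes nil.
int written = formatter.Serialize(ref buffer, 0, pair, StandardResolver.Instance);

var restored = formatter.Deserialize(buffer, 0, StandardResolver.Instance, out int readSize);
// A payload whose array header is not exactly 2 throws "Invalid Tuple count".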


@ -0,0 +1,71 @@
<#@ template debug="false" hostspecific="false" language="C#" #>
<#@ assembly name="System.Core" #>
<#@ import namespace="System.Linq" #>
<#@ import namespace="System.Text" #>
<#@ import namespace="System.Collections.Generic" #>
<#@ output extension=".cs" #>
#if NETSTANDARD || NETFRAMEWORK
using System;
namespace MessagePack.Formatters
{
<# for(var i = 1; i <= 8; i++) {
Func<int, string> toT = x => "T" + ((x == 8) ? "Rest" : x.ToString());
Func<int, string> toItem = x => ((x == 8) ? "Rest" : "Item" + x);
var ts = string.Join(", ", Enumerable.Range(1, i).Select(x => toT(x)));
var t = "Tuple<" + ts + ">";
#>
public sealed class TupleFormatter<<#= ts #>> : IMessagePackFormatter<<#= t #>>
{
public int Serialize(ref byte[] bytes, int offset, <#= t #> value, IFormatterResolver formatterResolver)
{
if (value == null)
{
return MessagePackBinary.WriteNil(ref bytes, offset);
}
else
{
var startOffset = offset;
offset += MessagePackBinary.WriteArrayHeader(ref bytes, offset, <#= i #>);
<# for(var j = 1; j <= i; j++) { #>
offset += formatterResolver.GetFormatterWithVerify<<#= toT(j) #>>().Serialize(ref bytes, offset, value.<#= toItem(j) #>, formatterResolver);
<# } #>
return offset - startOffset;
}
}
public <#= t #> Deserialize(byte[] bytes, int offset, IFormatterResolver formatterResolver, out int readSize)
{
if (MessagePackBinary.IsNil(bytes, offset))
{
readSize = 1;
return null;
}
else
{
var startOffset = offset;
var count = MessagePackBinary.ReadArrayHeader(bytes, offset, out readSize);
if (count != <#= i #>) throw new InvalidOperationException("Invalid Tuple count");
offset += readSize;
using (MessagePackSecurity.DepthStep())
{
<# for(var j = 1; j <= i; j++) { #>
var item<#= j #> = formatterResolver.GetFormatterWithVerify<<#= toT(j) #>>().Deserialize(bytes, offset, formatterResolver, out readSize);
offset += readSize;
<# } #>
readSize = offset - startOffset;
return new Tuple<<#= ts #>>(<#= string.Join(", ", Enumerable.Range(1, i).Select(x => "item" + x)) #>);
}
}
}
}
<# } #>
}
#endif


@ -0,0 +1,62 @@
<#@ template debug="false" hostspecific="false" language="C#" #>
<#@ assembly name="System.Core" #>
<#@ import namespace="System.Linq" #>
<#@ import namespace="System.Text" #>
<#@ import namespace="System.Collections.Generic" #>
<#@ output extension=".cs" #>
#if NETSTANDARD || NETFRAMEWORK
using System;
namespace MessagePack.Formatters
{
<# for(var i = 1; i <= 8; i++) {
Func<int, string> toT = x => "T" + ((x == 8) ? "Rest" : x.ToString());
Func<int, string> toItem = x => ((x == 8) ? "Rest" : "Item" + x);
var ts = string.Join(", ", Enumerable.Range(1, i).Select(x => toT(x)));
var t = "ValueTuple<" + ts + ">";
#>
public sealed class ValueTupleFormatter<<#= ts #>> : IMessagePackFormatter<<#= t #>><#= (t.Contains("TRest") ? " where TRest : struct" : "") #>
{
public int Serialize(ref byte[] bytes, int offset, <#= t #> value, IFormatterResolver formatterResolver)
{
var startOffset = offset;
offset += MessagePackBinary.WriteArrayHeader(ref bytes, offset, <#= i #>);
<# for(var j = 1; j <= i; j++) { #>
offset += formatterResolver.GetFormatterWithVerify<<#= toT(j) #>>().Serialize(ref bytes, offset, value.<#= toItem(j) #>, formatterResolver);
<# } #>
return offset - startOffset;
}
public <#= t #> Deserialize(byte[] bytes, int offset, IFormatterResolver formatterResolver, out int readSize)
{
if (MessagePackBinary.IsNil(bytes, offset))
{
throw new InvalidOperationException("Data is Nil, ValueTuple can not be null.");
}
else
{
var startOffset = offset;
var count = MessagePackBinary.ReadArrayHeader(bytes, offset, out readSize);
if (count != <#= i #>) throw new InvalidOperationException("Invalid ValueTuple count");
offset += readSize;
using (MessagePackSecurity.DepthStep())
{
<# for(var j = 1; j <= i; j++) { #>
var item<#= j #> = formatterResolver.GetFormatterWithVerify<<#= toT(j) #>>().Deserialize(bytes, offset, formatterResolver, out readSize);
offset += readSize;
<# } #>
readSize = offset - startOffset;
return new ValueTuple<<#= ts #>>(<#= string.Join(", ", Enumerable.Range(1, i).Select(x => "item" + x)) #>);
}
}
}
}
<# } #>
}
#endif


@ -0,0 +1,485 @@
//------------------------------------------------------------------------------
// <auto-generated />
// This file was automatically generated by the UpdateVendors tool.
//------------------------------------------------------------------------------
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
// <auto-generated />
/*
The xxHash32 implementation is based on the code published by Yann Collet:
https://raw.githubusercontent.com/Cyan4973/xxHash/5c174cfa4e45a42f94082dc0d4539b39696afea1/xxhash.c
xxHash - Fast Hash algorithm
Copyright (C) 2012-2016, Yann Collet
BSD 2-Clause License (http://www.opensource.org/licenses/bsd-license.php)
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above
copyright notice, this list of conditions and the following disclaimer
in the documentation and/or other materials provided with the
distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
You can contact the author at :
- xxHash homepage: http://www.xxhash.com
- xxHash source repository : https://github.com/Cyan4973/xxHash
*/
#if !NETCOREAPP
using System.Collections.Generic;
using System.ComponentModel;
using System.Numerics;
using System.Runtime.CompilerServices;
using System.Security.Cryptography;
namespace System
{
// xxHash32 is used for the hash code.
// https://github.com/Cyan4973/xxHash
internal struct HashCode
{
private static readonly uint s_seed = GenerateGlobalSeed();
private const uint Prime1 = 2654435761U;
private const uint Prime2 = 2246822519U;
private const uint Prime3 = 3266489917U;
private const uint Prime4 = 668265263U;
private const uint Prime5 = 374761393U;
private uint _v1, _v2, _v3, _v4;
private uint _queue1, _queue2, _queue3;
private uint _length;
private static uint GenerateGlobalSeed()
{
var bytes = new byte[4];
using (var rng = RandomNumberGenerator.Create())
{
rng.GetBytes(bytes);
}
return BitConverter.ToUInt32(bytes, 0);
}
public static int Combine<T1>(T1 value1)
{
unchecked
{
// Provide a way of diffusing bits from something with a limited
// input hash space. For example, many enums only have a few
// possible hashes, only using the bottom few bits of the code. Some
// collections are built on the assumption that hashes are spread
// over a larger space, so diffusing the bits may help the
// collection work more efficiently.
uint hc1 = (uint)(value1?.GetHashCode() ?? 0);
uint hash = MixEmptyState();
hash += 4;
hash = QueueRound(hash, hc1);
hash = MixFinal(hash);
return (int)hash;
}
}
public static int Combine<T1, T2>(T1 value1, T2 value2)
{
unchecked
{
uint hc1 = (uint)(value1?.GetHashCode() ?? 0);
uint hc2 = (uint)(value2?.GetHashCode() ?? 0);
uint hash = MixEmptyState();
hash += 8;
hash = QueueRound(hash, hc1);
hash = QueueRound(hash, hc2);
hash = MixFinal(hash);
return (int)hash;
}
}
public static int Combine<T1, T2, T3>(T1 value1, T2 value2, T3 value3)
{
unchecked
{
uint hc1 = (uint)(value1?.GetHashCode() ?? 0);
uint hc2 = (uint)(value2?.GetHashCode() ?? 0);
uint hc3 = (uint)(value3?.GetHashCode() ?? 0);
uint hash = MixEmptyState();
hash += 12;
hash = QueueRound(hash, hc1);
hash = QueueRound(hash, hc2);
hash = QueueRound(hash, hc3);
hash = MixFinal(hash);
return (int)hash;
}
}
public static int Combine<T1, T2, T3, T4>(T1 value1, T2 value2, T3 value3, T4 value4)
{
unchecked
{
uint hc1 = (uint)(value1?.GetHashCode() ?? 0);
uint hc2 = (uint)(value2?.GetHashCode() ?? 0);
uint hc3 = (uint)(value3?.GetHashCode() ?? 0);
uint hc4 = (uint)(value4?.GetHashCode() ?? 0);
Initialize(out uint v1, out uint v2, out uint v3, out uint v4);
v1 = Round(v1, hc1);
v2 = Round(v2, hc2);
v3 = Round(v3, hc3);
v4 = Round(v4, hc4);
uint hash = MixState(v1, v2, v3, v4);
hash += 16;
hash = MixFinal(hash);
return (int)hash;
}
}
public static int Combine<T1, T2, T3, T4, T5>(T1 value1, T2 value2, T3 value3, T4 value4, T5 value5)
{
unchecked
{
uint hc1 = (uint)(value1?.GetHashCode() ?? 0);
uint hc2 = (uint)(value2?.GetHashCode() ?? 0);
uint hc3 = (uint)(value3?.GetHashCode() ?? 0);
uint hc4 = (uint)(value4?.GetHashCode() ?? 0);
uint hc5 = (uint)(value5?.GetHashCode() ?? 0);
Initialize(out uint v1, out uint v2, out uint v3, out uint v4);
v1 = Round(v1, hc1);
v2 = Round(v2, hc2);
v3 = Round(v3, hc3);
v4 = Round(v4, hc4);
uint hash = MixState(v1, v2, v3, v4);
hash += 20;
hash = QueueRound(hash, hc5);
hash = MixFinal(hash);
return (int)hash;
}
}
public static int Combine<T1, T2, T3, T4, T5, T6>(T1 value1, T2 value2, T3 value3, T4 value4, T5 value5, T6 value6)
{
unchecked
{
uint hc1 = (uint)(value1?.GetHashCode() ?? 0);
uint hc2 = (uint)(value2?.GetHashCode() ?? 0);
uint hc3 = (uint)(value3?.GetHashCode() ?? 0);
uint hc4 = (uint)(value4?.GetHashCode() ?? 0);
uint hc5 = (uint)(value5?.GetHashCode() ?? 0);
uint hc6 = (uint)(value6?.GetHashCode() ?? 0);
Initialize(out uint v1, out uint v2, out uint v3, out uint v4);
v1 = Round(v1, hc1);
v2 = Round(v2, hc2);
v3 = Round(v3, hc3);
v4 = Round(v4, hc4);
uint hash = MixState(v1, v2, v3, v4);
hash += 24;
hash = QueueRound(hash, hc5);
hash = QueueRound(hash, hc6);
hash = MixFinal(hash);
return (int)hash;
}
}
public static int Combine<T1, T2, T3, T4, T5, T6, T7>(T1 value1, T2 value2, T3 value3, T4 value4, T5 value5, T6 value6, T7 value7)
{
unchecked
{
uint hc1 = (uint)(value1?.GetHashCode() ?? 0);
uint hc2 = (uint)(value2?.GetHashCode() ?? 0);
uint hc3 = (uint)(value3?.GetHashCode() ?? 0);
uint hc4 = (uint)(value4?.GetHashCode() ?? 0);
uint hc5 = (uint)(value5?.GetHashCode() ?? 0);
uint hc6 = (uint)(value6?.GetHashCode() ?? 0);
uint hc7 = (uint)(value7?.GetHashCode() ?? 0);
Initialize(out uint v1, out uint v2, out uint v3, out uint v4);
v1 = Round(v1, hc1);
v2 = Round(v2, hc2);
v3 = Round(v3, hc3);
v4 = Round(v4, hc4);
uint hash = MixState(v1, v2, v3, v4);
hash += 28;
hash = QueueRound(hash, hc5);
hash = QueueRound(hash, hc6);
hash = QueueRound(hash, hc7);
hash = MixFinal(hash);
return (int)hash;
}
}
public static int Combine<T1, T2, T3, T4, T5, T6, T7, T8>(T1 value1, T2 value2, T3 value3, T4 value4, T5 value5, T6 value6, T7 value7, T8 value8)
{
unchecked
{
uint hc1 = (uint)(value1?.GetHashCode() ?? 0);
uint hc2 = (uint)(value2?.GetHashCode() ?? 0);
uint hc3 = (uint)(value3?.GetHashCode() ?? 0);
uint hc4 = (uint)(value4?.GetHashCode() ?? 0);
uint hc5 = (uint)(value5?.GetHashCode() ?? 0);
uint hc6 = (uint)(value6?.GetHashCode() ?? 0);
uint hc7 = (uint)(value7?.GetHashCode() ?? 0);
uint hc8 = (uint)(value8?.GetHashCode() ?? 0);
Initialize(out uint v1, out uint v2, out uint v3, out uint v4);
v1 = Round(v1, hc1);
v2 = Round(v2, hc2);
v3 = Round(v3, hc3);
v4 = Round(v4, hc4);
v1 = Round(v1, hc5);
v2 = Round(v2, hc6);
v3 = Round(v3, hc7);
v4 = Round(v4, hc8);
uint hash = MixState(v1, v2, v3, v4);
hash += 32;
hash = MixFinal(hash);
return (int)hash;
}
}
[MethodImpl(MethodImplOptions.AggressiveInlining)]
private static void Initialize(out uint v1, out uint v2, out uint v3, out uint v4)
{
unchecked
{
v1 = s_seed + Prime1 + Prime2;
v2 = s_seed + Prime2;
v3 = s_seed;
v4 = s_seed - Prime1;
}
}
[MethodImpl(MethodImplOptions.AggressiveInlining)]
private static uint Round(uint hash, uint input)
{
unchecked
{
return BitOperations.RotateLeft(hash + input * Prime2, 13) * Prime1;
}
}
[MethodImpl(MethodImplOptions.AggressiveInlining)]
private static uint QueueRound(uint hash, uint queuedValue)
{
unchecked
{
return BitOperations.RotateLeft(hash + queuedValue * Prime3, 17) * Prime4;
}
}
[MethodImpl(MethodImplOptions.AggressiveInlining)]
private static uint MixState(uint v1, uint v2, uint v3, uint v4)
{
unchecked
{
return BitOperations.RotateLeft(v1, 1) + BitOperations.RotateLeft(v2, 7) + BitOperations.RotateLeft(v3, 12) + BitOperations.RotateLeft(v4, 18);
}
}
private static uint MixEmptyState()
{
unchecked
{
return s_seed + Prime5;
}
}
[MethodImpl(MethodImplOptions.AggressiveInlining)]
private static uint MixFinal(uint hash)
{
unchecked
{
hash ^= hash >> 15;
hash *= Prime2;
hash ^= hash >> 13;
hash *= Prime3;
hash ^= hash >> 16;
return hash;
}
}
public void Add<T>(T value)
{
Add(value?.GetHashCode() ?? 0);
}
public void Add<T>(T value, IEqualityComparer<T> comparer)
{
Add(comparer != null ? comparer.GetHashCode(value) : (value?.GetHashCode() ?? 0));
}
private void Add(int value)
{
unchecked
{
// The original xxHash works as follows:
// 0. Initialize immediately. We can't do this in a struct (no
// default ctor).
// 1. Accumulate blocks of length 16 (4 uints) into 4 accumulators.
// 2. Accumulate remaining blocks of length 4 (1 uint) into the
// hash.
// 3. Accumulate remaining blocks of length 1 into the hash.
// There is no need for #3 as this type only accepts ints. _queue1,
// _queue2 and _queue3 are basically a buffer so that when
// ToHashCode is called we can execute #2 correctly.
// We need to initialize the xxHash32 state (_v1 to _v4) lazily (see
// #0) and the last place that can be done, if you look at the
// original code is just before the first block of 16 bytes is mixed
// in. The xxHash32 state is never used for streams containing fewer
// than 16 bytes.
// To see what's really going on here, have a look at the Combine
// methods.
uint val = (uint)value;
// Storing the value of _length locally shaves off quite a few bytes
// in the resulting machine code.
uint previousLength = _length++;
uint position = previousLength % 4;
// Switch can't be inlined.
if (position == 0)
_queue1 = val;
else if (position == 1)
_queue2 = val;
else if (position == 2)
_queue3 = val;
else // position == 3
{
if (previousLength == 3)
Initialize(out _v1, out _v2, out _v3, out _v4);
_v1 = Round(_v1, _queue1);
_v2 = Round(_v2, _queue2);
_v3 = Round(_v3, _queue3);
_v4 = Round(_v4, val);
}
}
}
public int ToHashCode()
{
unchecked
{
// Storing the value of _length locally shaves off quite a few bytes
// in the resulting machine code.
uint length = _length;
// position refers to the *next* queue position in this method, so
// position == 1 means that _queue1 is populated; _queue2 would have
// been populated on the next call to Add.
uint position = length % 4;
// If the length is less than 4, _v1 to _v4 don't contain anything
// yet. xxHash32 treats this differently.
uint hash = length < 4 ? MixEmptyState() : MixState(_v1, _v2, _v3, _v4);
// _length is incremented once per Add(Int32) and is therefore 4
// times too small (xxHash length is in bytes, not ints).
hash += length * 4;
// Mix what remains in the queue
// Switch can't be inlined right now, so use as few branches as
// possible by manually excluding impossible scenarios (position > 1
// is always false if position is not > 0).
if (position > 0)
{
hash = QueueRound(hash, _queue1);
if (position > 1)
{
hash = QueueRound(hash, _queue2);
if (position > 2)
hash = QueueRound(hash, _queue3);
}
}
hash = MixFinal(hash);
return (int)hash;
}
}
#pragma warning disable 0809
// Obsolete member 'memberA' overrides non-obsolete member 'memberB'.
// Disallowing GetHashCode and Equals is by design
// * We decided to not override GetHashCode() to produce the hash code
// as this would be weird, both naming-wise as well as from a
// behavioral standpoint (GetHashCode() should return the object's
// hash code, not the one being computed).
// * Even though ToHashCode() can be called safely multiple times on
// this implementation, it is not part of the contract. If the
// implementation has to change in the future we don't want to worry
// about people who might have incorrectly used this type.
[Obsolete("HashCode is a mutable struct and should not be compared with other HashCodes. Use ToHashCode to retrieve the computed hash code.", error: true)]
[EditorBrowsable(EditorBrowsableState.Never)]
public override int GetHashCode() => throw new NotSupportedException();
[Obsolete("HashCode is a mutable struct and should not be compared with other HashCodes.", error: true)]
[EditorBrowsable(EditorBrowsableState.Never)]
public override bool Equals(object obj) => throw new NotSupportedException();
#pragma warning restore 0809
}
}
#endif
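A hypothetical call site for the HashCode polyfill above, which only compiles on the non-NETCOREAPP targets (elsewhere the BCL's System.HashCode is used instead). Because the seed is generated per process, the resulting hash codes are stable within a run but intentionally differ between runs:

using System;

// Fast path for a fixed number of values: seeds the state, queues each hash, then applies MixFinal.
int combined = HashCode.Combine(1234UL, 5678UL);

// Streaming form for a variable number of values.
var hc = new HashCode();
hc.Add("datadog.trace");
hc.Add(200);
hc.Add(1.9345);
int streamed = hc.ToHashCode();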


@ -0,0 +1,67 @@
//------------------------------------------------------------------------------
// <auto-generated />
// This file was automatically generated by the UpdateVendors tool.
//------------------------------------------------------------------------------
using Datadog.Trace.Vendors.MessagePack.Formatters;
using System;
using System.Reflection;
namespace Datadog.Trace.Vendors.MessagePack
{
internal interface IFormatterResolver
{
IMessagePackFormatter<T> GetFormatter<T>();
}
internal static class FormatterResolverExtensions
{
public static IMessagePackFormatter<T> GetFormatterWithVerify<T>(this IFormatterResolver resolver)
{
IMessagePackFormatter<T> formatter;
try
{
formatter = resolver.GetFormatter<T>();
}
catch (TypeInitializationException ex)
{
#if NETSTANDARD || NETFRAMEWORK
// The fact that we're using static constructors to initialize this is an internal detail.
// Rethrow the inner exception if there is one.
// Do it carefully so as to not stomp on the original callstack.
System.Runtime.ExceptionServices.ExceptionDispatchInfo.Capture(ex.InnerException ?? ex).Throw();
throw new InvalidOperationException("Unreachable"); // keep the compiler happy
#else
var data = ex.Data; // suppress warning about not using `ex`
throw;
#endif
}
if (formatter == null)
{
throw new FormatterNotRegisteredException(typeof(T).FullName + " is not registered in this resolver. resolver:" + resolver.GetType().Name);
}
return formatter;
}
#if !UNITY_WSA
public static object GetFormatterDynamic(this IFormatterResolver resolver, Type type)
{
var methodInfo = typeof(IFormatterResolver).GetRuntimeMethod("GetFormatter", Type.EmptyTypes);
var formatter = methodInfo.MakeGenericMethod(type).Invoke(resolver, null);
return formatter;
}
#endif
}
internal class FormatterNotRegisteredException : Exception
{
public FormatterNotRegisteredException(string message) : base(message)
{
}
}
}
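A brief sketch of GetFormatterWithVerify: it either returns a usable formatter or throws FormatterNotRegisteredException naming the type and resolver, rather than handing back null. StandardResolver is an assumed upstream name, and the call only compiles from inside Datadog.Trace:

using System;
using Datadog.Trace.Vendors.MessagePack;
using Datadog.Trace.Vendors.MessagePack.Resolvers;

byte[] buffer = new byte[64];

// Resolves the Guid formatter or throws FormatterNotRegisteredException with the type and resolver name.
var guidFormatter = StandardResolver.Instance.GetFormatterWithVerify<Guid>();
int written = guidFormatter.Serialize(ref buffer, 0, Guid.NewGuid(), StandardResolver.Instance);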


@ -0,0 +1,16 @@
//------------------------------------------------------------------------------
// <auto-generated />
// This file was automatically generated by the UpdateVendors tool.
//------------------------------------------------------------------------------
using System;
using System.Collections.Generic;
using System.Text;
namespace Datadog.Trace.Vendors.MessagePack
{
internal interface IMessagePackSerializationCallbackReceiver
{
void OnBeforeSerialize();
void OnAfterDeserialize();
}
}


@ -0,0 +1,71 @@
//------------------------------------------------------------------------------
// <auto-generated />
// This file was automatically generated by the UpdateVendors tool.
//------------------------------------------------------------------------------
using System;
namespace Datadog.Trace.Vendors.MessagePack.Internal
{
internal sealed class BufferPool : ArrayPool<byte>
{
public static readonly BufferPool Default = new BufferPool(65535);
public BufferPool(int bufferLength)
: base(bufferLength)
{
}
}
internal class ArrayPool<T>
{
readonly int bufferLength;
readonly object gate;
int index;
T[][] buffers;
public ArrayPool(int bufferLength)
{
this.bufferLength = bufferLength;
this.buffers = new T[4][];
this.gate = new object();
}
public T[] Rent()
{
lock (gate)
{
if (index >= buffers.Length)
{
Array.Resize(ref buffers, buffers.Length * 2);
}
if (buffers[index] == null)
{
buffers[index] = new T[bufferLength];
}
var buffer = buffers[index];
buffers[index] = null;
index++;
return buffer;
}
}
public void Return(T[] array)
{
if (array.Length != bufferLength)
{
throw new InvalidOperationException("return buffer is not from pool");
}
lock (gate)
{
if (index != 0)
{
buffers[--index] = array;
}
}
}
}
}
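The pool above is used in a rent/return pattern: BufferPool.Default hands out 65535-byte arrays, Return only accepts arrays of exactly that length, and the slot table doubles when more buffers are rented than have been returned. A minimal sketch:

using Datadog.Trace.Vendors.MessagePack.Internal;

byte[] buffer = BufferPool.Default.Rent();
try
{
    // ... serialize into buffer here ...
}
finally
{
    // Throws if the array is not pool-sized; rented arrays always are.
    BufferPool.Default.Return(buffer);
}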


@ -0,0 +1,143 @@
//------------------------------------------------------------------------------
// <auto-generated />
// This file was automatically generated by the UpdateVendors tool.
//------------------------------------------------------------------------------
using System;
using System.Collections.Generic;
using System.Text;
namespace Datadog.Trace.Vendors.MessagePack.Internal
{
internal static class ByteArrayComparer
{
#if ENABLE_UNSAFE_MSGPACK
#if NETSTANDARD || NETFRAMEWORK
static readonly bool Is32Bit = (IntPtr.Size == 4);
[System.Runtime.CompilerServices.MethodImpl(System.Runtime.CompilerServices.MethodImplOptions.AggressiveInlining)]
public static int GetHashCode(byte[] bytes, int offset, int count)
{
if (Is32Bit)
{
return unchecked((int)FarmHash.Hash32(bytes, offset, count));
}
else
{
return unchecked((int)FarmHash.Hash64(bytes, offset, count));
}
}
#endif
#if NETSTANDARD || NETFRAMEWORK
[System.Runtime.CompilerServices.MethodImpl(System.Runtime.CompilerServices.MethodImplOptions.AggressiveInlining)]
#endif
public static unsafe bool Equals(byte[] xs, int xsOffset, int xsCount, byte[] ys)
{
return Equals(xs, xsOffset, xsCount, ys, 0, ys.Length);
}
#if NETSTANDARD || NETFRAMEWORK
[System.Runtime.CompilerServices.MethodImpl(System.Runtime.CompilerServices.MethodImplOptions.AggressiveInlining)]
#endif
public static unsafe bool Equals(byte[] xs, int xsOffset, int xsCount, byte[] ys, int ysOffset, int ysCount)
{
if (xs == null || ys == null || xsCount != ysCount)
{
return false;
}
fixed (byte* p1 = &xs[xsOffset])
fixed (byte* p2 = &ys[ysOffset])
{
switch (xsCount)
{
case 0:
return true;
case 1:
return *p1 == *p2;
case 2:
return *(short*)p1 == *(short*)p2;
case 3:
if (*(byte*)p1 != *(byte*)p2) return false;
return *(short*)(p1 + 1) == *(short*)(p2 + 1);
case 4:
return *(int*)p1 == *(int*)p2;
case 5:
if (*(byte*)p1 != *(byte*)p2) return false;
return *(int*)(p1 + 1) == *(int*)(p2 + 1);
case 6:
if (*(short*)p1 != *(short*)p2) return false;
return *(int*)(p1 + 2) == *(int*)(p2 + 2);
case 7:
if (*(byte*)p1 != *(byte*)p2) return false;
if (*(short*)(p1 + 1) != *(short*)(p2 + 1)) return false;
return *(int*)(p1 + 3) == *(int*)(p2 + 3);
default:
{
var x1 = p1;
var x2 = p2;
byte* xEnd = p1 + xsCount - 8;
byte* yEnd = p2 + ysCount - 8;
while (x1 < xEnd)
{
if (*(long*)x1 != *(long*)x2)
{
return false;
}
x1 += 8;
x2 += 8;
}
return *(long*)xEnd == *(long*)yEnd;
}
}
}
}
#else
#if NETSTANDARD || NETFRAMEWORK
[System.Runtime.CompilerServices.MethodImpl(System.Runtime.CompilerServices.MethodImplOptions.AggressiveInlining)]
#endif
public static bool Equals(byte[] xs, int xsOffset, int xsCount, byte[] ys)
{
if (xs == null || ys == null || xsCount != ys.Length)
{
return false;
}
for (int i = 0; i < ys.Length; i++)
{
if (xs[xsOffset++] != ys[i]) return false;
}
return true;
}
#if NETSTANDARD || NETFRAMEWORK
[System.Runtime.CompilerServices.MethodImpl(System.Runtime.CompilerServices.MethodImplOptions.AggressiveInlining)]
#endif
public static bool Equals(byte[] xs, int xsOffset, int xsCount, byte[] ys, int ysOffset, int ysCount)
{
if (xs == null || ys == null || xsCount != ysCount)
{
return false;
}
for (int i = 0; i < xsCount; i++)
{
if (xs[xsOffset++] != ys[ysOffset++]) return false;
}
return true;
}
#endif
}
}
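A small sketch (not part of the vendored sources; buffer contents and class name are hypothetical) of the managed Equals overloads, which are the ones compiled into this vendored build now that the unsafe path is removed: a slice of one buffer is compared against a whole key.

// Illustrative sketch only: comparing a buffer slice against a UTF-8 key.
using System.Text;
using Datadog.Trace.Vendors.MessagePack.Internal;

internal static class ByteArrayComparerSketch
{
    public static bool Example()
    {
        byte[] haystack = { 0x00, 0x6E, 0x61, 0x6D, 0x65, 0xFF }; // ... "name" ...
        byte[] key = Encoding.UTF8.GetBytes("name");

        // compares haystack[1..5) against the whole key; returns true here
        return ByteArrayComparer.Equals(haystack, 1, 4, key);
    }
}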

@ -0,0 +1,196 @@
//------------------------------------------------------------------------------
// <auto-generated />
// This file was automatically generated by the UpdateVendors tool.
//------------------------------------------------------------------------------
using System;
using System.Collections;
using System.Collections.Generic;
using System.Text;
namespace Datadog.Trace.Vendors.MessagePack.Internal
{
// like an ArraySegment<byte> hashtable.
// Add is safe during the construction phase only and requires sufficient capacity (it does not rehash),
// and the type is specialized for internal use (non-generic, TValue is int)
// internal, but code generator requires this class
internal class ByteArrayStringHashTable : IEnumerable<KeyValuePair<string, int>>
{
readonly Entry[][] buckets; // immutable array (faster than a linked list)
readonly ulong indexFor;
public ByteArrayStringHashTable(int capacity)
: this(capacity, 0.42f) // default: 0.75f -> 0.42f
{
}
public ByteArrayStringHashTable(int capacity, float loadFactor)
{
var tableSize = CalculateCapacity(capacity, loadFactor);
this.buckets = new Entry[tableSize][];
this.indexFor = (ulong)buckets.Length - 1;
}
public void Add(string key, int value)
{
if (!TryAddInternal(Encoding.UTF8.GetBytes(key), value))
{
throw new ArgumentException("Key was already exists. Key:" + key);
}
}
public void Add(byte[] key, int value)
{
if (!TryAddInternal(key, value))
{
throw new ArgumentException("Key was already exists. Key:" + key);
}
}
bool TryAddInternal(byte[] key, int value)
{
var h = ByteArrayGetHashCode(key, 0, key.Length);
var entry = new Entry { Key = key, Value = value };
var array = buckets[h & (indexFor)];
if (array == null)
{
buckets[h & (indexFor)] = new[] { entry };
}
else
{
// check duplicate
for (int i = 0; i < array.Length; i++)
{
var e = array[i].Key;
if (ByteArrayComparer.Equals(key, 0, key.Length, e))
{
return false;
}
}
var newArray = new Entry[array.Length + 1];
Array.Copy(array, newArray, array.Length);
array = newArray;
array[array.Length - 1] = entry;
buckets[h & (indexFor)] = array;
}
return true;
}
public bool TryGetValue(ArraySegment<byte> key, out int value)
{
var table = buckets;
var hash = ByteArrayGetHashCode(key.Array, key.Offset, key.Count);
var entry = table[hash & indexFor];
if (entry == null) goto NOT_FOUND;
{
#if NETSTANDARD || NETFRAMEWORK
ref var v = ref entry[0];
#else
var v = entry[0];
#endif
if (ByteArrayComparer.Equals(key.Array, key.Offset, key.Count, v.Key))
{
value = v.Value;
return true;
}
}
for (int i = 1; i < entry.Length; i++)
{
#if NETSTANDARD || NETFRAMEWORK
ref var v = ref entry[i];
#else
var v = entry[i];
#endif
if (ByteArrayComparer.Equals(key.Array, key.Offset, key.Count, v.Key))
{
value = v.Value;
return true;
}
}
NOT_FOUND:
value = default(int);
return false;
}
#if NETSTANDARD || NETFRAMEWORK
static readonly bool Is32Bit = (IntPtr.Size == 4);
#endif
#if NETSTANDARD || NETFRAMEWORK
[System.Runtime.CompilerServices.MethodImpl(System.Runtime.CompilerServices.MethodImplOptions.AggressiveInlining)]
#endif
static ulong ByteArrayGetHashCode(byte[] x, int offset, int count)
{
// FNV-1a 32-bit https://en.wikipedia.org/wiki/Fowler%E2%80%93Noll%E2%80%93Vo_hash_function
uint hash = 0;
if (x != null)
{
var max = offset + count;
hash = 2166136261;
for (int i = offset; i < max; i++)
{
hash = unchecked((x[i] ^ hash) * 16777619);
}
}
return (ulong)hash;
}
static int CalculateCapacity(int collectionSize, float loadFactor)
{
var initialCapacity = (int)(((float)collectionSize) / loadFactor);
var capacity = 1;
while (capacity < initialCapacity)
{
capacity <<= 1;
}
if (capacity < 8)
{
return 8;
}
return capacity;
}
// only for Debug use
public IEnumerator<KeyValuePair<string, int>> GetEnumerator()
{
var b = this.buckets;
foreach (var item in b)
{
if (item == null) continue;
foreach (var item2 in item)
{
yield return new KeyValuePair<string, int>(Encoding.UTF8.GetString(item2.Key), item2.Value);
}
}
}
IEnumerator IEnumerable.GetEnumerator()
{
return GetEnumerator();
}
struct Entry
{
public byte[] Key;
public int Value;
// for debugging
public override string ToString()
{
return "(" + Encoding.UTF8.GetString(Key) + ", " + Value + ")";
}
}
}
}
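A sketch (not part of the vendored sources; the key names and class name are hypothetical) of the intended usage pattern: populate the table once during construction, then look up values with UTF-8 key bytes wrapped in an ArraySegment.

// Illustrative sketch only: build once, then read with UTF-8 key bytes.
using System;
using System.Text;
using Datadog.Trace.Vendors.MessagePack.Internal;

internal static class ByteArrayStringHashTableSketch
{
    public static int Example()
    {
        var table = new ByteArrayStringHashTable(capacity: 2);
        table.Add("trace_id", 0);
        table.Add("span_id", 1);

        byte[] key = Encoding.UTF8.GetBytes("span_id");
        int index;
        table.TryGetValue(new ArraySegment<byte>(key), out index); // index == 1
        return index;
    }
}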

@ -0,0 +1,92 @@
//------------------------------------------------------------------------------
// <auto-generated />
// This file was automatically generated by the UpdateVendors tool.
//------------------------------------------------------------------------------
using System;
using System.Linq.Expressions;
using System.Reflection;
namespace Datadog.Trace.Vendors.MessagePack.Internal
{
internal static class ExpressionUtility
{
// Method
static MethodInfo GetMethodInfoCore(LambdaExpression expression)
{
if (expression == null)
{
throw new ArgumentNullException("expression");
}
return (expression.Body as MethodCallExpression).Method;
}
/// <summary>
/// Get MethodInfo from Expression for Static(with result) method.
/// </summary>
public static MethodInfo GetMethodInfo<T>(Expression<Func<T>> expression)
{
return GetMethodInfoCore(expression);
}
/// <summary>
/// Get MethodInfo from Expression for Static(void) method.
/// </summary>
public static MethodInfo GetMethodInfo(Expression<Action> expression)
{
return GetMethodInfoCore(expression);
}
/// <summary>
/// Get MethodInfo from Expression for Instance(with result) method.
/// </summary>
public static MethodInfo GetMethodInfo<T, TR>(Expression<Func<T, TR>> expression)
{
return GetMethodInfoCore(expression);
}
/// <summary>
/// Get MethodInfo from Expression for Instance(void) method.
/// </summary>
public static MethodInfo GetMethodInfo<T>(Expression<Action<T>> expression)
{
return GetMethodInfoCore(expression);
}
// WithArgument(for ref, out) helper
/// <summary>
/// Get MethodInfo from Expression for Instance(with result) method.
/// </summary>
public static MethodInfo GetMethodInfo<T, TArg1, TR>(Expression<Func<T, TArg1, TR>> expression)
{
return GetMethodInfoCore(expression);
}
// Property
static MemberInfo GetMemberInfoCore<T>(Expression<T> source)
{
if (source == null)
{
throw new ArgumentNullException("source");
}
var memberExpression = source.Body as MemberExpression;
return memberExpression.Member;
}
public static PropertyInfo GetPropertyInfo<T, TR>(Expression<Func<T, TR>> expression)
{
return GetMemberInfoCore(expression) as PropertyInfo;
}
// Field
public static FieldInfo GetFieldInfo<T, TR>(Expression<Func<T, TR>> expression)
{
return GetMemberInfoCore(expression) as FieldInfo;
}
}
}
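A short sketch (not part of the vendored sources; the class name is hypothetical) of how these helpers resolve reflection handles from lambda expressions rather than from string-based lookups.

// Illustrative sketch only: resolving MethodInfo/PropertyInfo from lambdas.
using System.Reflection;
using Datadog.Trace.Vendors.MessagePack.Internal;

internal static class ExpressionUtilitySketch
{
    public static void Example()
    {
        // static method with a result
        MethodInfo parse = ExpressionUtility.GetMethodInfo(() => int.Parse("0"));

        // instance property
        PropertyInfo length = ExpressionUtility.GetPropertyInfo((string s) => s.Length);

        System.Console.WriteLine(parse.Name + " / " + length.Name); // "Parse / Length"
    }
}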

@ -0,0 +1,376 @@
//------------------------------------------------------------------------------
// <auto-generated />
// This file was automatically generated by the UpdateVendors tool.
//------------------------------------------------------------------------------
using System;
using System.Runtime.InteropServices;
namespace Datadog.Trace.Vendors.MessagePack.Internal
{
[StructLayout(LayoutKind.Explicit, Pack = 1)]
internal struct GuidBits
{
[FieldOffset(0)]
public readonly Guid Value;
[FieldOffset(0)]
public readonly byte Byte0;
[FieldOffset(1)]
public readonly byte Byte1;
[FieldOffset(2)]
public readonly byte Byte2;
[FieldOffset(3)]
public readonly byte Byte3;
[FieldOffset(4)]
public readonly byte Byte4;
[FieldOffset(5)]
public readonly byte Byte5;
[FieldOffset(6)]
public readonly byte Byte6;
[FieldOffset(7)]
public readonly byte Byte7;
[FieldOffset(8)]
public readonly byte Byte8;
[FieldOffset(9)]
public readonly byte Byte9;
[FieldOffset(10)]
public readonly byte Byte10;
[FieldOffset(11)]
public readonly byte Byte11;
[FieldOffset(12)]
public readonly byte Byte12;
[FieldOffset(13)]
public readonly byte Byte13;
[FieldOffset(14)]
public readonly byte Byte14;
[FieldOffset(15)]
public readonly byte Byte15;
// string.Join(", ", Enumerable.Range(0, 256).Select(x => (int)BitConverter.ToString(new byte[] { (byte)x }).ToLower()[0]))
static byte[] byteToHexStringHigh = new byte[256] { 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102 };
// string.Join(", ", Enumerable.Range(0, 256).Select(x => (int)BitConverter.ToString(new byte[] { (byte)x }).ToLower()[1]))
static byte[] byteToHexStringLow = new byte[256] { 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 97, 98, 99, 100, 101, 102, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 97, 98, 99, 100, 101, 102, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 97, 98, 99, 100, 101, 102, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 97, 98, 99, 100, 101, 102, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 97, 98, 99, 100, 101, 102, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 97, 98, 99, 100, 101, 102, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 97, 98, 99, 100, 101, 102, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 97, 98, 99, 100, 101, 102, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 97, 98, 99, 100, 101, 102, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 97, 98, 99, 100, 101, 102, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 97, 98, 99, 100, 101, 102, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 97, 98, 99, 100, 101, 102, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 97, 98, 99, 100, 101, 102, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 97, 98, 99, 100, 101, 102, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 97, 98, 99, 100, 101, 102, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 97, 98, 99, 100, 101, 102 };
public GuidBits(ref Guid value)
{
this = default(GuidBits);
this.Value = value;
}
// 4-pattern, lower/upper and '-' or no
public GuidBits(ArraySegment<byte> utf8string)
{
this = default(GuidBits);
var array = utf8string.Array;
var offset = utf8string.Offset;
// 32
if (utf8string.Count == 32)
{
if (BitConverter.IsLittleEndian)
{
this.Byte0 = Parse(array, offset + 6);
this.Byte1 = Parse(array, offset + 4);
this.Byte2 = Parse(array, offset + 2);
this.Byte3 = Parse(array, offset + 0);
this.Byte4 = Parse(array, offset + 10);
this.Byte5 = Parse(array, offset + 8);
this.Byte6 = Parse(array, offset + 14);
this.Byte7 = Parse(array, offset + 12);
}
else
{
this.Byte0 = Parse(array, offset + 0);
this.Byte1 = Parse(array, offset + 2);
this.Byte2 = Parse(array, offset + 4);
this.Byte3 = Parse(array, offset + 6);
this.Byte4 = Parse(array, offset + 8);
this.Byte5 = Parse(array, offset + 10);
this.Byte6 = Parse(array, offset + 12);
this.Byte7 = Parse(array, offset + 14);
}
this.Byte8 = Parse(array, offset + 16);
this.Byte9 = Parse(array, offset + 18);
this.Byte10 = Parse(array, offset + 20);
this.Byte11 = Parse(array, offset + 22);
this.Byte12 = Parse(array, offset + 24);
this.Byte13 = Parse(array, offset + 26);
this.Byte14 = Parse(array, offset + 28);
this.Byte15 = Parse(array, offset + 30);
return;
}
else if (utf8string.Count == 36)
{
// '-' => 45
if (BitConverter.IsLittleEndian)
{
this.Byte0 = Parse(array, offset + 6);
this.Byte1 = Parse(array, offset + 4);
this.Byte2 = Parse(array, offset + 2);
this.Byte3 = Parse(array, offset + 0);
if (array[offset + 8] != '-') goto ERROR;
this.Byte4 = Parse(array, offset + 11);
this.Byte5 = Parse(array, offset + 9);
if (array[offset + 13] != '-') goto ERROR;
this.Byte6 = Parse(array, offset + 16);
this.Byte7 = Parse(array, offset + 14);
}
else
{
this.Byte0 = Parse(array, offset + 0);
this.Byte1 = Parse(array, offset + 2);
this.Byte2 = Parse(array, offset + 4);
this.Byte3 = Parse(array, offset + 6);
if (array[offset + 8] != '-') goto ERROR;
this.Byte4 = Parse(array, offset + 9);
this.Byte5 = Parse(array, offset + 11);
if (array[offset + 13] != '-') goto ERROR;
this.Byte6 = Parse(array, offset + 14);
this.Byte7 = Parse(array, offset + 16);
}
if (array[offset + 18] != '-') goto ERROR;
this.Byte8 = Parse(array, offset + 19);
this.Byte9 = Parse(array, offset + 21);
if (array[offset + 23] != '-') goto ERROR;
this.Byte10 = Parse(array, offset + 24);
this.Byte11 = Parse(array, offset + 26);
this.Byte12 = Parse(array, offset + 28);
this.Byte13 = Parse(array, offset + 30);
this.Byte14 = Parse(array, offset + 32);
this.Byte15 = Parse(array, offset + 34);
return;
}
ERROR:
throw new ArgumentException("Invalid Guid Pattern.");
}
#if NETSTANDARD || NETFRAMEWORK
[System.Runtime.CompilerServices.MethodImpl(System.Runtime.CompilerServices.MethodImplOptions.AggressiveInlining)]
#endif
static byte Parse(byte[] bytes, int highOffset)
{
return unchecked((byte)(SwitchParse(bytes[highOffset]) * 16 + SwitchParse(bytes[highOffset + 1])));
}
#if NETSTANDARD || NETFRAMEWORK
[System.Runtime.CompilerServices.MethodImpl(System.Runtime.CompilerServices.MethodImplOptions.AggressiveInlining)]
#endif
static byte SwitchParse(byte b)
{
// '0'(48) ~ '9'(57) => -48
// 'A'(65) ~ 'F'(70) => -55
// 'a'(97) ~ 'f'(102) => -87
switch (b)
{
case 48:
case 49:
case 50:
case 51:
case 52:
case 53:
case 54:
case 55:
case 56:
case 57:
return unchecked((byte)((b - 48)));
case 65:
case 66:
case 67:
case 68:
case 69:
case 70:
return unchecked((byte)((b - 55)));
case 97:
case 98:
case 99:
case 100:
case 101:
case 102:
return unchecked((byte)((b - 87)));
case 0:
case 1:
case 2:
case 3:
case 4:
case 5:
case 6:
case 7:
case 8:
case 9:
case 10:
case 11:
case 12:
case 13:
case 14:
case 15:
case 16:
case 17:
case 18:
case 19:
case 20:
case 21:
case 22:
case 23:
case 24:
case 25:
case 26:
case 27:
case 28:
case 29:
case 30:
case 31:
case 32:
case 33:
case 34:
case 35:
case 36:
case 37:
case 38:
case 39:
case 40:
case 41:
case 42:
case 43:
case 44:
case 45:
case 46:
case 47:
case 58:
case 59:
case 60:
case 61:
case 62:
case 63:
case 64:
case 71:
case 72:
case 73:
case 74:
case 75:
case 76:
case 77:
case 78:
case 79:
case 80:
case 81:
case 82:
case 83:
case 84:
case 85:
case 86:
case 87:
case 88:
case 89:
case 90:
case 91:
case 92:
case 93:
case 94:
case 95:
case 96:
default:
throw new ArgumentException("Invalid Guid Pattern.");
}
}
// 4(x2) - 2(x2) - 2(x2) - 2(x2) - 6(x2)
public void Write(byte[] buffer, int offset)
{
if (BitConverter.IsLittleEndian)
{
// int(_a)
buffer[offset + 6] = byteToHexStringHigh[Byte0];
buffer[offset + 7] = byteToHexStringLow[Byte0];
buffer[offset + 4] = byteToHexStringHigh[Byte1];
buffer[offset + 5] = byteToHexStringLow[Byte1];
buffer[offset + 2] = byteToHexStringHigh[Byte2];
buffer[offset + 3] = byteToHexStringLow[Byte2];
buffer[offset + 0] = byteToHexStringHigh[Byte3];
buffer[offset + 1] = byteToHexStringLow[Byte3];
buffer[offset + 8] = (byte)'-';
// short(_b)
buffer[offset + 11] = byteToHexStringHigh[Byte4];
buffer[offset + 12] = byteToHexStringLow[Byte4];
buffer[offset + 9] = byteToHexStringHigh[Byte5];
buffer[offset + 10] = byteToHexStringLow[Byte5];
buffer[offset + 13] = (byte)'-';
// short(_c)
buffer[offset + 16] = byteToHexStringHigh[Byte6];
buffer[offset + 17] = byteToHexStringLow[Byte6];
buffer[offset + 14] = byteToHexStringHigh[Byte7];
buffer[offset + 15] = byteToHexStringLow[Byte7];
}
else
{
buffer[offset + 0] = byteToHexStringHigh[Byte0];
buffer[offset + 1] = byteToHexStringLow[Byte0];
buffer[offset + 2] = byteToHexStringHigh[Byte1];
buffer[offset + 3] = byteToHexStringLow[Byte1];
buffer[offset + 4] = byteToHexStringHigh[Byte2];
buffer[offset + 5] = byteToHexStringLow[Byte2];
buffer[offset + 6] = byteToHexStringHigh[Byte3];
buffer[offset + 7] = byteToHexStringLow[Byte3];
buffer[offset + 8] = (byte)'-';
buffer[offset + 9] = byteToHexStringHigh[Byte4];
buffer[offset + 10] = byteToHexStringLow[Byte4];
buffer[offset + 11] = byteToHexStringHigh[Byte5];
buffer[offset + 12] = byteToHexStringLow[Byte5];
buffer[offset + 13] = (byte)'-';
buffer[offset + 14] = byteToHexStringHigh[Byte6];
buffer[offset + 15] = byteToHexStringLow[Byte6];
buffer[offset + 16] = byteToHexStringHigh[Byte7];
buffer[offset + 17] = byteToHexStringLow[Byte7];
}
buffer[offset + 18] = (byte)'-';
buffer[offset + 19] = byteToHexStringHigh[Byte8];
buffer[offset + 20] = byteToHexStringLow[Byte8];
buffer[offset + 21] = byteToHexStringHigh[Byte9];
buffer[offset + 22] = byteToHexStringLow[Byte9];
buffer[offset + 23] = (byte)'-';
buffer[offset + 24] = byteToHexStringHigh[Byte10];
buffer[offset + 25] = byteToHexStringLow[Byte10];
buffer[offset + 26] = byteToHexStringHigh[Byte11];
buffer[offset + 27] = byteToHexStringLow[Byte11];
buffer[offset + 28] = byteToHexStringHigh[Byte12];
buffer[offset + 29] = byteToHexStringLow[Byte12];
buffer[offset + 30] = byteToHexStringHigh[Byte13];
buffer[offset + 31] = byteToHexStringLow[Byte13];
buffer[offset + 32] = byteToHexStringHigh[Byte14];
buffer[offset + 33] = byteToHexStringLow[Byte14];
buffer[offset + 34] = byteToHexStringHigh[Byte15];
buffer[offset + 35] = byteToHexStringLow[Byte15];
}
}
}
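A round-trip sketch (not part of the vendored sources; the class name is hypothetical): the struct parses a Guid from its 36-character UTF-8 text form, and the overlaid Value field yields the same Guid that produced the text.

// Illustrative sketch only: parsing a Guid back from its UTF-8 text form.
using System;
using System.Text;
using Datadog.Trace.Vendors.MessagePack.Internal;

internal static class GuidBitsSketch
{
    public static bool Example()
    {
        Guid guid = Guid.NewGuid();
        byte[] utf8 = Encoding.UTF8.GetBytes(guid.ToString("D")); // 36 chars, with dashes

        var bits = new GuidBits(new ArraySegment<byte>(utf8));
        return bits.Value == guid; // true
    }
}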

@ -0,0 +1,61 @@
//------------------------------------------------------------------------------
// <auto-generated />
// This file was automatically generated by the UpdateVendors tool.
//------------------------------------------------------------------------------
#if !UNITY_WSA
using System;
using System.Collections.Generic;
using System.Reflection;
using System.Runtime.CompilerServices;
using System.Text;
namespace Datadog.Trace.Vendors.MessagePack.Internal
{
internal static class ReflectionExtensions
{
public static bool IsNullable(this System.Reflection.TypeInfo type)
{
return type.IsGenericType && type.GetGenericTypeDefinition() == typeof(System.Nullable<>);
}
public static bool IsPublic(this System.Reflection.TypeInfo type)
{
return type.IsPublic;
}
public static bool IsAnonymous(this System.Reflection.TypeInfo type)
{
return type.GetCustomAttribute<CompilerGeneratedAttribute>() != null
&& type.IsGenericType && type.Name.Contains("AnonymousType")
&& (type.Name.StartsWith("<>") || type.Name.StartsWith("VB$"))
&& (type.Attributes & TypeAttributes.NotPublic) == TypeAttributes.NotPublic;
}
public static bool IsIndexer(this System.Reflection.PropertyInfo propertyInfo)
{
return propertyInfo.GetIndexParameters().Length > 0;
}
#if NETSTANDARD || NETFRAMEWORK
public static bool IsConstructedGenericType(this System.Reflection.TypeInfo type)
{
return type.AsType().IsConstructedGenericType;
}
public static MethodInfo GetGetMethod(this PropertyInfo propInfo)
{
return propInfo.GetMethod;
}
public static MethodInfo GetSetMethod(this PropertyInfo propInfo)
{
return propInfo.SetMethod;
}
#endif
}
}
#endif
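A quick sketch (not part of the vendored sources; the class name is hypothetical) of a couple of the TypeInfo/PropertyInfo convenience checks defined above.

// Illustrative sketch only: exercising IsNullable and IsIndexer.
using System.Reflection;
using Datadog.Trace.Vendors.MessagePack.Internal;

internal static class ReflectionExtensionsSketch
{
    public static void Example()
    {
        bool nullable = typeof(int?).GetTypeInfo().IsNullable();        // true
        bool plain = typeof(int).GetTypeInfo().IsNullable();            // false
        bool indexer = typeof(string).GetProperty("Chars").IsIndexer(); // true: string's this[int]

        System.Console.WriteLine(nullable + " " + plain + " " + indexer);
    }
}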

@ -0,0 +1,236 @@
//------------------------------------------------------------------------------
// <auto-generated />
// This file was automatically generated by the UpdateVendors tool.
//------------------------------------------------------------------------------
using System;
namespace Datadog.Trace.Vendors.MessagePack.Internal
{
// Safe for multiple-read, single-write.
internal class ThreadsafeTypeKeyHashTable<TValue>
{
Entry[] buckets;
int size; // only use in writer lock
readonly object writerLock = new object();
readonly float loadFactor;
// IEqualityComparer.Equals adds overhead when the key is just a Type, so don't use it.
// readonly IEqualityComparer<TKey> comparer;
public ThreadsafeTypeKeyHashTable(int capacity = 4, float loadFactor = 0.75f)
{
var tableSize = CalculateCapacity(capacity, loadFactor);
this.buckets = new Entry[tableSize];
this.loadFactor = loadFactor;
}
public bool TryAdd(Type key, TValue value)
{
return TryAdd(key, _ => value); // create lambda capture
}
public bool TryAdd(Type key, Func<Type, TValue> valueFactory)
{
TValue _;
return TryAddInternal(key, valueFactory, out _);
}
bool TryAddInternal(Type key, Func<Type, TValue> valueFactory, out TValue resultingValue)
{
lock (writerLock)
{
var nextCapacity = CalculateCapacity(size + 1, loadFactor);
if (buckets.Length < nextCapacity)
{
// rehash
var nextBucket = new Entry[nextCapacity];
for (int i = 0; i < buckets.Length; i++)
{
var e = buckets[i];
while (e != null)
{
var newEntry = new Entry { Key = e.Key, Value = e.Value, Hash = e.Hash };
AddToBuckets(nextBucket, key, newEntry, null, out resultingValue);
e = e.Next;
}
}
// add entry (if the add fails, the resize is still kept)
var successAdd = AddToBuckets(nextBucket, key, null, valueFactory, out resultingValue);
// replace field (thread-safe for readers)
VolatileWrite(ref buckets, nextBucket);
if (successAdd) size++;
return successAdd;
}
else
{
// add entry (inserting at the end is thread-safe for readers)
var successAdd = AddToBuckets(buckets, key, null, valueFactory, out resultingValue);
if (successAdd) size++;
return successAdd;
}
}
}
bool AddToBuckets(Entry[] buckets, Type newKey, Entry newEntryOrNull, Func<Type, TValue> valueFactory, out TValue resultingValue)
{
var h = (newEntryOrNull != null) ? newEntryOrNull.Hash : newKey.GetHashCode();
if (buckets[h & (buckets.Length - 1)] == null)
{
if (newEntryOrNull != null)
{
resultingValue = newEntryOrNull.Value;
VolatileWrite(ref buckets[h & (buckets.Length - 1)], newEntryOrNull);
}
else
{
resultingValue = valueFactory(newKey);
VolatileWrite(ref buckets[h & (buckets.Length - 1)], new Entry { Key = newKey, Value = resultingValue, Hash = h });
}
}
else
{
var searchLastEntry = buckets[h & (buckets.Length - 1)];
while (true)
{
if (searchLastEntry.Key == newKey)
{
resultingValue = searchLastEntry.Value;
return false;
}
if (searchLastEntry.Next == null)
{
if (newEntryOrNull != null)
{
resultingValue = newEntryOrNull.Value;
VolatileWrite(ref searchLastEntry.Next, newEntryOrNull);
}
else
{
resultingValue = valueFactory(newKey);
VolatileWrite(ref searchLastEntry.Next, new Entry { Key = newKey, Value = resultingValue, Hash = h });
}
break;
}
searchLastEntry = searchLastEntry.Next;
}
}
return true;
}
public bool TryGetValue(Type key, out TValue value)
{
var table = buckets;
var hash = key.GetHashCode();
var entry = table[hash & table.Length - 1];
if (entry == null) goto NOT_FOUND;
if (entry.Key == key)
{
value = entry.Value;
return true;
}
var next = entry.Next;
while (next != null)
{
if (next.Key == key)
{
value = next.Value;
return true;
}
next = next.Next;
}
NOT_FOUND:
value = default(TValue);
return false;
}
public TValue GetOrAdd(Type key, Func<Type, TValue> valueFactory)
{
TValue v;
if (TryGetValue(key, out v))
{
return v;
}
TryAddInternal(key, valueFactory, out v);
return v;
}
static int CalculateCapacity(int collectionSize, float loadFactor)
{
var initialCapacity = (int)(((float)collectionSize) / loadFactor);
var capacity = 1;
while (capacity < initialCapacity)
{
capacity <<= 1;
}
if (capacity < 8)
{
return 8;
}
return capacity;
}
static void VolatileWrite(ref Entry location, Entry value)
{
#if NETSTANDARD || NETFRAMEWORK
System.Threading.Volatile.Write(ref location, value);
#elif UNITY_WSA || NET_4_6
System.Threading.Volatile.Write(ref location, value);
#else
System.Threading.Thread.MemoryBarrier();
location = value;
#endif
}
static void VolatileWrite(ref Entry[] location, Entry[] value)
{
#if NETSTANDARD || NETFRAMEWORK
System.Threading.Volatile.Write(ref location, value);
#elif UNITY_WSA || NET_4_6
System.Threading.Volatile.Write(ref location, value);
#else
System.Threading.Thread.MemoryBarrier();
location = value;
#endif
}
class Entry
{
public Type Key;
public TValue Value;
public int Hash;
public Entry Next;
// debug only
public override string ToString()
{
return Key + "(" + Count() + ")";
}
int Count()
{
var count = 1;
var n = this;
while (n.Next != null)
{
count++;
n = n.Next;
}
return count;
}
}
}
}
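A sketch (not part of the vendored sources; names are hypothetical) of the typical pattern the formatter caches use: GetOrAdd builds the value once under the writer lock, and subsequent lookups are lock-free reads.

// Illustrative sketch only: a per-Type cache with lock-free reads.
using System;
using Datadog.Trace.Vendors.MessagePack.Internal;

internal static class ThreadsafeTypeKeyHashTableSketch
{
    private static readonly ThreadsafeTypeKeyHashTable<string> Names =
        new ThreadsafeTypeKeyHashTable<string>();

    public static string Example()
    {
        // the factory runs only for the first caller; later calls just read
        return Names.GetOrAdd(typeof(DateTime), t => t.FullName);
    }
}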

@ -0,0 +1,359 @@
//------------------------------------------------------------------------------
// <auto-generated />
// This file was automatically generated by the UpdateVendors tool.
//------------------------------------------------------------------------------
using System;
using System.Globalization;
using System.IO;
using System.Text;
namespace Datadog.Trace.Vendors.MessagePack
{
// simple, tiny JSON reader for MessagePackSerializer.FromJson.
// it is simple, compact and fast enough, but not aggressively optimized.
internal enum TinyJsonToken
{
None,
StartObject, // {
EndObject, // }
StartArray, // [
EndArray, // ]
Number, // -0~9
String, // "___"
True, // true
False, // false
Null, // null
}
internal enum ValueType : byte
{
Null,
True,
False,
Double,
Long,
ULong,
Decimal,
String
}
internal class TinyJsonException : Exception
{
public TinyJsonException(string message) : base(message)
{
}
}
internal class TinyJsonReader : IDisposable
{
readonly TextReader reader;
readonly bool disposeInnerReader;
StringBuilder reusableBuilder;
public TinyJsonToken TokenType { get; private set; }
public ValueType ValueType { get; private set; }
public double DoubleValue { get; private set; }
public long LongValue { get; private set; }
public ulong ULongValue { get; private set; }
public decimal DecimalValue { get; private set; }
public string StringValue { get; private set; }
public TinyJsonReader(TextReader reader, bool disposeInnerReader = true)
{
this.reader = reader;
this.disposeInnerReader = disposeInnerReader;
}
public bool Read()
{
ReadNextToken();
ReadValue();
return TokenType != TinyJsonToken.None;
}
public void Dispose()
{
if (reader != null && disposeInnerReader)
{
reader.Dispose();
}
TokenType = TinyJsonToken.None;
ValueType = ValueType.Null;
}
void SkipWhiteSpace()
{
var c = reader.Peek();
while (c != -1 && Char.IsWhiteSpace((char)c))
{
reader.Read();
c = reader.Peek();
}
}
char ReadChar()
{
return (char)reader.Read();
}
static bool IsWordBreak(char c)
{
switch (c)
{
case ' ':
case '{':
case '}':
case '[':
case ']':
case ',':
case ':':
case '\"':
return true;
default:
return false;
}
}
void ReadNextToken()
{
SkipWhiteSpace();
var intChar = reader.Peek();
if (intChar == -1)
{
TokenType = TinyJsonToken.None;
return;
}
var c = (char)intChar;
switch (c)
{
case '{':
TokenType = TinyJsonToken.StartObject;
return;
case '}':
TokenType = TinyJsonToken.EndObject;
return;
case '[':
TokenType = TinyJsonToken.StartArray;
return;
case ']':
TokenType = TinyJsonToken.EndArray;
return;
case '"':
TokenType = TinyJsonToken.String;
return;
case '0':
case '1':
case '2':
case '3':
case '4':
case '5':
case '6':
case '7':
case '8':
case '9':
case '-':
TokenType = TinyJsonToken.Number;
return;
case 't':
TokenType = TinyJsonToken.True;
return;
case 'f':
TokenType = TinyJsonToken.False;
return;
case 'n':
TokenType = TinyJsonToken.Null;
return;
case ',':
case ':':
reader.Read();
ReadNextToken();
return;
default:
throw new TinyJsonException("Invalid String:" + c);
}
}
void ReadValue()
{
ValueType = ValueType.Null;
switch (TokenType)
{
case TinyJsonToken.None:
break;
case TinyJsonToken.StartObject:
case TinyJsonToken.EndObject:
case TinyJsonToken.StartArray:
case TinyJsonToken.EndArray:
reader.Read();
break;
case TinyJsonToken.Number:
ReadNumber();
break;
case TinyJsonToken.String:
ReadString();
break;
case TinyJsonToken.True:
if (ReadChar() != 't') throw new TinyJsonException("Invalid Token");
if (ReadChar() != 'r') throw new TinyJsonException("Invalid Token");
if (ReadChar() != 'u') throw new TinyJsonException("Invalid Token");
if (ReadChar() != 'e') throw new TinyJsonException("Invalid Token");
ValueType = ValueType.True;
break;
case TinyJsonToken.False:
if (ReadChar() != 'f') throw new TinyJsonException("Invalid Token");
if (ReadChar() != 'a') throw new TinyJsonException("Invalid Token");
if (ReadChar() != 'l') throw new TinyJsonException("Invalid Token");
if (ReadChar() != 's') throw new TinyJsonException("Invalid Token");
if (ReadChar() != 'e') throw new TinyJsonException("Invalid Token");
ValueType = ValueType.False;
break;
case TinyJsonToken.Null:
if (ReadChar() != 'n') throw new TinyJsonException("Invalid Token");
if (ReadChar() != 'u') throw new TinyJsonException("Invalid Token");
if (ReadChar() != 'l') throw new TinyJsonException("Invalid Token");
if (ReadChar() != 'l') throw new TinyJsonException("Invalid Token");
ValueType = ValueType.Null;
break;
default:
throw new ArgumentException("InvalidTokenState:" + TokenType);
}
}
void ReadNumber()
{
StringBuilder numberWord;
if (reusableBuilder == null)
{
reusableBuilder = new StringBuilder();
numberWord = reusableBuilder;
}
else
{
numberWord = reusableBuilder;
numberWord.Length = 0; // Clear
}
var isDouble = false;
var intChar = reader.Peek();
while (intChar != -1 && !IsWordBreak((char)intChar))
{
var c = ReadChar();
numberWord.Append(c);
if (c == '.' || c == 'e' || c == 'E') isDouble = true;
intChar = reader.Peek();
}
var number = numberWord.ToString();
if (isDouble)
{
double parsedDouble;
Double.TryParse(number, NumberStyles.AllowLeadingWhite | NumberStyles.AllowTrailingWhite | NumberStyles.AllowLeadingSign | NumberStyles.AllowDecimalPoint | NumberStyles.AllowThousands | NumberStyles.AllowExponent, System.Globalization.CultureInfo.InvariantCulture, out parsedDouble);
ValueType = ValueType.Double;
DoubleValue = parsedDouble;
}
else
{
long parsedInt;
if (Int64.TryParse(number, NumberStyles.Integer, System.Globalization.CultureInfo.InvariantCulture, out parsedInt))
{
ValueType = ValueType.Long;
LongValue = parsedInt;
return;
}
ulong parsedULong;
if (ulong.TryParse(number, NumberStyles.Integer, System.Globalization.CultureInfo.InvariantCulture, out parsedULong))
{
ValueType = ValueType.ULong;
ULongValue = parsedULong;
return;
}
Decimal parsedDecimal;
if (decimal.TryParse(number, NumberStyles.Number, System.Globalization.CultureInfo.InvariantCulture, out parsedDecimal))
{
ValueType = ValueType.Decimal;
DecimalValue = parsedDecimal;
return;
}
}
}
void ReadString()
{
reader.Read(); // skip ["]
StringBuilder sb;
if (reusableBuilder == null)
{
reusableBuilder = new StringBuilder();
sb = reusableBuilder;
}
else
{
sb = reusableBuilder;
sb.Length = 0; // Clear
}
while (true)
{
if (reader.Peek() == -1) throw new TinyJsonException("Invalid Json String");
var c = ReadChar();
switch (c)
{
case '"': // endtoken
goto END;
case '\\': // escape character
if (reader.Peek() == -1) throw new TinyJsonException("Invalid Json String");
c = ReadChar();
switch (c)
{
case '"':
case '\\':
case '/':
sb.Append(c);
break;
case 'b':
sb.Append('\b');
break;
case 'f':
sb.Append('\f');
break;
case 'n':
sb.Append('\n');
break;
case 'r':
sb.Append('\r');
break;
case 't':
sb.Append('\t');
break;
case 'u':
var hex = new char[4];
hex[0] = ReadChar();
hex[1] = ReadChar();
hex[2] = ReadChar();
hex[3] = ReadChar();
sb.Append((char)Convert.ToInt32(new string(hex), 16));
break;
}
break;
default: // string
sb.Append(c);
break;
}
}
END:
ValueType = ValueType.String;
StringValue = sb.ToString();
}
}
}
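A sketch (not part of the vendored sources; the class name and JSON payload are only examples) of walking the token stream of a small JSON document with the reader.

// Illustrative sketch only: pulling a single numeric value out of tiny JSON.
using System.IO;
using Datadog.Trace.Vendors.MessagePack;

internal static class TinyJsonReaderSketch
{
    public static long Example()
    {
        long spanCount = 0;
        using (var reader = new TinyJsonReader(new StringReader("{\"spans\": 3}")))
        {
            while (reader.Read())
            {
                if (reader.TokenType == TinyJsonToken.Number && reader.ValueType == ValueType.Long)
                {
                    spanCount = reader.LongValue; // 3
                }
            }
        }
        return spanCount;
    }
}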

@ -0,0 +1,67 @@
<#@ template debug="false" hostspecific="false" language="C#" #>
<#@ assembly name="System.Core" #>
<#@ import namespace="System.Linq" #>
<#@ import namespace="System.Text" #>
<#@ import namespace="System.Collections.Generic" #>
<#@ output extension=".cs" #>
<#
var Max = 31;
#>
#if NETSTANDARD || NETFRAMEWORK
using System.Runtime.CompilerServices;
namespace MessagePack.Internal
{
public static partial class UnsafeMemory32
{
<# for(var i = 4; i <= Max; i++) { #>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public static unsafe int WriteRaw<#= i #>(ref byte[] dst, int dstOffset, byte[] src)
{
MessagePackBinary.EnsureCapacity(ref dst, dstOffset, src.Length);
fixed (byte* pSrc = &src[0])
fixed (byte* pDst = &dst[dstOffset])
{
<# for(var j = 0; j < (i / 4); j++) { #>
*(int*)(pDst + <#= (4 * j) #>) = *(int*)(pSrc + <#= (4 * j) #>);
<# } #>
<# if(i % 4 != 0) { #>
*(int*)(pDst + <#= i - 4 #>) = *(int*)(pSrc + <#= (i - 4) #>);
<# } #>
}
return src.Length;
}
<# } #>
}
public static partial class UnsafeMemory64
{
<# for(var i = 8; i <= Max; i++) { #>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public static unsafe int WriteRaw<#= i #>(ref byte[] dst, int dstOffset, byte[] src)
{
MessagePackBinary.EnsureCapacity(ref dst, dstOffset, src.Length);
fixed (byte* pSrc = &src[0])
fixed (byte* pDst = &dst[dstOffset])
{
<# for(var j = 0; j < (i / 8); j++) { #>
*(long*)(pDst + <#= (8 * j) #>) = *(long*)(pSrc + <#= (8 * j) #>);
<# } #>
<# if(i % 8 != 0) { #>
*(long*)(pDst + <#= i - 8 #>) = *(long*)(pSrc + <#= (i - 8) #>);
<# } #>
}
return src.Length;
}
<# } #>
}
}
#endif

@ -0,0 +1,89 @@
//------------------------------------------------------------------------------
// <auto-generated />
// This file was automatically generated by the UpdateVendors tool.
//------------------------------------------------------------------------------
using System;
namespace Datadog.Trace.Vendors.MessagePack.LZ4
{
internal static partial class LZ4Codec
{
// use 'Safe' code for Unity because IL2CPP shows strange behaviour.
public static int Encode(byte[] input, int inputOffset, int inputLength, byte[] output, int outputOffset, int outputLength)
{
if (IntPtr.Size == 4)
{
return LZ4Codec.Encode32Safe(input, inputOffset, inputLength, output, outputOffset, outputLength);
}
else
{
return LZ4Codec.Encode64Safe(input, inputOffset, inputLength, output, outputOffset, outputLength);
}
}
public static int Decode(byte[] input, int inputOffset, int inputLength, byte[] output, int outputOffset, int outputLength)
{
if (IntPtr.Size == 4)
{
return LZ4Codec.Decode32Safe(input, inputOffset, inputLength, output, outputOffset, outputLength);
}
else
{
return LZ4Codec.Decode64Safe(input, inputOffset, inputLength, output, outputOffset, outputLength);
}
}
internal static class HashTablePool
{
[ThreadStatic]
static ushort[] ushortPool;
[ThreadStatic]
static uint[] uintPool;
[ThreadStatic]
static int[] intPool;
public static ushort[] GetUShortHashTablePool()
{
if (ushortPool == null)
{
ushortPool = new ushort[HASH64K_TABLESIZE];
}
else
{
Array.Clear(ushortPool, 0, ushortPool.Length);
}
return ushortPool;
}
public static uint[] GetUIntHashTablePool()
{
if (uintPool == null)
{
uintPool = new uint[HASH_TABLESIZE];
}
else
{
Array.Clear(uintPool, 0, uintPool.Length);
}
return uintPool;
}
public static int[] GetIntHashTablePool()
{
if (intPool == null)
{
intPool = new int[HASH_TABLESIZE];
}
else
{
Array.Clear(intPool, 0, intPool.Length);
}
return intPool;
}
}
}
}
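A round-trip sketch (not part of the vendored sources; the class name and the output-buffer bound are assumptions) of the safe codec entry points above: Encode returns the compressed length (or 0 if the output buffer is too small), and Decode needs the exact decompressed length up front, so the caller is expected to track it.

// Illustrative sketch only: compress a block and decompress it again.
using Datadog.Trace.Vendors.MessagePack.LZ4;

internal static class LZ4CodecSketch
{
    public static bool Example()
    {
        byte[] source = new byte[1024];                                           // sample payload
        byte[] compressed = new byte[source.Length + (source.Length / 255) + 64]; // assumed-generous upper bound

        int compressedLength = LZ4Codec.Encode(
            source, 0, source.Length, compressed, 0, compressed.Length);

        byte[] restored = new byte[source.Length]; // the caller must know the original length
        int written = LZ4Codec.Decode(
            compressed, 0, compressedLength, restored, 0, restored.Length);

        return written == source.Length; // true when the round trip succeeds
    }
}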

@ -0,0 +1,441 @@
//------------------------------------------------------------------------------
// <auto-generated />
// This file was automatically generated by the UpdateVendors tool.
//------------------------------------------------------------------------------
#region license
/*
Copyright (c) 2013, Milosz Krajewski
All rights reserved.
Redistribution and use in source and binary forms, with or without modification, are permitted provided
that the following conditions are met:
* Redistributions of source code must retain the above copyright notice, this list of conditions
and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright notice, this list of conditions
and the following disclaimer in the documentation and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED
WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#endregion
using System;
using System.Diagnostics;
// ReSharper disable CheckNamespace
// ReSharper disable InconsistentNaming
namespace Datadog.Trace.Vendors.MessagePack.LZ4
{
/// <summary>Safe LZ4 codec.</summary>
internal static partial class LZ4Codec
{
#region Helper
// ReSharper disable UnusedParameter.Local
[Conditional("DEBUG")]
private static void Assert(bool condition, string errorMessage)
{
if (!condition) throw new ArgumentException(errorMessage);
Debug.Assert(condition, errorMessage);
}
// ReSharper restore UnusedParameter.Local
#endregion
#region Byte manipulation
// ReSharper disable RedundantCast
internal static void Poke2(byte[] buffer, int offset, ushort value)
{
buffer[offset] = (byte)value;
buffer[offset + 1] = (byte)(value >> 8);
}
internal static ushort Peek2(byte[] buffer, int offset)
{
// NOTE: It's faster than BitConverter.ToUInt16 (surprised? me too)
return (ushort)(((uint)buffer[offset]) | ((uint)buffer[offset + 1] << 8));
}
internal static uint Peek4(byte[] buffer, int offset)
{
// NOTE: It's faster than BitConverter.ToUInt32 (surprised? me too)
return
((uint)buffer[offset]) |
((uint)buffer[offset + 1] << 8) |
((uint)buffer[offset + 2] << 16) |
((uint)buffer[offset + 3] << 24);
}
private static uint Xor4(byte[] buffer, int offset1, int offset2)
{
// return Peek4(buffer, offset1) ^ Peek4(buffer, offset2);
var value1 =
((uint)buffer[offset1]) |
((uint)buffer[offset1 + 1] << 8) |
((uint)buffer[offset1 + 2] << 16) |
((uint)buffer[offset1 + 3] << 24);
var value2 =
((uint)buffer[offset2]) |
((uint)buffer[offset2 + 1] << 8) |
((uint)buffer[offset2 + 2] << 16) |
((uint)buffer[offset2 + 3] << 24);
return value1 ^ value2;
}
private static ulong Xor8(byte[] buffer, int offset1, int offset2)
{
// return Peek8(buffer, offset1) ^ Peek8(buffer, offset2);
var value1 =
((ulong)buffer[offset1]) |
((ulong)buffer[offset1 + 1] << 8) |
((ulong)buffer[offset1 + 2] << 16) |
((ulong)buffer[offset1 + 3] << 24) |
((ulong)buffer[offset1 + 4] << 32) |
((ulong)buffer[offset1 + 5] << 40) |
((ulong)buffer[offset1 + 6] << 48) |
((ulong)buffer[offset1 + 7] << 56);
var value2 =
((ulong)buffer[offset2]) |
((ulong)buffer[offset2 + 1] << 8) |
((ulong)buffer[offset2 + 2] << 16) |
((ulong)buffer[offset2 + 3] << 24) |
((ulong)buffer[offset2 + 4] << 32) |
((ulong)buffer[offset2 + 5] << 40) |
((ulong)buffer[offset2 + 6] << 48) |
((ulong)buffer[offset2 + 7] << 56);
return value1 ^ value2;
}
private static bool Equal2(byte[] buffer, int offset1, int offset2)
{
// return Peek2(buffer, offset1) == Peek2(buffer, offset2);
if (buffer[offset1] != buffer[offset2]) return false;
return buffer[offset1 + 1] == buffer[offset2 + 1];
}
private static bool Equal4(byte[] buffer, int offset1, int offset2)
{
// return Peek4(buffer, offset1) == Peek4(buffer, offset2);
if (buffer[offset1] != buffer[offset2]) return false;
if (buffer[offset1 + 1] != buffer[offset2 + 1]) return false;
if (buffer[offset1 + 2] != buffer[offset2 + 2]) return false;
return buffer[offset1 + 3] == buffer[offset2 + 3];
}
// ReSharper restore RedundantCast
#endregion
#region Byte block copy
private static void Copy4(byte[] buf, int src, int dst)
{
Assert(dst > src, "Copying backwards is not implemented");
buf[dst + 3] = buf[src + 3];
buf[dst + 2] = buf[src + 2];
buf[dst + 1] = buf[src + 1];
buf[dst] = buf[src];
}
private static void Copy8(byte[] buf, int src, int dst)
{
Assert(dst > src, "Copying backwards is not implemented");
buf[dst + 7] = buf[src + 7];
buf[dst + 6] = buf[src + 6];
buf[dst + 5] = buf[src + 5];
buf[dst + 4] = buf[src + 4];
buf[dst + 3] = buf[src + 3];
buf[dst + 2] = buf[src + 2];
buf[dst + 1] = buf[src + 1];
buf[dst] = buf[src];
}
private static void BlockCopy(byte[] src, int src_0, byte[] dst, int dst_0, int len)
{
Assert(src != dst, "BlockCopy does not handle copying to the same buffer");
if (len >= BLOCK_COPY_LIMIT)
{
Buffer.BlockCopy(src, src_0, dst, dst_0, len);
}
else
{
while (len >= 8)
{
dst[dst_0] = src[src_0];
dst[dst_0 + 1] = src[src_0 + 1];
dst[dst_0 + 2] = src[src_0 + 2];
dst[dst_0 + 3] = src[src_0 + 3];
dst[dst_0 + 4] = src[src_0 + 4];
dst[dst_0 + 5] = src[src_0 + 5];
dst[dst_0 + 6] = src[src_0 + 6];
dst[dst_0 + 7] = src[src_0 + 7];
len -= 8;
src_0 += 8;
dst_0 += 8;
}
while (len >= 4)
{
dst[dst_0] = src[src_0];
dst[dst_0 + 1] = src[src_0 + 1];
dst[dst_0 + 2] = src[src_0 + 2];
dst[dst_0 + 3] = src[src_0 + 3];
len -= 4;
src_0 += 4;
dst_0 += 4;
}
while (len-- > 0)
{
dst[dst_0++] = src[src_0++];
}
}
}
private static int WildCopy(byte[] src, int src_0, byte[] dst, int dst_0, int dst_end)
{
var len = dst_end - dst_0;
Assert(src != dst, "BlockCopy does not handle copying to the same buffer");
Assert(len > 0, "Length has to be greater than 0");
if (len >= BLOCK_COPY_LIMIT)
{
Buffer.BlockCopy(src, src_0, dst, dst_0, len);
}
else
{
// apparently (tested) this is overkill
// it seems to be faster without this 8-byte loop
//while (len >= 8)
//{
// dst[dst_0] = src[src_0];
// dst[dst_0 + 1] = src[src_0 + 1];
// dst[dst_0 + 2] = src[src_0 + 2];
// dst[dst_0 + 3] = src[src_0 + 3];
// dst[dst_0 + 4] = src[src_0 + 4];
// dst[dst_0 + 5] = src[src_0 + 5];
// dst[dst_0 + 6] = src[src_0 + 6];
// dst[dst_0 + 7] = src[src_0 + 7];
// len -= 8; src_0 += 8; dst_0 += 8;
//}
while (len >= 4)
{
dst[dst_0] = src[src_0];
dst[dst_0 + 1] = src[src_0 + 1];
dst[dst_0 + 2] = src[src_0 + 2];
dst[dst_0 + 3] = src[src_0 + 3];
len -= 4;
src_0 += 4;
dst_0 += 4;
}
while (len-- > 0)
{
dst[dst_0++] = src[src_0++];
}
}
return len;
}
private static int SecureCopy(byte[] buffer, int src, int dst, int dst_end)
{
var diff = dst - src;
var length = dst_end - dst;
var len = length;
Assert(diff >= 4, "Target must be at least 4 bytes further than source");
Assert(BLOCK_COPY_LIMIT > 4, "This method requires BLOCK_COPY_LIMIT > 4");
Assert(len > 0, "Length has to be greater than 0");
if (diff >= BLOCK_COPY_LIMIT)
{
if (diff >= length)
{
Buffer.BlockCopy(buffer, src, buffer, dst, length);
return length; // done
}
do
{
Buffer.BlockCopy(buffer, src, buffer, dst, diff);
src += diff;
dst += diff;
len -= diff;
} while (len >= diff);
}
// apparently (tested) this is overkill
// it seems to be faster without this 8-byte loop
//while (len >= 8)
//{
// buffer[dst] = buffer[src];
// buffer[dst + 1] = buffer[src + 1];
// buffer[dst + 2] = buffer[src + 2];
// buffer[dst + 3] = buffer[src + 3];
// buffer[dst + 4] = buffer[src + 4];
// buffer[dst + 5] = buffer[src + 5];
// buffer[dst + 6] = buffer[src + 6];
// buffer[dst + 7] = buffer[src + 7];
// dst += 8; src += 8; len -= 8;
//}
while (len >= 4)
{
buffer[dst] = buffer[src];
buffer[dst + 1] = buffer[src + 1];
buffer[dst + 2] = buffer[src + 2];
buffer[dst + 3] = buffer[src + 3];
dst += 4;
src += 4;
len -= 4;
}
while (len-- > 0)
{
buffer[dst++] = buffer[src++];
}
return length; // done
}
#endregion
/// <summary>Encodes the specified input.</summary>
/// <param name="input">The input.</param>
/// <param name="inputOffset">The input offset.</param>
/// <param name="inputLength">Length of the input.</param>
/// <param name="output">The output.</param>
/// <param name="outputOffset">The output offset.</param>
/// <param name="outputLength">Length of the output.</param>
/// <returns>Number of bytes written.</returns>
public static int Encode32Safe(
byte[] input,
int inputOffset,
int inputLength,
byte[] output,
int outputOffset,
int outputLength)
{
CheckArguments(input, inputOffset, inputLength, output, outputOffset, outputLength);
if (outputLength == 0) return 0;
if (inputLength < LZ4_64KLIMIT)
{
var hashTable = HashTablePool.GetUShortHashTablePool();
return LZ4_compress64kCtx_safe32(hashTable, input, output, inputOffset, outputOffset, inputLength, outputLength);
}
else
{
var hashTable = HashTablePool.GetIntHashTablePool();
return LZ4_compressCtx_safe32(hashTable, input, output, inputOffset, outputOffset, inputLength, outputLength);
}
}
/// <summary>Encodes the specified input.</summary>
/// <param name="input">The input.</param>
/// <param name="inputOffset">The input offset.</param>
/// <param name="inputLength">Length of the input.</param>
/// <param name="output">The output.</param>
/// <param name="outputOffset">The output offset.</param>
/// <param name="outputLength">Length of the output.</param>
/// <returns>Number of bytes written.</returns>
public static int Encode64Safe(
byte[] input,
int inputOffset,
int inputLength,
byte[] output,
int outputOffset,
int outputLength)
{
CheckArguments(input, inputOffset, inputLength, output, outputOffset, outputLength);
if (outputLength == 0) return 0;
if (inputLength < LZ4_64KLIMIT)
{
var hashTable = HashTablePool.GetUShortHashTablePool();
return LZ4_compress64kCtx_safe64(hashTable, input, output, inputOffset, outputOffset, inputLength, outputLength);
}
else
{
var hashTable = HashTablePool.GetIntHashTablePool();
return LZ4_compressCtx_safe64(hashTable, input, output, inputOffset, outputOffset, inputLength, outputLength);
}
}
/// <summary>Decodes the specified input.</summary>
/// <param name="input">The input.</param>
/// <param name="inputOffset">The input offset.</param>
/// <param name="inputLength">Length of the input.</param>
/// <param name="output">The output.</param>
/// <param name="outputOffset">The output offset.</param>
/// <param name="outputLength">Length of the output.</param>
/// <returns>Number of bytes written.</returns>
public static int Decode32Safe(
byte[] input,
int inputOffset,
int inputLength,
byte[] output,
int outputOffset,
int outputLength)
{
CheckArguments(input, inputOffset, inputLength, output, outputOffset, outputLength);
if (outputLength == 0) return 0;
var length = LZ4_uncompress_safe32(input, output, inputOffset, outputOffset, outputLength);
if (length != inputLength)
throw new ArgumentException("LZ4 block is corrupted, or invalid length has been given.");
return outputLength;
}
/// <summary>Decodes the specified input.</summary>
/// <param name="input">The input.</param>
/// <param name="inputOffset">The input offset.</param>
/// <param name="inputLength">Length of the input.</param>
/// <param name="output">The output.</param>
/// <param name="outputOffset">The output offset.</param>
/// <param name="outputLength">Length of the output.</param>
/// <returns>Number of bytes written.</returns>
public static int Decode64Safe(
byte[] input,
int inputOffset,
int inputLength,
byte[] output,
int outputOffset,
int outputLength)
{
CheckArguments(
input, inputOffset, inputLength,
output, outputOffset, outputLength);
if (outputLength == 0) return 0;
var length = LZ4_uncompress_safe64(input, output, inputOffset, outputOffset, outputLength);
if (length != inputLength)
throw new ArgumentException("LZ4 block is corrupted, or invalid length has been given.");
return outputLength;
}
}
}
// ReSharper restore InconsistentNaming
// ReSharper restore CheckNamespace

@ -0,0 +1,669 @@
//------------------------------------------------------------------------------
// <auto-generated />
// This file was automatically generated by the UpdateVendors tool.
//------------------------------------------------------------------------------
#region LZ4 original
/*
LZ4 - Fast LZ compression algorithm
Copyright (C) 2011-2012, Yann Collet.
BSD 2-Clause License (http://www.opensource.org/licenses/bsd-license.php)
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above
copyright notice, this list of conditions and the following disclaimer
in the documentation and/or other materials provided with the
distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
You can contact the author at :
- LZ4 homepage : http://fastcompression.blogspot.com/p/lz4.html
- LZ4 source repository : http://code.google.com/p/lz4/
*/
#endregion
#region LZ4 port
/*
Copyright (c) 2013, Milosz Krajewski
All rights reserved.
Redistribution and use in source and binary forms, with or without modification, are permitted provided
that the following conditions are met:
* Redistributions of source code must retain the above copyright notice, this list of conditions
and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright notice, this list of conditions
and the following disclaimer in the documentation and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED
WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#endregion
// ReSharper disable CheckNamespace
// ReSharper disable InconsistentNaming
// ReSharper disable TooWideLocalVariableScope
// ReSharper disable JoinDeclarationAndInitializer
// ReSharper disable RedundantIfElseBlock
namespace Datadog.Trace.Vendors.MessagePack.LZ4
{
internal static partial class LZ4Codec
{
#region LZ4_compressCtx
private static int LZ4_compressCtx_safe32(
int[] hash_table,
byte[] src,
byte[] dst,
int src_0,
int dst_0,
int src_len,
int dst_maxlen)
{
unchecked
{
var debruijn32 = DEBRUIJN_TABLE_32;
int _i;
// ---- preprocessed source start here ----
// r93
var src_p = src_0;
var src_base = src_0;
var src_anchor = src_p;
var src_end = src_p + src_len;
var src_mflimit = src_end - MFLIMIT;
var dst_p = dst_0;
var dst_end = dst_p + dst_maxlen;
var src_LASTLITERALS = src_end - LASTLITERALS;
var src_LASTLITERALS_1 = src_LASTLITERALS - 1;
var src_LASTLITERALS_STEPSIZE_1 = src_LASTLITERALS - (STEPSIZE_32 - 1);
var dst_LASTLITERALS_1 = dst_end - (1 + LASTLITERALS);
var dst_LASTLITERALS_3 = dst_end - (2 + 1 + LASTLITERALS);
int length;
uint h, h_fwd;
// Init
if (src_len < MINLENGTH) goto _last_literals;
// First Byte
hash_table[(((Peek4(src, src_p)) * 2654435761u) >> HASH_ADJUST)] = (src_p - src_base);
src_p++;
h_fwd = (((Peek4(src, src_p)) * 2654435761u) >> HASH_ADJUST);
// Main Loop
while (true)
{
var findMatchAttempts = (1 << SKIPSTRENGTH) + 3;
var src_p_fwd = src_p;
int src_ref;
int dst_token;
// Find a match
do
{
h = h_fwd;
var step = findMatchAttempts++ >> SKIPSTRENGTH;
src_p = src_p_fwd;
src_p_fwd = src_p + step;
if (src_p_fwd > src_mflimit) goto _last_literals;
h_fwd = (((Peek4(src, src_p_fwd)) * 2654435761u) >> HASH_ADJUST);
src_ref = src_base + hash_table[h];
hash_table[h] = (src_p - src_base);
} while ((src_ref < src_p - MAX_DISTANCE) || (!Equal4(src, src_ref, src_p)));
// Catch up
while ((src_p > src_anchor) && (src_ref > src_0) && (src[src_p - 1] == src[src_ref - 1]))
{
src_p--;
src_ref--;
}
// Encode Literal length
length = (src_p - src_anchor);
dst_token = dst_p++;
if (dst_p + length + (length >> 8) > dst_LASTLITERALS_3) return 0; // Check output limit
if (length >= RUN_MASK)
{
var len = length - RUN_MASK;
dst[dst_token] = (RUN_MASK << ML_BITS);
if (len > 254)
{
do
{
dst[dst_p++] = 255;
len -= 255;
} while (len > 254);
dst[dst_p++] = (byte)len;
BlockCopy(src, src_anchor, dst, dst_p, length);
dst_p += length;
goto _next_match;
}
else
dst[dst_p++] = (byte)len;
}
else
{
dst[dst_token] = (byte)(length << ML_BITS);
}
// Copy Literals
if (length > 0)
{
_i = dst_p + length;
WildCopy(src, src_anchor, dst, dst_p, _i);
dst_p = _i;
}
_next_match:
// Encode Offset
Poke2(dst, dst_p, (ushort)(src_p - src_ref));
dst_p += 2;
// Start Counting
src_p += MINMATCH;
src_ref += MINMATCH; // MinMatch already verified
src_anchor = src_p;
while (src_p < src_LASTLITERALS_STEPSIZE_1)
{
var diff = (int)Xor4(src, src_ref, src_p);
if (diff == 0)
{
src_p += STEPSIZE_32;
src_ref += STEPSIZE_32;
continue;
}
src_p += debruijn32[((uint)((diff) & -(diff)) * 0x077CB531u) >> 27];
goto _endCount;
}
if ((src_p < src_LASTLITERALS_1) && (Equal2(src, src_ref, src_p)))
{
src_p += 2;
src_ref += 2;
}
if ((src_p < src_LASTLITERALS) && (src[src_ref] == src[src_p])) src_p++;
_endCount:
// Encode MatchLength
length = (src_p - src_anchor);
if (dst_p + (length >> 8) > dst_LASTLITERALS_1) return 0; // Check output limit
if (length >= ML_MASK)
{
dst[dst_token] += ML_MASK;
length -= ML_MASK;
for (; length > 509; length -= 510)
{
dst[dst_p++] = 255;
dst[dst_p++] = 255;
}
if (length > 254)
{
length -= 255;
dst[dst_p++] = 255;
}
dst[dst_p++] = (byte)length;
}
else
{
dst[dst_token] += (byte)length;
}
// Test end of chunk
if (src_p > src_mflimit)
{
src_anchor = src_p;
break;
}
// Fill table
hash_table[(((Peek4(src, src_p - 2)) * 2654435761u) >> HASH_ADJUST)] = (src_p - 2 - src_base);
// Test next position
h = (((Peek4(src, src_p)) * 2654435761u) >> HASH_ADJUST);
src_ref = src_base + hash_table[h];
hash_table[h] = (src_p - src_base);
if ((src_ref > src_p - (MAX_DISTANCE + 1)) && (Equal4(src, src_ref, src_p)))
{
dst_token = dst_p++;
dst[dst_token] = 0;
goto _next_match;
}
// Prepare next loop
src_anchor = src_p++;
h_fwd = (((Peek4(src, src_p)) * 2654435761u) >> HASH_ADJUST);
}
_last_literals:
// Encode Last Literals
{
var lastRun = (src_end - src_anchor);
if (dst_p + lastRun + 1 + ((lastRun + 255 - RUN_MASK) / 255) > dst_end) return 0;
if (lastRun >= RUN_MASK)
{
dst[dst_p++] = (RUN_MASK << ML_BITS);
lastRun -= RUN_MASK;
for (; lastRun > 254; lastRun -= 255) dst[dst_p++] = 255;
dst[dst_p++] = (byte)lastRun;
}
else dst[dst_p++] = (byte)(lastRun << ML_BITS);
BlockCopy(src, src_anchor, dst, dst_p, src_end - src_anchor);
dst_p += src_end - src_anchor;
}
// End
return ((dst_p) - dst_0);
}
}
#endregion
#region LZ4_compress64kCtx
private static int LZ4_compress64kCtx_safe32(
ushort[] hash_table,
byte[] src,
byte[] dst,
int src_0,
int dst_0,
int src_len,
int dst_maxlen)
{
unchecked
{
var debruijn32 = DEBRUIJN_TABLE_32;
int _i;
// ---- preprocessed source start here ----
// r93
var src_p = src_0;
var src_anchor = src_p;
var src_base = src_p;
var src_end = src_p + src_len;
var src_mflimit = src_end - MFLIMIT;
var dst_p = dst_0;
var dst_end = dst_p + dst_maxlen;
var src_LASTLITERALS = src_end - LASTLITERALS;
var src_LASTLITERALS_1 = src_LASTLITERALS - 1;
var src_LASTLITERALS_STEPSIZE_1 = src_LASTLITERALS - (STEPSIZE_32 - 1);
var dst_LASTLITERALS_1 = dst_end - (1 + LASTLITERALS);
var dst_LASTLITERALS_3 = dst_end - (2 + 1 + LASTLITERALS);
int len, length;
uint h, h_fwd;
// Init
if (src_len < MINLENGTH) goto _last_literals;
// First Byte
src_p++;
h_fwd = (((Peek4(src, src_p)) * 2654435761u) >> HASH64K_ADJUST);
// Main Loop
while (true)
{
var findMatchAttempts = (1 << SKIPSTRENGTH) + 3;
var src_p_fwd = src_p;
int src_ref;
int dst_token;
// Find a match
do
{
h = h_fwd;
var step = findMatchAttempts++ >> SKIPSTRENGTH;
src_p = src_p_fwd;
src_p_fwd = src_p + step;
if (src_p_fwd > src_mflimit) goto _last_literals;
h_fwd = (((Peek4(src, src_p_fwd)) * 2654435761u) >> HASH64K_ADJUST);
src_ref = src_base + hash_table[h];
hash_table[h] = (ushort)(src_p - src_base);
} while (!Equal4(src, src_ref, src_p));
// Catch up
while ((src_p > src_anchor) && (src_ref > src_0) && (src[src_p - 1] == src[src_ref - 1]))
{
src_p--;
src_ref--;
}
// Encode Literal length
length = (src_p - src_anchor);
dst_token = dst_p++;
if (dst_p + length + (length >> 8) > dst_LASTLITERALS_3) return 0; // Check output limit
if (length >= RUN_MASK)
{
len = length - RUN_MASK;
dst[dst_token] = (RUN_MASK << ML_BITS);
if (len > 254)
{
do
{
dst[dst_p++] = 255;
len -= 255;
} while (len > 254);
dst[dst_p++] = (byte)len;
BlockCopy(src, src_anchor, dst, dst_p, length);
dst_p += length;
goto _next_match;
}
else
{
dst[dst_p++] = (byte)len;
}
}
else
{
dst[dst_token] = (byte)(length << ML_BITS);
}
// Copy Literals
if (length > 0)
{
_i = dst_p + length;
WildCopy(src, src_anchor, dst, dst_p, _i);
dst_p = _i;
}
_next_match:
// Encode Offset
Poke2(dst, dst_p, (ushort)(src_p - src_ref));
dst_p += 2;
// Start Counting
src_p += MINMATCH;
src_ref += MINMATCH; // MinMatch verified
src_anchor = src_p;
while (src_p < src_LASTLITERALS_STEPSIZE_1)
{
var diff = (int)Xor4(src, src_ref, src_p);
if (diff == 0)
{
src_p += STEPSIZE_32;
src_ref += STEPSIZE_32;
continue;
}
src_p += debruijn32[((uint)((diff) & -(diff)) * 0x077CB531u) >> 27];
goto _endCount;
}
if ((src_p < src_LASTLITERALS_1) && (Equal2(src, src_ref, src_p)))
{
src_p += 2;
src_ref += 2;
}
if ((src_p < src_LASTLITERALS) && (src[src_ref] == src[src_p])) src_p++;
_endCount:
// Encode MatchLength
len = (src_p - src_anchor);
if (dst_p + (len >> 8) > dst_LASTLITERALS_1) return 0; // Check output limit
if (len >= ML_MASK)
{
dst[dst_token] += ML_MASK;
len -= ML_MASK;
for (; len > 509; len -= 510)
{
dst[dst_p++] = 255;
dst[dst_p++] = 255;
}
if (len > 254)
{
len -= 255;
dst[dst_p++] = 255;
}
dst[dst_p++] = (byte)len;
}
else
{
dst[dst_token] += (byte)len;
}
// Test end of chunk
if (src_p > src_mflimit)
{
src_anchor = src_p;
break;
}
// Fill table
hash_table[(((Peek4(src, src_p - 2)) * 2654435761u) >> HASH64K_ADJUST)] = (ushort)(src_p - 2 - src_base);
// Test next position
h = (((Peek4(src, src_p)) * 2654435761u) >> HASH64K_ADJUST);
src_ref = src_base + hash_table[h];
hash_table[h] = (ushort)(src_p - src_base);
if (Equal4(src, src_ref, src_p))
{
dst_token = dst_p++;
dst[dst_token] = 0;
goto _next_match;
}
// Prepare next loop
src_anchor = src_p++;
h_fwd = (((Peek4(src, src_p)) * 2654435761u) >> HASH64K_ADJUST);
}
_last_literals:
// Encode Last Literals
var lastRun = (src_end - src_anchor);
if (dst_p + lastRun + 1 + (lastRun - RUN_MASK + 255) / 255 > dst_end) return 0;
if (lastRun >= RUN_MASK)
{
dst[dst_p++] = (RUN_MASK << ML_BITS);
lastRun -= RUN_MASK;
for (; lastRun > 254; lastRun -= 255) dst[dst_p++] = 255;
dst[dst_p++] = (byte)lastRun;
}
else
{
dst[dst_p++] = (byte)(lastRun << ML_BITS);
}
BlockCopy(src, src_anchor, dst, dst_p, src_end - src_anchor);
dst_p += src_end - src_anchor;
// End
return ((dst_p) - dst_0);
}
}
#endregion
#region LZ4_uncompress
private static int LZ4_uncompress_safe32(
byte[] src,
byte[] dst,
int src_0,
int dst_0,
int dst_len)
{
unchecked
{
var dec32table = DECODER_TABLE_32;
int _i;
// ---- preprocessed source start here ----
// r93
var src_p = src_0;
int dst_ref;
var dst_p = dst_0;
var dst_end = dst_p + dst_len;
int dst_cpy;
var dst_LASTLITERALS = dst_end - LASTLITERALS;
var dst_COPYLENGTH = dst_end - COPYLENGTH;
var dst_COPYLENGTH_STEPSIZE_4 = dst_end - COPYLENGTH - (STEPSIZE_32 - 4);
byte token;
// Main Loop
while (true)
{
int length;
// get runlength
token = src[src_p++];
if ((length = (token >> ML_BITS)) == RUN_MASK)
{
int len;
for (; (len = src[src_p++]) == 255; length += 255)
{
/* do nothing */
}
length += len;
}
// copy literals
dst_cpy = dst_p + length;
if (dst_cpy > dst_COPYLENGTH)
{
if (dst_cpy != dst_end) goto _output_error; // Error : not enough place for another match (min 4) + 5 literals
BlockCopy(src, src_p, dst, dst_p, length);
src_p += length;
break; // EOF
}
if (dst_p < dst_cpy)
{
_i = WildCopy(src, src_p, dst, dst_p, dst_cpy);
src_p += _i;
dst_p += _i;
}
src_p -= (dst_p - dst_cpy);
dst_p = dst_cpy;
// get offset
dst_ref = (dst_cpy) - Peek2(src, src_p);
src_p += 2;
if (dst_ref < dst_0) goto _output_error; // Error : offset outside destination buffer
// get matchlength
if ((length = (token & ML_MASK)) == ML_MASK)
{
for (; src[src_p] == 255; length += 255) src_p++;
length += src[src_p++];
}
// copy repeated sequence
if ((dst_p - dst_ref) < STEPSIZE_32)
{
const int dec64 = 0;
dst[dst_p + 0] = dst[dst_ref + 0];
dst[dst_p + 1] = dst[dst_ref + 1];
dst[dst_p + 2] = dst[dst_ref + 2];
dst[dst_p + 3] = dst[dst_ref + 3];
dst_p += 4;
dst_ref += 4;
dst_ref -= dec32table[dst_p - dst_ref];
Copy4(dst, dst_ref, dst_p);
dst_p += STEPSIZE_32 - 4;
dst_ref -= dec64;
}
else
{
Copy4(dst, dst_ref, dst_p);
dst_p += 4;
dst_ref += 4;
}
dst_cpy = dst_p + length - (STEPSIZE_32 - 4);
if (dst_cpy > dst_COPYLENGTH_STEPSIZE_4)
{
if (dst_cpy > dst_LASTLITERALS) goto _output_error; // Error : last 5 bytes must be literals
if (dst_p < dst_COPYLENGTH)
{
_i = SecureCopy(dst, dst_ref, dst_p, dst_COPYLENGTH);
dst_ref += _i;
dst_p += _i;
}
while (dst_p < dst_cpy) dst[dst_p++] = dst[dst_ref++];
dst_p = dst_cpy;
continue;
}
if (dst_p < dst_cpy)
{
SecureCopy(dst, dst_ref, dst_p, dst_cpy);
}
dst_p = dst_cpy; // correction
}
// end of decoding
return ((src_p) - src_0);
// write overflow error detected
_output_error:
return (-((src_p) - src_0));
}
}
#endregion
}
}
// ReSharper restore RedundantIfElseBlock
// ReSharper restore JoinDeclarationAndInitializer
// ReSharper restore TooWideLocalVariableScope
// ReSharper restore InconsistentNaming
// ReSharper restore CheckNamespace


@ -0,0 +1,682 @@
//------------------------------------------------------------------------------
// <auto-generated />
// This file was automatically generated by the UpdateVendors tool.
//------------------------------------------------------------------------------
#region LZ4 original
/*
LZ4 - Fast LZ compression algorithm
Copyright (C) 2011-2012, Yann Collet.
BSD 2-Clause License (http://www.opensource.org/licenses/bsd-license.php)
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above
copyright notice, this list of conditions and the following disclaimer
in the documentation and/or other materials provided with the
distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
You can contact the author at :
- LZ4 homepage : http://fastcompression.blogspot.com/p/lz4.html
- LZ4 source repository : http://code.google.com/p/lz4/
*/
#endregion
#region LZ4 port
/*
Copyright (c) 2013, Milosz Krajewski
All rights reserved.
Redistribution and use in source and binary forms, with or without modification, are permitted provided
that the following conditions are met:
* Redistributions of source code must retain the above copyright notice, this list of conditions
and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright notice, this list of conditions
and the following disclaimer in the documentation and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED
WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#endregion
// ReSharper disable CheckNamespace
// ReSharper disable InconsistentNaming
// ReSharper disable TooWideLocalVariableScope
// ReSharper disable JoinDeclarationAndInitializer
// ReSharper disable RedundantIfElseBlock
namespace Datadog.Trace.Vendors.MessagePack.LZ4
{
internal static partial class LZ4Codec
{
#region LZ4_compressCtx
private static int LZ4_compressCtx_safe64(
int[] hash_table,
byte[] src,
byte[] dst,
int src_0,
int dst_0,
int src_len,
int dst_maxlen)
{
unchecked
{
var debruijn64 = DEBRUIJN_TABLE_64;
int _i;
// ---- preprocessed source start here ----
// r93
var src_p = src_0;
var src_base = src_0;
var src_anchor = src_p;
var src_end = src_p + src_len;
var src_mflimit = src_end - MFLIMIT;
var dst_p = dst_0;
var dst_end = dst_p + dst_maxlen;
var src_LASTLITERALS = src_end - LASTLITERALS;
var src_LASTLITERALS_1 = src_LASTLITERALS - 1;
var src_LASTLITERALS_3 = src_LASTLITERALS - 3;
var src_LASTLITERALS_STEPSIZE_1 = src_LASTLITERALS - (STEPSIZE_64 - 1);
var dst_LASTLITERALS_1 = dst_end - (1 + LASTLITERALS);
var dst_LASTLITERALS_3 = dst_end - (2 + 1 + LASTLITERALS);
int length;
uint h, h_fwd;
// Init
if (src_len < MINLENGTH) goto _last_literals;
// First Byte
hash_table[(((Peek4(src, src_p)) * 2654435761u) >> HASH_ADJUST)] = (src_p - src_base);
src_p++;
h_fwd = (((Peek4(src, src_p)) * 2654435761u) >> HASH_ADJUST);
// Main Loop
while (true)
{
var findMatchAttempts = (1 << SKIPSTRENGTH) + 3;
var src_p_fwd = src_p;
int src_ref;
int dst_token;
// Find a match
do
{
h = h_fwd;
var step = findMatchAttempts++ >> SKIPSTRENGTH;
src_p = src_p_fwd;
src_p_fwd = src_p + step;
if (src_p_fwd > src_mflimit) goto _last_literals;
h_fwd = (((Peek4(src, src_p_fwd)) * 2654435761u) >> HASH_ADJUST);
src_ref = src_base + hash_table[h];
hash_table[h] = (src_p - src_base);
} while ((src_ref < src_p - MAX_DISTANCE) || (!Equal4(src, src_ref, src_p)));
// Catch up
while ((src_p > src_anchor) && (src_ref > src_0) && (src[src_p - 1] == src[src_ref - 1]))
{
src_p--;
src_ref--;
}
// Encode Literal length
length = (src_p - src_anchor);
dst_token = dst_p++;
if (dst_p + length + (length >> 8) > dst_LASTLITERALS_3) return 0; // Check output limit
if (length >= RUN_MASK)
{
var len = length - RUN_MASK;
dst[dst_token] = (RUN_MASK << ML_BITS);
if (len > 254)
{
do
{
dst[dst_p++] = 255;
len -= 255;
} while (len > 254);
dst[dst_p++] = (byte)len;
BlockCopy(src, src_anchor, dst, dst_p, length);
dst_p += length;
goto _next_match;
}
else
dst[dst_p++] = (byte)len;
}
else
{
dst[dst_token] = (byte)(length << ML_BITS);
}
// Copy Literals
if (length > 0)
{
_i = dst_p + length;
WildCopy(src, src_anchor, dst, dst_p, _i);
dst_p = _i;
}
_next_match:
// Encode Offset
Poke2(dst, dst_p, (ushort)(src_p - src_ref));
dst_p += 2;
// Start Counting
src_p += MINMATCH;
src_ref += MINMATCH; // MinMatch already verified
src_anchor = src_p;
while (src_p < src_LASTLITERALS_STEPSIZE_1)
{
var diff = (long)Xor8(src, src_ref, src_p);
if (diff == 0)
{
src_p += STEPSIZE_64;
src_ref += STEPSIZE_64;
continue;
}
src_p += debruijn64[((ulong)((diff) & -(diff)) * 0x0218A392CDABBD3FL) >> 58];
goto _endCount;
}
if ((src_p < src_LASTLITERALS_3) && (Equal4(src, src_ref, src_p)))
{
src_p += 4;
src_ref += 4;
}
if ((src_p < src_LASTLITERALS_1) && (Equal2(src, src_ref, src_p)))
{
src_p += 2;
src_ref += 2;
}
if ((src_p < src_LASTLITERALS) && (src[src_ref] == src[src_p])) src_p++;
_endCount:
// Encode MatchLength
length = (src_p - src_anchor);
if (dst_p + (length >> 8) > dst_LASTLITERALS_1) return 0; // Check output limit
if (length >= ML_MASK)
{
dst[dst_token] += ML_MASK;
length -= ML_MASK;
for (; length > 509; length -= 510)
{
dst[dst_p++] = 255;
dst[dst_p++] = 255;
}
if (length > 254)
{
length -= 255;
dst[dst_p++] = 255;
}
dst[dst_p++] = (byte)length;
}
else
{
dst[dst_token] += (byte)length;
}
// Test end of chunk
if (src_p > src_mflimit)
{
src_anchor = src_p;
break;
}
// Fill table
hash_table[(((Peek4(src, src_p - 2)) * 2654435761u) >> HASH_ADJUST)] = (src_p - 2 - src_base);
// Test next position
h = (((Peek4(src, src_p)) * 2654435761u) >> HASH_ADJUST);
src_ref = src_base + hash_table[h];
hash_table[h] = (src_p - src_base);
if ((src_ref > src_p - (MAX_DISTANCE + 1)) && (Equal4(src, src_ref, src_p)))
{
dst_token = dst_p++;
dst[dst_token] = 0;
goto _next_match;
}
// Prepare next loop
src_anchor = src_p++;
h_fwd = (((Peek4(src, src_p)) * 2654435761u) >> HASH_ADJUST);
}
_last_literals:
// Encode Last Literals
{
var lastRun = (src_end - src_anchor);
if (dst_p + lastRun + 1 + ((lastRun + 255 - RUN_MASK) / 255) > dst_end) return 0;
if (lastRun >= RUN_MASK)
{
dst[dst_p++] = (RUN_MASK << ML_BITS);
lastRun -= RUN_MASK;
for (; lastRun > 254; lastRun -= 255) dst[dst_p++] = 255;
dst[dst_p++] = (byte)lastRun;
}
else dst[dst_p++] = (byte)(lastRun << ML_BITS);
BlockCopy(src, src_anchor, dst, dst_p, src_end - src_anchor);
dst_p += src_end - src_anchor;
}
// End
return (dst_p - dst_0);
}
}
#endregion
#region LZ4_compress64kCtx
private static int LZ4_compress64kCtx_safe64(
ushort[] hash_table,
byte[] src,
byte[] dst,
int src_0,
int dst_0,
int src_len,
int dst_maxlen)
{
unchecked
{
var debruijn64 = DEBRUIJN_TABLE_64;
int _i;
// ---- preprocessed source start here ----
// r93
var src_p = src_0;
var src_anchor = src_p;
var src_base = src_p;
var src_end = src_p + src_len;
var src_mflimit = src_end - MFLIMIT;
var dst_p = dst_0;
var dst_end = dst_p + dst_maxlen;
var src_LASTLITERALS = src_end - LASTLITERALS;
var src_LASTLITERALS_1 = src_LASTLITERALS - 1;
var src_LASTLITERALS_3 = src_LASTLITERALS - 3;
var src_LASTLITERALS_STEPSIZE_1 = src_LASTLITERALS - (STEPSIZE_64 - 1);
var dst_LASTLITERALS_1 = dst_end - (1 + LASTLITERALS);
var dst_LASTLITERALS_3 = dst_end - (2 + 1 + LASTLITERALS);
int len, length;
uint h, h_fwd;
// Init
if (src_len < MINLENGTH) goto _last_literals;
// First Byte
src_p++;
h_fwd = (((Peek4(src, src_p)) * 2654435761u) >> HASH64K_ADJUST);
// Main Loop
while (true)
{
var findMatchAttempts = (1 << SKIPSTRENGTH) + 3;
var src_p_fwd = src_p;
int src_ref;
int dst_token;
// Find a match
do
{
h = h_fwd;
var step = findMatchAttempts++ >> SKIPSTRENGTH;
src_p = src_p_fwd;
src_p_fwd = src_p + step;
if (src_p_fwd > src_mflimit) goto _last_literals;
h_fwd = (((Peek4(src, src_p_fwd)) * 2654435761u) >> HASH64K_ADJUST);
src_ref = src_base + hash_table[h];
hash_table[h] = (ushort)(src_p - src_base);
} while (!Equal4(src, src_ref, src_p));
// Catch up
while ((src_p > src_anchor) && (src_ref > src_0) && (src[src_p - 1] == src[src_ref - 1]))
{
src_p--;
src_ref--;
}
// Encode Literal length
length = (src_p - src_anchor);
dst_token = dst_p++;
if (dst_p + length + (length >> 8) > dst_LASTLITERALS_3) return 0; // Check output limit
if (length >= RUN_MASK)
{
len = length - RUN_MASK;
dst[dst_token] = (RUN_MASK << ML_BITS);
if (len > 254)
{
do
{
dst[dst_p++] = 255;
len -= 255;
} while (len > 254);
dst[dst_p++] = (byte)len;
BlockCopy(src, src_anchor, dst, dst_p, length);
dst_p += length;
goto _next_match;
}
else
dst[dst_p++] = (byte)len;
}
else
{
dst[dst_token] = (byte)(length << ML_BITS);
}
// Copy Literals
if (length > 0) /*?*/
{
_i = dst_p + length;
WildCopy(src, src_anchor, dst, dst_p, _i);
dst_p = _i;
}
_next_match:
// Encode Offset
Poke2(dst, dst_p, (ushort)(src_p - src_ref));
dst_p += 2;
// Start Counting
src_p += MINMATCH;
src_ref += MINMATCH; // MinMatch verified
src_anchor = src_p;
while (src_p < src_LASTLITERALS_STEPSIZE_1)
{
var diff = (long)Xor8(src, src_ref, src_p);
if (diff == 0)
{
src_p += STEPSIZE_64;
src_ref += STEPSIZE_64;
continue;
}
src_p += debruijn64[((ulong)((diff) & -(diff)) * 0x0218A392CDABBD3FL) >> 58];
goto _endCount;
}
if ((src_p < src_LASTLITERALS_3) && (Equal4(src, src_ref, src_p)))
{
src_p += 4;
src_ref += 4;
}
if ((src_p < src_LASTLITERALS_1) && (Equal2(src, src_ref, src_p)))
{
src_p += 2;
src_ref += 2;
}
if ((src_p < src_LASTLITERALS) && (src[src_ref] == src[src_p])) src_p++;
_endCount:
// Encode MatchLength
len = (src_p - src_anchor);
if (dst_p + (len >> 8) > dst_LASTLITERALS_1) return 0; // Check output limit
if (len >= ML_MASK)
{
dst[dst_token] += ML_MASK;
len -= ML_MASK;
for (; len > 509; len -= 510)
{
dst[dst_p++] = 255;
dst[dst_p++] = 255;
}
if (len > 254)
{
len -= 255;
dst[dst_p++] = 255;
}
dst[dst_p++] = (byte)len;
}
else
{
dst[dst_token] += (byte)len;
}
// Test end of chunk
if (src_p > src_mflimit)
{
src_anchor = src_p;
break;
}
// Fill table
hash_table[(((Peek4(src, src_p - 2)) * 2654435761u) >> HASH64K_ADJUST)] = (ushort)(src_p - 2 - src_base);
// Test next position
h = (((Peek4(src, src_p)) * 2654435761u) >> HASH64K_ADJUST);
src_ref = src_base + hash_table[h];
hash_table[h] = (ushort)(src_p - src_base);
if (Equal4(src, src_ref, src_p))
{
dst_token = dst_p++;
dst[dst_token] = 0;
goto _next_match;
}
// Prepare next loop
src_anchor = src_p++;
h_fwd = (((Peek4(src, src_p)) * 2654435761u) >> HASH64K_ADJUST);
}
_last_literals:
// Encode Last Literals
{
var lastRun = (src_end - src_anchor);
if (dst_p + lastRun + 1 + (lastRun - RUN_MASK + 255) / 255 > dst_end) return 0;
if (lastRun >= RUN_MASK)
{
dst[dst_p++] = (RUN_MASK << ML_BITS);
lastRun -= RUN_MASK;
for (; lastRun > 254; lastRun -= 255) dst[dst_p++] = 255;
dst[dst_p++] = (byte)lastRun;
}
else dst[dst_p++] = (byte)(lastRun << ML_BITS);
BlockCopy(src, src_anchor, dst, dst_p, src_end - src_anchor);
dst_p += src_end - src_anchor;
}
// End
return (dst_p - dst_0);
}
}
#endregion
#region LZ4_uncompress
private static int LZ4_uncompress_safe64(
byte[] src,
byte[] dst,
int src_0,
int dst_0,
int dst_len)
{
unchecked
{
var dec32table = DECODER_TABLE_32;
var dec64table = DECODER_TABLE_64;
int _i;
// ---- preprocessed source start here ----
// r93
var src_p = src_0;
int dst_ref;
var dst_p = dst_0;
var dst_end = dst_p + dst_len;
int dst_cpy;
var dst_LASTLITERALS = dst_end - LASTLITERALS;
var dst_COPYLENGTH = dst_end - COPYLENGTH;
var dst_COPYLENGTH_STEPSIZE_4 = dst_end - COPYLENGTH - (STEPSIZE_64 - 4);
uint token;
// Main Loop
while (true)
{
int length;
// get runlength
token = src[src_p++];
if ((length = (byte)(token >> ML_BITS)) == RUN_MASK)
{
int len;
for (; (len = src[src_p++]) == 255; length += 255)
{
/* do nothing */
}
length += len;
}
// copy literals
dst_cpy = dst_p + length;
if (dst_cpy > dst_COPYLENGTH)
{
if (dst_cpy != dst_end) goto _output_error; // Error : not enough place for another match (min 4) + 5 literals
BlockCopy(src, src_p, dst, dst_p, length);
src_p += length;
break; // EOF
}
if (dst_p < dst_cpy) /*?*/
{
_i = WildCopy(src, src_p, dst, dst_p, dst_cpy);
src_p += _i;
dst_p += _i;
}
src_p -= (dst_p - dst_cpy);
dst_p = dst_cpy;
// get offset
dst_ref = (dst_cpy) - Peek2(src, src_p);
src_p += 2;
if (dst_ref < dst_0) goto _output_error; // Error : offset outside destination buffer
// get matchlength
if ((length = (byte)(token & ML_MASK)) == ML_MASK)
{
for (; src[src_p] == 255; length += 255) src_p++;
length += src[src_p++];
}
// copy repeated sequence
if ((dst_p - dst_ref) < STEPSIZE_64)
{
var dec64 = dec64table[dst_p - dst_ref];
dst[dst_p + 0] = dst[dst_ref + 0];
dst[dst_p + 1] = dst[dst_ref + 1];
dst[dst_p + 2] = dst[dst_ref + 2];
dst[dst_p + 3] = dst[dst_ref + 3];
dst_p += 4;
dst_ref += 4;
dst_ref -= dec32table[dst_p - dst_ref];
Copy4(dst, dst_ref, dst_p);
dst_p += STEPSIZE_64 - 4;
dst_ref -= dec64;
}
else
{
Copy8(dst, dst_ref, dst_p);
dst_p += 8;
dst_ref += 8;
}
dst_cpy = dst_p + length - (STEPSIZE_64 - 4);
if (dst_cpy > dst_COPYLENGTH_STEPSIZE_4)
{
if (dst_cpy > dst_LASTLITERALS) goto _output_error; // Error : last 5 bytes must be literals
if (dst_p < dst_COPYLENGTH)
{
_i = SecureCopy(dst, dst_ref, dst_p, dst_COPYLENGTH);
dst_ref += _i;
dst_p += _i;
}
while (dst_p < dst_cpy) dst[dst_p++] = dst[dst_ref++];
dst_p = dst_cpy;
continue;
}
if (dst_p < dst_cpy)
{
SecureCopy(dst, dst_ref, dst_p, dst_cpy);
}
dst_p = dst_cpy; // correction
}
// end of decoding
return ((src_p) - src_0);
_output_error:
// write overflow error detected
return (-((src_p) - src_0));
}
}
#endregion
}
}
// ReSharper restore RedundantIfElseBlock
// ReSharper restore JoinDeclarationAndInitializer
// ReSharper restore TooWideLocalVariableScope
// ReSharper restore InconsistentNaming
// ReSharper restore CheckNamespace


@ -0,0 +1,226 @@
//------------------------------------------------------------------------------
// <auto-generated />
// This file was automatically generated by the UpdateVendors tool.
//------------------------------------------------------------------------------
#if ENABLE_UNSAFE_MSGPACK
#region license
/*
Copyright (c) 2013, Milosz Krajewski
All rights reserved.
Redistribution and use in source and binary forms, with or without modification, are permitted provided
that the following conditions are met:
* Redistributions of source code must retain the above copyright notice, this list of conditions
and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright notice, this list of conditions
and the following disclaimer in the documentation and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED
WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#endregion
using System;
namespace Datadog.Trace.Vendors.MessagePack.LZ4
{
/// <summary>Unsafe LZ4 codec.</summary>
internal static partial class LZ4Codec
{
/// <summary>Copies block of memory.</summary>
/// <param name="src">The source.</param>
/// <param name="dst">The destination.</param>
/// <param name="len">The length (in bytes).</param>
private static unsafe void BlockCopy(byte* src, byte* dst, int len)
{
while (len >= 8)
{
*(ulong*)dst = *(ulong*)src;
dst += 8;
src += 8;
len -= 8;
}
if (len >= 4)
{
*(uint*)dst = *(uint*)src;
dst += 4;
src += 4;
len -= 4;
}
if (len >= 2)
{
*(ushort*)dst = *(ushort*)src;
dst += 2;
src += 2;
len -= 2;
}
if (len >= 1)
{
*dst = *src; /* d++; s++; l--; */
}
}
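        // Illustrative note (not part of the vendored upstream source): BlockCopy drains the
        // remaining length in progressively smaller word-sized chunks, so a 15-byte copy is
        // performed as one 8-byte, one 4-byte, one 2-byte and one 1-byte move. A minimal usage
        // sketch, assuming two pinned managed buffers:
        //
        //     byte[] source = new byte[15], destination = new byte[15];
        //     fixed (byte* s = source)
        //     fixed (byte* d = destination)
        //     {
        //         BlockCopy(s, d, source.Length); // copies 8 + 4 + 2 + 1 bytes
        //     }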
/// <summary>Encodes the specified input.</summary>
/// <param name="input">The input.</param>
/// <param name="inputOffset">The input offset.</param>
/// <param name="inputLength">Length of the input.</param>
/// <param name="output">The output.</param>
/// <param name="outputOffset">The output offset.</param>
/// <param name="outputLength">Length of the output.</param>
/// <returns>Number of bytes written.</returns>
public static unsafe int Encode32Unsafe(
byte[] input,
int inputOffset,
int inputLength,
byte[] output,
int outputOffset,
int outputLength)
{
CheckArguments(input, inputOffset, inputLength, output, outputOffset, outputLength);
if (outputLength == 0) return 0;
fixed (byte* inputPtr = &input[inputOffset])
fixed (byte* outputPtr = &output[outputOffset])
{
if (inputLength < LZ4_64KLIMIT)
{
var uHashTable = HashTablePool.GetUShortHashTablePool();
fixed (ushort* hash1 = &uHashTable[0])
{
return LZ4_compress64kCtx_32(hash1, inputPtr, outputPtr, inputLength, outputLength);
}
}
else
{
var bHashTable = HashTablePool.GetUIntHashTablePool();
fixed (uint* hash2 = &bHashTable[0])
{
return LZ4_compressCtx_32(hash2, inputPtr, outputPtr, inputLength, outputLength);
}
}
}
}
/// <summary>Decodes the specified input.</summary>
/// <param name="input">The input.</param>
/// <param name="inputOffset">The input offset.</param>
/// <param name="inputLength">Length of the input.</param>
/// <param name="output">The output.</param>
/// <param name="outputOffset">The output offset.</param>
/// <param name="outputLength">Length of the output.</param>
/// <returns>Number of bytes written.</returns>
public static unsafe int Decode32Unsafe(
byte[] input,
int inputOffset,
int inputLength,
byte[] output,
int outputOffset,
int outputLength)
{
CheckArguments(
input, inputOffset, inputLength,
output, outputOffset, outputLength);
if (outputLength == 0) return 0;
fixed (byte* inputPtr = &input[inputOffset])
fixed (byte* outputPtr = &output[outputOffset])
{
var length = LZ4_uncompress_32(inputPtr, outputPtr, outputLength);
if (length != inputLength)
throw new ArgumentException("LZ4 block is corrupted, or invalid length has been given.");
return outputLength;
}
}
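        // Illustrative note (not part of the vendored upstream source): the Encode32Unsafe /
        // Decode32Unsafe pair is symmetric but not self-describing. The caller must keep both the
        // compressed length returned by Encode32Unsafe and the original payload length, because
        // Decode32Unsafe expects the exact decompressed size as outputLength. A minimal round-trip
        // sketch, assuming a worst-case bound helper such as MaximumOutputLength exists elsewhere
        // in this codec and a hypothetical GetPayload() source (both assumptions, not shown here):
        //
        //     byte[] input = GetPayload();                                  // hypothetical source data
        //     var bound = LZ4Codec.MaximumOutputLength(input.Length);       // assumed worst-case bound
        //     var compressed = new byte[bound];
        //     var written = LZ4Codec.Encode32Unsafe(input, 0, input.Length, compressed, 0, compressed.Length);
        //
        //     var restored = new byte[input.Length];                        // original length must be known
        //     LZ4Codec.Decode32Unsafe(compressed, 0, written, restored, 0, restored.Length);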
/// <summary>Encodes the specified input.</summary>
/// <param name="input">The input.</param>
/// <param name="inputOffset">The input offset.</param>
/// <param name="inputLength">Length of the input.</param>
/// <param name="output">The output.</param>
/// <param name="outputOffset">The output offset.</param>
/// <param name="outputLength">Length of the output.</param>
/// <returns>Number of bytes written.</returns>
public static unsafe int Encode64Unsafe(
byte[] input,
int inputOffset,
int inputLength,
byte[] output,
int outputOffset,
int outputLength)
{
CheckArguments(
input, inputOffset, inputLength,
output, outputOffset, outputLength);
if (outputLength == 0) return 0;
fixed (byte* inputPtr = &input[inputOffset])
fixed (byte* outputPtr = &output[outputOffset])
{
if (inputLength < LZ4_64KLIMIT)
{
var uHashTable = HashTablePool.GetUShortHashTablePool();
fixed (ushort* h1 = &uHashTable[0])
{
return LZ4_compress64kCtx_64(h1, inputPtr, outputPtr, inputLength, outputLength);
}
}
else
{
var uiHashTable = HashTablePool.GetUIntHashTablePool();
fixed (uint* h2 = &uiHashTable[0])
{
return LZ4_compressCtx_64(h2, inputPtr, outputPtr, inputLength, outputLength);
}
}
}
}
/// <summary>Decodes the specified input.</summary>
/// <param name="input">The input.</param>
/// <param name="inputOffset">The input offset.</param>
/// <param name="inputLength">Length of the input.</param>
/// <param name="output">The output.</param>
/// <param name="outputOffset">The output offset.</param>
/// <param name="outputLength">Length of the output.</param>
/// <returns>Number of bytes written.</returns>
public static unsafe int Decode64Unsafe(
byte[] input,
int inputOffset,
int inputLength,
byte[] output,
int outputOffset,
int outputLength)
{
CheckArguments(
input, inputOffset, inputLength,
output, outputOffset, outputLength);
if (outputLength == 0) return 0;
fixed (byte* inputPtr = &input[inputOffset])
fixed (byte* outputPtr = &output[outputOffset])
{
var length = LZ4_uncompress_64(inputPtr, outputPtr, outputLength);
if (length != inputLength)
throw new ArgumentException("LZ4 block is corrupted, or invalid length has been given.");
return outputLength;
}
}
}
}
#endif


@ -0,0 +1,684 @@
//------------------------------------------------------------------------------
// <auto-generated />
// This file was automatically generated by the UpdateVendors tool.
//------------------------------------------------------------------------------
#if ENABLE_UNSAFE_MSGPACK
#region LZ4 original
/*
LZ4 - Fast LZ compression algorithm
Copyright (C) 2011-2012, Yann Collet.
BSD 2-Clause License (http://www.opensource.org/licenses/bsd-license.php)
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above
copyright notice, this list of conditions and the following disclaimer
in the documentation and/or other materials provided with the
distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
You can contact the author at :
- LZ4 homepage : http://fastcompression.blogspot.com/p/lz4.html
- LZ4 source repository : http://code.google.com/p/lz4/
*/
#endregion
#region LZ4 port
/*
Copyright (c) 2013, Milosz Krajewski
All rights reserved.
Redistribution and use in source and binary forms, with or without modification, are permitted provided
that the following conditions are met:
* Redistributions of source code must retain the above copyright notice, this list of conditions
and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright notice, this list of conditions
and the following disclaimer in the documentation and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED
WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#endregion
// ReSharper disable InconsistentNaming
// ReSharper disable TooWideLocalVariableScope
// ReSharper disable JoinDeclarationAndInitializer
namespace Datadog.Trace.Vendors.MessagePack.LZ4
{
internal static partial class LZ4Codec
{
#region LZ4_compressCtx_32
private static unsafe int LZ4_compressCtx_32(
uint* hash_table,
byte* src,
byte* dst,
int src_len,
int dst_maxlen)
{
unchecked
{
byte* _p;
fixed (int* debruijn32 = &DEBRUIJN_TABLE_32[0])
{
// r93
var src_p = src;
var src_base = src_p;
var src_anchor = src_p;
var src_end = src_p + src_len;
var src_mflimit = src_end - MFLIMIT;
var dst_p = dst;
var dst_end = dst_p + dst_maxlen;
var src_LASTLITERALS = src_end - LASTLITERALS;
var src_LASTLITERALS_1 = src_LASTLITERALS - 1;
var src_LASTLITERALS_STEPSIZE_1 = src_LASTLITERALS - (STEPSIZE_32 - 1);
var dst_LASTLITERALS_1 = dst_end - (1 + LASTLITERALS);
var dst_LASTLITERALS_3 = dst_end - (2 + 1 + LASTLITERALS);
int length;
uint h, h_fwd;
// Init
if (src_len < MINLENGTH) goto _last_literals;
// First Byte
hash_table[((((*(uint*)(src_p))) * 2654435761u) >> HASH_ADJUST)] = (uint)(src_p - src_base);
src_p++;
h_fwd = ((((*(uint*)(src_p))) * 2654435761u) >> HASH_ADJUST);
// Main Loop
while (true)
{
var findMatchAttempts = (1 << SKIPSTRENGTH) + 3;
var src_p_fwd = src_p;
byte* xxx_ref;
byte* xxx_token;
// Find a match
do
{
h = h_fwd;
var step = findMatchAttempts++ >> SKIPSTRENGTH;
src_p = src_p_fwd;
src_p_fwd = src_p + step;
if (src_p_fwd > src_mflimit) goto _last_literals;
h_fwd = ((((*(uint*)(src_p_fwd))) * 2654435761u) >> HASH_ADJUST);
xxx_ref = src_base + hash_table[h];
hash_table[h] = (uint)(src_p - src_base);
} while ((xxx_ref < src_p - MAX_DISTANCE) || ((*(uint*)(xxx_ref)) != (*(uint*)(src_p))));
// Catch up
while ((src_p > src_anchor) && (xxx_ref > src) && (src_p[-1] == xxx_ref[-1]))
{
src_p--;
xxx_ref--;
}
// Encode Literal length
length = (int)(src_p - src_anchor);
xxx_token = dst_p++;
if (dst_p + length + (length >> 8) > dst_LASTLITERALS_3) return 0; // Check output limit
if (length >= RUN_MASK)
{
var len = length - RUN_MASK;
*xxx_token = (RUN_MASK << ML_BITS);
if (len > 254)
{
do
{
*dst_p++ = 255;
len -= 255;
} while (len > 254);
*dst_p++ = (byte)len;
BlockCopy(src_anchor, dst_p, (length));
dst_p += length;
goto _next_match;
}
*dst_p++ = (byte)len;
}
else
{
*xxx_token = (byte)(length << ML_BITS);
}
// Copy Literals
_p = dst_p + (length);
do
{
*(uint*)dst_p = *(uint*)src_anchor;
dst_p += 4;
src_anchor += 4;
*(uint*)dst_p = *(uint*)src_anchor;
dst_p += 4;
src_anchor += 4;
} while (dst_p < _p);
dst_p = _p;
_next_match:
// Encode Offset
*(ushort*)dst_p = (ushort)(src_p - xxx_ref);
dst_p += 2;
// Start Counting
src_p += MINMATCH;
xxx_ref += MINMATCH; // MinMatch already verified
src_anchor = src_p;
while (src_p < src_LASTLITERALS_STEPSIZE_1)
{
var diff = (*(int*)(xxx_ref)) ^ (*(int*)(src_p));
if (diff == 0)
{
src_p += STEPSIZE_32;
xxx_ref += STEPSIZE_32;
continue;
}
src_p += debruijn32[(((uint)((diff) & -(diff)) * 0x077CB531u)) >> 27];
goto _endCount;
}
if ((src_p < src_LASTLITERALS_1) && ((*(ushort*)(xxx_ref)) == (*(ushort*)(src_p))))
{
src_p += 2;
xxx_ref += 2;
}
if ((src_p < src_LASTLITERALS) && (*xxx_ref == *src_p)) src_p++;
_endCount:
// Encode MatchLength
length = (int)(src_p - src_anchor);
if (dst_p + (length >> 8) > dst_LASTLITERALS_1) return 0; // Check output limit
if (length >= ML_MASK)
{
*xxx_token += ML_MASK;
length -= ML_MASK;
for (; length > 509; length -= 510)
{
*dst_p++ = 255;
*dst_p++ = 255;
}
if (length > 254)
{
length -= 255;
*dst_p++ = 255;
}
*dst_p++ = (byte)length;
}
else
{
*xxx_token += (byte)length;
}
// Test end of chunk
if (src_p > src_mflimit)
{
src_anchor = src_p;
break;
}
// Fill table
hash_table[((((*(uint*)(src_p - 2))) * 2654435761u) >> HASH_ADJUST)] = (uint)(src_p - 2 - src_base);
// Test next position
h = ((((*(uint*)(src_p))) * 2654435761u) >> HASH_ADJUST);
xxx_ref = src_base + hash_table[h];
hash_table[h] = (uint)(src_p - src_base);
if ((xxx_ref > src_p - (MAX_DISTANCE + 1)) && ((*(uint*)(xxx_ref)) == (*(uint*)(src_p))))
{
xxx_token = dst_p++;
*xxx_token = 0;
goto _next_match;
}
// Prepare next loop
src_anchor = src_p++;
h_fwd = ((((*(uint*)(src_p))) * 2654435761u) >> HASH_ADJUST);
}
_last_literals:
// Encode Last Literals
{
var lastRun = (int)(src_end - src_anchor);
if (dst_p + lastRun + 1 + ((lastRun + 255 - RUN_MASK) / 255) > dst_end) return 0;
if (lastRun >= RUN_MASK)
{
*dst_p++ = (RUN_MASK << ML_BITS);
lastRun -= RUN_MASK;
for (; lastRun > 254; lastRun -= 255) *dst_p++ = 255;
*dst_p++ = (byte)lastRun;
}
else *dst_p++ = (byte)(lastRun << ML_BITS);
BlockCopy(src_anchor, dst_p, (int)(src_end - src_anchor));
dst_p += src_end - src_anchor;
}
// End
return (int)((dst_p) - dst);
}
}
}
#endregion
#region LZ4_compress64kCtx_32
private static unsafe int LZ4_compress64kCtx_32(
ushort* hash_table,
byte* src,
byte* dst,
int src_len,
int dst_maxlen)
{
unchecked
{
byte* _p;
fixed (int* debruijn32 = &DEBRUIJN_TABLE_32[0])
{
// r93
var src_p = src;
var src_anchor = src_p;
var src_base = src_p;
var src_end = src_p + src_len;
var src_mflimit = src_end - MFLIMIT;
var dst_p = dst;
var dst_end = dst_p + dst_maxlen;
var src_LASTLITERALS = src_end - LASTLITERALS;
var src_LASTLITERALS_1 = src_LASTLITERALS - 1;
var src_LASTLITERALS_STEPSIZE_1 = src_LASTLITERALS - (STEPSIZE_32 - 1);
var dst_LASTLITERALS_1 = dst_end - (1 + LASTLITERALS);
var dst_LASTLITERALS_3 = dst_end - (2 + 1 + LASTLITERALS);
int len, length;
uint h, h_fwd;
// Init
if (src_len < MINLENGTH) goto _last_literals;
// First Byte
src_p++;
h_fwd = ((((*(uint*)(src_p))) * 2654435761u) >> HASH64K_ADJUST);
// Main Loop
while (true)
{
var findMatchAttempts = (1 << SKIPSTRENGTH) + 3;
var src_p_fwd = src_p;
byte* xxx_ref;
byte* xxx_token;
// Find a match
do
{
h = h_fwd;
var step = findMatchAttempts++ >> SKIPSTRENGTH;
src_p = src_p_fwd;
src_p_fwd = src_p + step;
if (src_p_fwd > src_mflimit) goto _last_literals;
h_fwd = ((((*(uint*)(src_p_fwd))) * 2654435761u) >> HASH64K_ADJUST);
xxx_ref = src_base + hash_table[h];
hash_table[h] = (ushort)(src_p - src_base);
} while ((*(uint*)(xxx_ref)) != (*(uint*)(src_p)));
// Catch up
while ((src_p > src_anchor) && (xxx_ref > src) && (src_p[-1] == xxx_ref[-1]))
{
src_p--;
xxx_ref--;
}
// Encode Literal length
length = (int)(src_p - src_anchor);
xxx_token = dst_p++;
if (dst_p + length + (length >> 8) > dst_LASTLITERALS_3) return 0; // Check output limit
if (length >= RUN_MASK)
{
len = length - RUN_MASK;
*xxx_token = (RUN_MASK << ML_BITS);
if (len > 254)
{
do
{
*dst_p++ = 255;
len -= 255;
} while (len > 254);
*dst_p++ = (byte)len;
BlockCopy(src_anchor, dst_p, (length));
dst_p += length;
goto _next_match;
}
*dst_p++ = (byte)len;
}
else
{
*xxx_token = (byte)(length << ML_BITS);
}
// Copy Literals
_p = dst_p + (length);
do
{
*(uint*)dst_p = *(uint*)src_anchor;
dst_p += 4;
src_anchor += 4;
*(uint*)dst_p = *(uint*)src_anchor;
dst_p += 4;
src_anchor += 4;
} while (dst_p < _p);
dst_p = _p;
_next_match:
// Encode Offset
*(ushort*)dst_p = (ushort)(src_p - xxx_ref);
dst_p += 2;
// Start Counting
src_p += MINMATCH;
xxx_ref += MINMATCH; // MinMatch verified
src_anchor = src_p;
while (src_p < src_LASTLITERALS_STEPSIZE_1)
{
var diff = (*(int*)(xxx_ref)) ^ (*(int*)(src_p));
if (diff == 0)
{
src_p += STEPSIZE_32;
xxx_ref += STEPSIZE_32;
continue;
}
src_p += debruijn32[(((uint)((diff) & -(diff)) * 0x077CB531u)) >> 27];
goto _endCount;
}
if ((src_p < src_LASTLITERALS_1) && ((*(ushort*)(xxx_ref)) == (*(ushort*)(src_p))))
{
src_p += 2;
xxx_ref += 2;
}
if ((src_p < src_LASTLITERALS) && (*xxx_ref == *src_p)) src_p++;
_endCount:
// Encode MatchLength
len = (int)(src_p - src_anchor);
if (dst_p + (len >> 8) > dst_LASTLITERALS_1) return 0; // Check output limit
if (len >= ML_MASK)
{
*xxx_token += ML_MASK;
len -= ML_MASK;
for (; len > 509; len -= 510)
{
*dst_p++ = 255;
*dst_p++ = 255;
}
if (len > 254)
{
len -= 255;
*dst_p++ = 255;
}
*dst_p++ = (byte)len;
}
else *xxx_token += (byte)len;
// Test end of chunk
if (src_p > src_mflimit)
{
src_anchor = src_p;
break;
}
// Fill table
hash_table[((((*(uint*)(src_p - 2))) * 2654435761u) >> HASH64K_ADJUST)] = (ushort)(src_p - 2 - src_base);
// Test next position
h = ((((*(uint*)(src_p))) * 2654435761u) >> HASH64K_ADJUST);
xxx_ref = src_base + hash_table[h];
hash_table[h] = (ushort)(src_p - src_base);
if ((*(uint*)(xxx_ref)) == (*(uint*)(src_p)))
{
xxx_token = dst_p++;
*xxx_token = 0;
goto _next_match;
}
// Prepare next loop
src_anchor = src_p++;
h_fwd = ((((*(uint*)(src_p))) * 2654435761u) >> HASH64K_ADJUST);
}
_last_literals:
// Encode Last Literals
{
var lastRun = (int)(src_end - src_anchor);
if (dst_p + lastRun + 1 + (lastRun - RUN_MASK + 255) / 255 > dst_end) return 0;
if (lastRun >= RUN_MASK)
{
*dst_p++ = (RUN_MASK << ML_BITS);
lastRun -= RUN_MASK;
for (; lastRun > 254; lastRun -= 255) *dst_p++ = 255;
*dst_p++ = (byte)lastRun;
}
else *dst_p++ = (byte)(lastRun << ML_BITS);
BlockCopy(src_anchor, dst_p, (int)(src_end - src_anchor));
dst_p += src_end - src_anchor;
}
// End
return (int)((dst_p) - dst);
}
}
}
#endregion
#region LZ4_uncompress_32
private static unsafe int LZ4_uncompress_32(
byte* src,
byte* dst,
int dst_len)
{
unchecked
{
fixed (int* dec32table = &DECODER_TABLE_32[0])
{
// r93
var src_p = src;
byte* xxx_ref;
var dst_p = dst;
var dst_end = dst_p + dst_len;
byte* dst_cpy;
var dst_LASTLITERALS = dst_end - LASTLITERALS;
var dst_COPYLENGTH = dst_end - COPYLENGTH;
var dst_COPYLENGTH_STEPSIZE_4 = dst_end - COPYLENGTH - (STEPSIZE_32 - 4);
uint xxx_token;
// Main Loop
while (true)
{
int length;
// get runlength
xxx_token = *src_p++;
if ((length = (int)(xxx_token >> ML_BITS)) == RUN_MASK)
{
int len;
for (; (len = *src_p++) == 255; length += 255)
{
/* do nothing */
}
length += len;
}
// copy literals
dst_cpy = dst_p + length;
if (dst_cpy > dst_COPYLENGTH)
{
if (dst_cpy != dst_end) goto _output_error; // Error : not enough place for another match (min 4) + 5 literals
BlockCopy(src_p, dst_p, (length));
src_p += length;
break; // EOF
}
do
{
*(uint*)dst_p = *(uint*)src_p;
dst_p += 4;
src_p += 4;
*(uint*)dst_p = *(uint*)src_p;
dst_p += 4;
src_p += 4;
} while (dst_p < dst_cpy);
src_p -= (dst_p - dst_cpy);
dst_p = dst_cpy;
// get offset
xxx_ref = (dst_cpy) - (*(ushort*)(src_p));
src_p += 2;
if (xxx_ref < dst) goto _output_error; // Error : offset outside destination buffer
// get matchlength
if ((length = (int)(xxx_token & ML_MASK)) == ML_MASK)
{
for (; *src_p == 255; length += 255) src_p++;
length += *src_p++;
}
// copy repeated sequence
if ((dst_p - xxx_ref) < STEPSIZE_32)
{
const int dec64 = 0;
dst_p[0] = xxx_ref[0];
dst_p[1] = xxx_ref[1];
dst_p[2] = xxx_ref[2];
dst_p[3] = xxx_ref[3];
dst_p += 4;
xxx_ref += 4;
xxx_ref -= dec32table[dst_p - xxx_ref];
(*(uint*)(dst_p)) = (*(uint*)(xxx_ref));
dst_p += STEPSIZE_32 - 4;
xxx_ref -= dec64;
}
else
{
*(uint*)dst_p = *(uint*)xxx_ref;
dst_p += 4;
xxx_ref += 4;
}
dst_cpy = dst_p + length - (STEPSIZE_32 - 4);
if (dst_cpy > dst_COPYLENGTH_STEPSIZE_4)
{
if (dst_cpy > dst_LASTLITERALS) goto _output_error; // Error : last 5 bytes must be literals
{
do
{
*(uint*)dst_p = *(uint*)xxx_ref;
dst_p += 4;
xxx_ref += 4;
*(uint*)dst_p = *(uint*)xxx_ref;
dst_p += 4;
xxx_ref += 4;
} while (dst_p < dst_COPYLENGTH);
}
while (dst_p < dst_cpy) *dst_p++ = *xxx_ref++;
dst_p = dst_cpy;
continue;
}
do
{
*(uint*)dst_p = *(uint*)xxx_ref;
dst_p += 4;
xxx_ref += 4;
*(uint*)dst_p = *(uint*)xxx_ref;
dst_p += 4;
xxx_ref += 4;
} while (dst_p < dst_cpy);
dst_p = dst_cpy; // correction
}
// end of decoding
return (int)((src_p) - src);
// write overflow error detected
_output_error:
return (int)(-((src_p) - src));
}
}
}
#endregion
}
}
// ReSharper restore JoinDeclarationAndInitializer
// ReSharper restore TooWideLocalVariableScope
// ReSharper restore InconsistentNaming
#endif


@ -0,0 +1,686 @@
//------------------------------------------------------------------------------
// <auto-generated />
// This file was automatically generated by the UpdateVendors tool.
//------------------------------------------------------------------------------
#if ENABLE_UNSAFE_MSGPACK
#region LZ4 original
/*
LZ4 - Fast LZ compression algorithm
Copyright (C) 2011-2012, Yann Collet.
BSD 2-Clause License (http://www.opensource.org/licenses/bsd-license.php)
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above
copyright notice, this list of conditions and the following disclaimer
in the documentation and/or other materials provided with the
distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
You can contact the author at :
- LZ4 homepage : http://fastcompression.blogspot.com/p/lz4.html
- LZ4 source repository : http://code.google.com/p/lz4/
*/
#endregion
#region LZ4 port
/*
Copyright (c) 2013, Milosz Krajewski
All rights reserved.
Redistribution and use in source and binary forms, with or without modification, are permitted provided
that the following conditions are met:
* Redistributions of source code must retain the above copyright notice, this list of conditions
and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright notice, this list of conditions
and the following disclaimer in the documentation and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED
WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#endregion
// ReSharper disable InconsistentNaming
// ReSharper disable TooWideLocalVariableScope
// ReSharper disable JoinDeclarationAndInitializer
namespace Datadog.Trace.Vendors.MessagePack.LZ4
{
internal static partial class LZ4Codec
{
#region LZ4_compressCtx_64
private static unsafe int LZ4_compressCtx_64(
uint* hash_table,
byte* src,
byte* dst,
int src_len,
int dst_maxlen)
{
unchecked
{
byte* _p;
fixed (int* debruijn64 = &DEBRUIJN_TABLE_64[0])
{
// r93
var src_p = src;
var src_base = src_p;
var src_anchor = src_p;
var src_end = src_p + src_len;
var src_mflimit = src_end - MFLIMIT;
var dst_p = dst;
var dst_end = dst_p + dst_maxlen;
var src_LASTLITERALS = src_end - LASTLITERALS;
var src_LASTLITERALS_1 = src_LASTLITERALS - 1;
var src_LASTLITERALS_3 = src_LASTLITERALS - 3;
var src_LASTLITERALS_STEPSIZE_1 = src_LASTLITERALS - (STEPSIZE_64 - 1);
var dst_LASTLITERALS_1 = dst_end - (1 + LASTLITERALS);
var dst_LASTLITERALS_3 = dst_end - (2 + 1 + LASTLITERALS);
int length;
uint h, h_fwd;
// Init
if (src_len < MINLENGTH) goto _last_literals;
// First Byte
hash_table[((((*(uint*)(src_p))) * 2654435761u) >> HASH_ADJUST)] = (uint)(src_p - src_base);
src_p++;
h_fwd = ((((*(uint*)(src_p))) * 2654435761u) >> HASH_ADJUST);
// Main Loop
while (true)
{
var findMatchAttempts = (1 << SKIPSTRENGTH) + 3;
var src_p_fwd = src_p;
byte* src_ref;
byte* dst_token;
// Find a match
do
{
h = h_fwd;
var step = findMatchAttempts++ >> SKIPSTRENGTH;
src_p = src_p_fwd;
src_p_fwd = src_p + step;
if (src_p_fwd > src_mflimit) goto _last_literals;
h_fwd = ((((*(uint*)(src_p_fwd))) * 2654435761u) >> HASH_ADJUST);
src_ref = src_base + hash_table[h];
hash_table[h] = (uint)(src_p - src_base);
} while ((src_ref < src_p - MAX_DISTANCE) || ((*(uint*)(src_ref)) != (*(uint*)(src_p))));
// Catch up
while ((src_p > src_anchor) && (src_ref > src) && (src_p[-1] == src_ref[-1]))
{
src_p--;
src_ref--;
}
// Encode Literal length
length = (int)(src_p - src_anchor);
dst_token = dst_p++;
if (dst_p + length + (length >> 8) > dst_LASTLITERALS_3) return 0; // Check output limit
if (length >= RUN_MASK)
{
var len = length - RUN_MASK;
*dst_token = (RUN_MASK << ML_BITS);
if (len > 254)
{
do
{
*dst_p++ = 255;
len -= 255;
} while (len > 254);
*dst_p++ = (byte)len;
BlockCopy(src_anchor, dst_p, (length));
dst_p += length;
goto _next_match;
}
*dst_p++ = (byte)len;
}
else
{
*dst_token = (byte)(length << ML_BITS);
}
// Copy Literals
_p = dst_p + (length);
{
do
{
*(ulong*)dst_p = *(ulong*)src_anchor;
dst_p += 8;
src_anchor += 8;
} while (dst_p < _p);
}
dst_p = _p;
_next_match:
// Encode Offset
*(ushort*)dst_p = (ushort)(src_p - src_ref);
dst_p += 2;
// Start Counting
src_p += MINMATCH;
src_ref += MINMATCH; // MinMatch already verified
src_anchor = src_p;
while (src_p < src_LASTLITERALS_STEPSIZE_1)
{
var diff = (*(long*)(src_ref)) ^ (*(long*)(src_p));
if (diff == 0)
{
src_p += STEPSIZE_64;
src_ref += STEPSIZE_64;
continue;
}
src_p += debruijn64[(((ulong)((diff) & -(diff)) * 0x0218A392CDABBD3FL)) >> 58];
goto _endCount;
}
if ((src_p < src_LASTLITERALS_3) && ((*(uint*)(src_ref)) == (*(uint*)(src_p))))
{
src_p += 4;
src_ref += 4;
}
if ((src_p < src_LASTLITERALS_1) && ((*(ushort*)(src_ref)) == (*(ushort*)(src_p))))
{
src_p += 2;
src_ref += 2;
}
if ((src_p < src_LASTLITERALS) && (*src_ref == *src_p)) src_p++;
_endCount:
// Encode MatchLength
length = (int)(src_p - src_anchor);
if (dst_p + (length >> 8) > dst_LASTLITERALS_1) return 0; // Check output limit
if (length >= ML_MASK)
{
*dst_token += ML_MASK;
length -= ML_MASK;
for (; length > 509; length -= 510)
{
*dst_p++ = 255;
*dst_p++ = 255;
}
if (length > 254)
{
length -= 255;
*dst_p++ = 255;
}
*dst_p++ = (byte)length;
}
else
{
*dst_token += (byte)length;
}
// Test end of chunk
if (src_p > src_mflimit)
{
src_anchor = src_p;
break;
}
// Fill table
hash_table[((((*(uint*)(src_p - 2))) * 2654435761u) >> HASH_ADJUST)] = (uint)(src_p - 2 - src_base);
// Test next position
h = ((((*(uint*)(src_p))) * 2654435761u) >> HASH_ADJUST);
src_ref = src_base + hash_table[h];
hash_table[h] = (uint)(src_p - src_base);
if ((src_ref > src_p - (MAX_DISTANCE + 1)) && ((*(uint*)(src_ref)) == (*(uint*)(src_p))))
{
dst_token = dst_p++;
*dst_token = 0;
goto _next_match;
}
// Prepare next loop
src_anchor = src_p++;
h_fwd = ((((*(uint*)(src_p))) * 2654435761u) >> HASH_ADJUST);
}
_last_literals:
// Encode Last Literals
var lastRun = (int)(src_end - src_anchor);
if (dst_p + lastRun + 1 + ((lastRun + 255 - RUN_MASK) / 255) > dst_end) return 0;
if (lastRun >= RUN_MASK)
{
*dst_p++ = (RUN_MASK << ML_BITS);
lastRun -= RUN_MASK;
for (; lastRun > 254; lastRun -= 255) *dst_p++ = 255;
*dst_p++ = (byte)lastRun;
}
else *dst_p++ = (byte)(lastRun << ML_BITS);
BlockCopy(src_anchor, dst_p, (int)(src_end - src_anchor));
dst_p += src_end - src_anchor;
// End
return (int)(dst_p - dst);
}
}
}
#endregion
#region LZ4_compress64kCtx_64
private static unsafe int LZ4_compress64kCtx_64(
ushort* hash_table,
byte* src,
byte* dst,
int src_len,
int dst_maxlen)
{
unchecked
{
byte* _p;
fixed (int* debruijn64 = &DEBRUIJN_TABLE_64[0])
{
// r93
var src_p = src;
var src_anchor = src_p;
var src_base = src_p;
var src_end = src_p + src_len;
var src_mflimit = src_end - MFLIMIT;
var dst_p = dst;
var dst_end = dst_p + dst_maxlen;
var src_LASTLITERALS = src_end - LASTLITERALS;
var src_LASTLITERALS_1 = src_LASTLITERALS - 1;
var src_LASTLITERALS_3 = src_LASTLITERALS - 3;
var src_LASTLITERALS_STEPSIZE_1 = src_LASTLITERALS - (STEPSIZE_64 - 1);
var dst_LASTLITERALS_1 = dst_end - (1 + LASTLITERALS);
var dst_LASTLITERALS_3 = dst_end - (2 + 1 + LASTLITERALS);
int len, length;
uint h, h_fwd;
// Init
if (src_len < MINLENGTH) goto _last_literals;
// First Byte
src_p++;
h_fwd = ((((*(uint*)(src_p))) * 2654435761u) >> HASH64K_ADJUST);
// Main Loop
while (true)
{
var findMatchAttempts = (1 << SKIPSTRENGTH) + 3;
var src_p_fwd = src_p;
byte* src_ref;
byte* dst_token;
// Find a match
do
{
h = h_fwd;
var step = findMatchAttempts++ >> SKIPSTRENGTH;
src_p = src_p_fwd;
src_p_fwd = src_p + step;
if (src_p_fwd > src_mflimit) goto _last_literals;
h_fwd = ((((*(uint*)(src_p_fwd))) * 2654435761u) >> HASH64K_ADJUST);
src_ref = src_base + hash_table[h];
hash_table[h] = (ushort)(src_p - src_base);
} while ((*(uint*)(src_ref)) != (*(uint*)(src_p)));
// Catch up
while ((src_p > src_anchor) && (src_ref > src) && (src_p[-1] == src_ref[-1]))
{
src_p--;
src_ref--;
}
// Encode Literal length
length = (int)(src_p - src_anchor);
dst_token = dst_p++;
if (dst_p + length + (length >> 8) > dst_LASTLITERALS_3) return 0; // Check output limit
if (length >= RUN_MASK)
{
len = length - RUN_MASK;
*dst_token = (RUN_MASK << ML_BITS);
if (len > 254)
{
do
{
*dst_p++ = 255;
len -= 255;
} while (len > 254);
*dst_p++ = (byte)len;
BlockCopy(src_anchor, dst_p, (length));
dst_p += length;
goto _next_match;
}
*dst_p++ = (byte)len;
}
else
{
*dst_token = (byte)(length << ML_BITS);
}
// Copy Literals
{
_p = dst_p + (length);
{
do
{
*(ulong*)dst_p = *(ulong*)src_anchor;
dst_p += 8;
src_anchor += 8;
} while (dst_p < _p);
}
dst_p = _p;
}
_next_match:
// Encode Offset
*(ushort*)dst_p = (ushort)(src_p - src_ref);
dst_p += 2;
// Start Counting
src_p += MINMATCH;
src_ref += MINMATCH; // MinMatch verified
src_anchor = src_p;
while (src_p < src_LASTLITERALS_STEPSIZE_1)
{
var diff = (*(long*)(src_ref)) ^ (*(long*)(src_p));
if (diff == 0)
{
src_p += STEPSIZE_64;
src_ref += STEPSIZE_64;
continue;
}
src_p += debruijn64[(((ulong)((diff) & -(diff)) * 0x0218A392CDABBD3FL)) >> 58];
goto _endCount;
}
if ((src_p < src_LASTLITERALS_3) && ((*(uint*)(src_ref)) == (*(uint*)(src_p))))
{
src_p += 4;
src_ref += 4;
}
if ((src_p < src_LASTLITERALS_1) && ((*(ushort*)(src_ref)) == (*(ushort*)(src_p))))
{
src_p += 2;
src_ref += 2;
}
if ((src_p < src_LASTLITERALS) && (*src_ref == *src_p)) src_p++;
_endCount:
// Encode MatchLength
len = (int)(src_p - src_anchor);
if (dst_p + (len >> 8) > dst_LASTLITERALS_1) return 0; // Check output limit
if (len >= ML_MASK)
{
*dst_token += ML_MASK;
len -= ML_MASK;
for (; len > 509; len -= 510)
{
*dst_p++ = 255;
*dst_p++ = 255;
}
if (len > 254)
{
len -= 255;
*dst_p++ = 255;
}
*dst_p++ = (byte)len;
}
else
{
*dst_token += (byte)len;
}
// Test end of chunk
if (src_p > src_mflimit)
{
src_anchor = src_p;
break;
}
// Fill table
hash_table[((((*(uint*)(src_p - 2))) * 2654435761u) >> HASH64K_ADJUST)] = (ushort)(src_p - 2 - src_base);
// Test next position
h = ((((*(uint*)(src_p))) * 2654435761u) >> HASH64K_ADJUST);
src_ref = src_base + hash_table[h];
hash_table[h] = (ushort)(src_p - src_base);
if ((*(uint*)(src_ref)) == (*(uint*)(src_p)))
{
dst_token = dst_p++;
*dst_token = 0;
goto _next_match;
}
// Prepare next loop
src_anchor = src_p++;
h_fwd = ((((*(uint*)(src_p))) * 2654435761u) >> HASH64K_ADJUST);
}
_last_literals:
// Encode Last Literals
var lastRun = (int)(src_end - src_anchor);
if (dst_p + lastRun + 1 + (lastRun - RUN_MASK + 255) / 255 > dst_end) return 0;
if (lastRun >= RUN_MASK)
{
*dst_p++ = (RUN_MASK << ML_BITS);
lastRun -= RUN_MASK;
for (; lastRun > 254; lastRun -= 255) *dst_p++ = 255;
*dst_p++ = (byte)lastRun;
}
else *dst_p++ = (byte)(lastRun << ML_BITS);
BlockCopy(src_anchor, dst_p, (int)(src_end - src_anchor));
dst_p += src_end - src_anchor;
// End
return (int)(dst_p - dst);
}
}
}
#endregion
#region LZ4_uncompress_64
private static unsafe int LZ4_uncompress_64(
byte* src,
byte* dst,
int dst_len)
{
unchecked
{
fixed (int* dec32table = &DECODER_TABLE_32[0])
fixed (int* dec64table = &DECODER_TABLE_64[0])
{
// r93
var src_p = src;
byte* dst_ref;
var dst_p = dst;
var dst_end = dst_p + dst_len;
byte* dst_cpy;
var dst_LASTLITERALS = dst_end - LASTLITERALS;
var dst_COPYLENGTH = dst_end - COPYLENGTH;
var dst_COPYLENGTH_STEPSIZE_4 = dst_end - COPYLENGTH - (STEPSIZE_64 - 4);
byte token;
// Main Loop
while (true)
{
int length;
// get runlength
token = *src_p++;
if ((length = (token >> ML_BITS)) == RUN_MASK)
{
int len;
for (; (len = *src_p++) == 255; length += 255)
{
/* do nothing */
}
length += len;
}
// copy literals
dst_cpy = dst_p + length;
if (dst_cpy > dst_COPYLENGTH)
{
if (dst_cpy != dst_end) goto _output_error; // Error : not enough place for another match (min 4) + 5 literals
BlockCopy(src_p, dst_p, (length));
src_p += length;
break; // EOF
}
do
{
*(ulong*)dst_p = *(ulong*)src_p;
dst_p += 8;
src_p += 8;
} while (dst_p < dst_cpy);
src_p -= (dst_p - dst_cpy);
dst_p = dst_cpy;
// get offset
dst_ref = (dst_cpy) - (*(ushort*)(src_p));
src_p += 2;
if (dst_ref < dst) goto _output_error; // Error : offset outside destination buffer
// get matchlength
if ((length = (token & ML_MASK)) == ML_MASK)
{
for (; *src_p == 255; length += 255) src_p++;
length += *src_p++;
}
// copy repeated sequence
if ((dst_p - dst_ref) < STEPSIZE_64)
{
var dec64 = dec64table[dst_p - dst_ref];
dst_p[0] = dst_ref[0];
dst_p[1] = dst_ref[1];
dst_p[2] = dst_ref[2];
dst_p[3] = dst_ref[3];
dst_p += 4;
dst_ref += 4;
dst_ref -= dec32table[dst_p - dst_ref];
(*(uint*)(dst_p)) = (*(uint*)(dst_ref));
dst_p += STEPSIZE_64 - 4;
dst_ref -= dec64;
}
else
{
*(ulong*)dst_p = *(ulong*)dst_ref;
dst_p += 8;
dst_ref += 8;
}
dst_cpy = dst_p + length - (STEPSIZE_64 - 4);
if (dst_cpy > dst_COPYLENGTH_STEPSIZE_4)
{
if (dst_cpy > dst_LASTLITERALS) goto _output_error; // Error : last 5 bytes must be literals
while (dst_p < dst_COPYLENGTH)
{
*(ulong*)dst_p = *(ulong*)dst_ref;
dst_p += 8;
dst_ref += 8;
}
while (dst_p < dst_cpy) *dst_p++ = *dst_ref++;
dst_p = dst_cpy;
continue;
}
{
do
{
*(ulong*)dst_p = *(ulong*)dst_ref;
dst_p += 8;
dst_ref += 8;
} while (dst_p < dst_cpy);
}
dst_p = dst_cpy; // correction
}
// end of decoding
return (int)((src_p) - src);
// write overflow error detected
_output_error:
return (int)(-((src_p) - src));
}
}
}
#endregion
}
}
// ReSharper restore JoinDeclarationAndInitializer
// ReSharper restore TooWideLocalVariableScope
// ReSharper restore InconsistentNaming
#endif

View File

@ -0,0 +1,162 @@
//------------------------------------------------------------------------------
// <auto-generated />
// This file was automatically generated by the UpdateVendors tool.
//------------------------------------------------------------------------------
#region license
/*
Copyright (c) 2013, Milosz Krajewski
All rights reserved.
Redistribution and use in source and binary forms, with or without modification, are permitted provided
that the following conditions are met:
* Redistributions of source code must retain the above copyright notice, this list of conditions
and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright notice, this list of conditions
and the following disclaimer in the documentation and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED
WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#endregion
using System;
// ReSharper disable InconsistentNaming
namespace Datadog.Trace.Vendors.MessagePack.LZ4
{
internal static partial class LZ4Codec
{
#region configuration
/// <summary>
/// Memory usage formula : N->2^N Bytes (examples : 10 -> 1KB; 12 -> 4KB ; 16 -> 64KB; 20 -> 1MB; etc.)
/// Increasing memory usage improves compression ratio
/// Reduced memory usage can improve speed, due to cache effect
/// Default value is 14, for 16KB, which nicely fits into Intel x86 L1 cache
/// </summary>
private const int MEMORY_USAGE = 12; // modified: this vendored copy uses 12 (upstream default is 14).
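// Added note (not upstream): with MEMORY_USAGE = 12 the formula above gives 2^12 = 4KB of
// hash-table memory; HASH_LOG = 12 - 2 = 10, so HASH_TABLESIZE = 1 << 10 = 1024 entries
// (the 64K variant doubles this to 2048 two-byte entries). The upstream default of 14
// would use 2^14 = 16KB instead.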
/// <summary>
/// Decreasing this value will make the algorithm skip faster data segments considered "incompressible"
/// This may decrease compression ratio dramatically, but will be faster on incompressible data
/// Increasing this value will make the algorithm search more before declaring a segment "incompressible"
/// This could improve compression a bit, but will be slower on incompressible data
/// The default value (6) is recommended
/// </summary>
private const int NOTCOMPRESSIBLE_DETECTIONLEVEL = 6;
#endregion
#region consts
private const int MINMATCH = 4;
#pragma warning disable 162, 429
// ReSharper disable once UnreachableCode
private const int SKIPSTRENGTH =
NOTCOMPRESSIBLE_DETECTIONLEVEL > 2
? NOTCOMPRESSIBLE_DETECTIONLEVEL
: 2;
#pragma warning restore 162, 429
private const int COPYLENGTH = 8;
private const int LASTLITERALS = 5;
private const int MFLIMIT = COPYLENGTH + MINMATCH;
private const int MINLENGTH = MFLIMIT + 1;
private const int MAXD_LOG = 16;
private const int MAXD = 1 << MAXD_LOG;
private const int MAXD_MASK = MAXD - 1;
private const int MAX_DISTANCE = (1 << MAXD_LOG) - 1;
private const int ML_BITS = 4;
private const int ML_MASK = (1 << ML_BITS) - 1;
private const int RUN_BITS = 8 - ML_BITS;
private const int RUN_MASK = (1 << RUN_BITS) - 1;
private const int STEPSIZE_64 = 8;
private const int STEPSIZE_32 = 4;
private const int LZ4_64KLIMIT = (1 << 16) + (MFLIMIT - 1);
private const int HASH_LOG = MEMORY_USAGE - 2;
private const int HASH_TABLESIZE = 1 << HASH_LOG;
private const int HASH_ADJUST = (MINMATCH * 8) - HASH_LOG;
private const int HASH64K_LOG = HASH_LOG + 1;
private const int HASH64K_TABLESIZE = 1 << HASH64K_LOG;
private const int HASH64K_ADJUST = (MINMATCH * 8) - HASH64K_LOG;
private const int HASHHC_LOG = MAXD_LOG - 1;
private const int HASHHC_TABLESIZE = 1 << HASHHC_LOG;
private const int HASHHC_ADJUST = (MINMATCH * 8) - HASHHC_LOG;
//private const int HASHHC_MASK = HASHHC_TABLESIZE - 1;
private static readonly int[] DECODER_TABLE_32 = { 0, 3, 2, 3, 0, 0, 0, 0 };
private static readonly int[] DECODER_TABLE_64 = { 0, 0, 0, -1, 0, 1, 2, 3 };
private static readonly int[] DEBRUIJN_TABLE_32 = {
0, 0, 3, 0, 3, 1, 3, 0, 3, 2, 2, 1, 3, 2, 0, 1,
3, 3, 1, 2, 2, 2, 2, 0, 3, 1, 2, 0, 1, 0, 1, 1
};
private static readonly int[] DEBRUIJN_TABLE_64 = {
0, 0, 0, 0, 0, 1, 1, 2, 0, 3, 1, 3, 1, 4, 2, 7,
0, 2, 3, 6, 1, 5, 3, 5, 1, 3, 4, 4, 2, 5, 6, 7,
7, 0, 1, 2, 3, 3, 4, 6, 2, 6, 5, 5, 3, 4, 5, 6,
7, 1, 2, 4, 6, 4, 4, 5, 7, 2, 6, 5, 7, 6, 7, 7
};
private const int MAX_NB_ATTEMPTS = 256;
private const int OPTIMAL_ML = (ML_MASK - 1) + MINMATCH;
private const int BLOCK_COPY_LIMIT = 16;
#endregion
#region internal interface (common)
/// <summary>Gets the maximum length of the output.</summary>
/// <param name="inputLength">Length of the input.</param>
/// <returns>Maximum number of bytes needed for compressed buffer.</returns>
public static int MaximumOutputLength(int inputLength)
{
return inputLength + (inputLength / 255) + 16;
}
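// Illustrative sketch (added; not part of the vendored library): a caller sizes the
// destination with MaximumOutputLength before compressing, then trims to the length that
// Encode reports. The Encode overload used here is the byte[]/offset/length one called
// elsewhere in this vendored code.
private static byte[] CompressExample(byte[] input)
{
var output = new byte[MaximumOutputLength(input.Length)];
var written = Encode(input, 0, input.Length, output, 0, output.Length);
Array.Resize(ref output, written);
return output;
}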
#endregion
#region internal interface (common)
internal static void CheckArguments(
byte[] input, int inputOffset, int inputLength,
byte[] output, int outputOffset, int outputLength)
{
if (inputLength == 0)
{
outputLength = 0;
return;
}
if (input == null) throw new ArgumentNullException("input");
if ((uint)inputOffset > (uint)input.Length) throw new ArgumentOutOfRangeException("inputOffset");
if ((uint)inputLength > (uint)input.Length - (uint)inputOffset) throw new ArgumentOutOfRangeException("inputLength");
if (output == null) throw new ArgumentNullException("output");
if ((uint)outputOffset > (uint)output.Length) throw new ArgumentOutOfRangeException("outputOffset");
if ((uint)outputLength > (uint)output.Length - (uint)outputOffset) throw new ArgumentOutOfRangeException("outputLength");
}
#endregion
}
}
// ReSharper restore InconsistentNaming

View File

@ -0,0 +1,263 @@
//------------------------------------------------------------------------------
// <auto-generated />
// This file was automatically generated by the UpdateVendors tool.
//------------------------------------------------------------------------------
using Datadog.Trace.Vendors.MessagePack.Formatters;
using Datadog.Trace.Vendors.MessagePack.Internal;
using Datadog.Trace.Vendors.MessagePack.LZ4;
using System;
using System.Globalization;
using System.IO;
using System.Text;
namespace Datadog.Trace.Vendors.MessagePack
{
// JSON API
internal static partial class LZ4MessagePackSerializer
{
/// <summary>
/// Dump to JSON string.
/// </summary>
public static string ToJson<T>(T obj)
{
return ToJson(Serialize(obj));
}
/// <summary>
/// Dump to JSON string.
/// </summary>
public static string ToJson<T>(T obj, IFormatterResolver resolver)
{
return ToJson(Serialize(obj, resolver));
}
/// <summary>
/// Dump message-pack binary to JSON string.
/// </summary>
public static string ToJson(byte[] bytes)
{
if (bytes == null || bytes.Length == 0) return "";
int readSize;
if (MessagePackBinary.GetMessagePackType(bytes, 0) == MessagePackType.Extension)
{
var header = MessagePackBinary.ReadExtensionFormatHeader(bytes, 0, out readSize);
if (header.TypeCode == ExtensionTypeCode)
{
// decode lz4
var offset = readSize;
var length = MessagePackBinary.ReadInt32(bytes, offset, out readSize);
offset += readSize;
var buffer = LZ4MemoryPool.GetBuffer();
if (buffer.Length < length)
{
buffer = new byte[length];
}
// LZ4 Decode
LZ4Codec.Decode(bytes, offset, bytes.Length - offset, buffer, 0, length);
bytes = buffer; // use LZ4 bytes
}
}
var sb = new StringBuilder();
ToJsonCore(bytes, 0, sb);
return sb.ToString();
}
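// Illustrative sketch (added; not part of the vendored library): ToJson above transparently
// detects the LZ4 extension (type code 99), decompresses into the thread-local pool buffer,
// and then renders JSON, so compressed and uncompressed payloads can be dumped the same way.
private static string DumpAsJsonExample<T>(T value)
{
var lz4Bytes = Serialize(value); // compressed only when the payload reaches NotCompressionSize
return ToJson(lz4Bytes);
}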
public static byte[] FromJson(string str)
{
using (var sr = new StringReader(str))
{
return FromJson(sr);
}
}
/// <summary>
/// From Json String to LZ4MessagePack binary
/// </summary>
public static byte[] FromJson(TextReader reader)
{
var buffer = MessagePackSerializer.FromJsonUnsafe(reader); // offset is guaranteed to be 0
return LZ4MessagePackSerializer.ToLZ4Binary(buffer);
}
static int ToJsonCore(byte[] bytes, int offset, StringBuilder builder)
{
var readSize = 0;
var type = MessagePackBinary.GetMessagePackType(bytes, offset);
switch (type)
{
case MessagePackType.Integer:
var code = bytes[offset];
if (MessagePackCode.MinNegativeFixInt <= code && code <= MessagePackCode.MaxNegativeFixInt) builder.Append(MessagePackBinary.ReadSByte(bytes, offset, out readSize));
else if (MessagePackCode.MinFixInt <= code && code <= MessagePackCode.MaxFixInt) builder.Append(MessagePackBinary.ReadByte(bytes, offset, out readSize));
else if (code == MessagePackCode.Int8) builder.Append(MessagePackBinary.ReadSByte(bytes, offset, out readSize));
else if (code == MessagePackCode.Int16) builder.Append(MessagePackBinary.ReadInt16(bytes, offset, out readSize));
else if (code == MessagePackCode.Int32) builder.Append(MessagePackBinary.ReadInt32(bytes, offset, out readSize));
else if (code == MessagePackCode.Int64) builder.Append(MessagePackBinary.ReadInt64(bytes, offset, out readSize));
else if (code == MessagePackCode.UInt8) builder.Append(MessagePackBinary.ReadByte(bytes, offset, out readSize));
else if (code == MessagePackCode.UInt16) builder.Append(MessagePackBinary.ReadUInt16(bytes, offset, out readSize));
else if (code == MessagePackCode.UInt32) builder.Append(MessagePackBinary.ReadUInt32(bytes, offset, out readSize));
else if (code == MessagePackCode.UInt64) builder.Append(MessagePackBinary.ReadUInt64(bytes, offset, out readSize));
break;
case MessagePackType.Boolean:
builder.Append(MessagePackBinary.ReadBoolean(bytes, offset, out readSize) ? "true" : "false");
break;
case MessagePackType.Float:
var floatCode = bytes[offset];
if (floatCode == MessagePackCode.Float32)
{
builder.Append(MessagePackBinary.ReadSingle(bytes, offset, out readSize).ToString(System.Globalization.CultureInfo.InvariantCulture));
}
else
{
builder.Append(MessagePackBinary.ReadDouble(bytes, offset, out readSize).ToString(System.Globalization.CultureInfo.InvariantCulture));
}
break;
case MessagePackType.String:
WriteJsonString(MessagePackBinary.ReadString(bytes, offset, out readSize), builder);
break;
case MessagePackType.Binary:
builder.Append("\"" + Convert.ToBase64String(MessagePackBinary.ReadBytes(bytes, offset, out readSize)) + "\"");
break;
case MessagePackType.Array:
{
var length = MessagePackBinary.ReadArrayHeaderRaw(bytes, offset, out readSize);
var totalReadSize = readSize;
offset += readSize;
builder.Append("[");
for (int i = 0; i < length; i++)
{
readSize = ToJsonCore(bytes, offset, builder);
offset += readSize;
totalReadSize += readSize;
if (i != length - 1)
{
builder.Append(",");
}
}
builder.Append("]");
return totalReadSize;
}
case MessagePackType.Map:
{
var length = MessagePackBinary.ReadMapHeaderRaw(bytes, offset, out readSize);
var totalReadSize = readSize;
offset += readSize;
builder.Append("{");
for (int i = 0; i < length; i++)
{
// write key
{
var keyType = MessagePackBinary.GetMessagePackType(bytes, offset);
if (keyType == MessagePackType.String || keyType == MessagePackType.Binary)
{
readSize = ToJsonCore(bytes, offset, builder);
}
else
{
builder.Append("\"");
readSize = ToJsonCore(bytes, offset, builder);
builder.Append("\"");
}
offset += readSize;
totalReadSize += readSize;
}
builder.Append(":");
// write body
{
readSize = ToJsonCore(bytes, offset, builder);
offset += readSize;
totalReadSize += readSize;
}
if (i != length - 1)
{
builder.Append(",");
}
}
builder.Append("}");
return totalReadSize;
}
case MessagePackType.Extension:
var extHeader = MessagePackBinary.ReadExtensionFormatHeader(bytes, offset, out readSize);
if (extHeader.TypeCode == ReservedMessagePackExtensionTypeCode.DateTime)
{
var dt = MessagePackBinary.ReadDateTime(bytes, offset, out readSize);
builder.Append("\"");
builder.Append(dt.ToString("o", CultureInfo.InvariantCulture));
builder.Append("\"");
}
else
{
var ext = MessagePackBinary.ReadExtensionFormat(bytes, offset, out readSize);
builder.Append("[");
builder.Append(ext.TypeCode);
builder.Append(",");
builder.Append("\"");
builder.Append(Convert.ToBase64String(ext.Data));
builder.Append("\"");
builder.Append("]");
}
break;
case MessagePackType.Unknown:
case MessagePackType.Nil:
default:
readSize = 1;
builder.Append("null");
break;
}
return readSize;
}
// escape string
static void WriteJsonString(string value, StringBuilder builder)
{
builder.Append('\"');
var len = value.Length;
for (int i = 0; i < len; i++)
{
var c = value[i];
switch (c)
{
case '"':
builder.Append("\\\"");
break;
case '\\':
builder.Append("\\\\");
break;
case '\b':
builder.Append("\\b");
break;
case '\f':
builder.Append("\\f");
break;
case '\n':
builder.Append("\\n");
break;
case '\r':
builder.Append("\\r");
break;
case '\t':
builder.Append("\\t");
break;
default:
builder.Append(c);
break;
}
}
builder.Append('\"');
}
}
}

View File

@ -0,0 +1,281 @@
//------------------------------------------------------------------------------
// <auto-generated />
// This file was automatically generated by the UpdateVendors tool.
//------------------------------------------------------------------------------
#if NETSTANDARD || NETFRAMEWORK
using System;
using System.Linq;
using System.Reflection;
using System.IO;
using System.Linq.Expressions;
namespace Datadog.Trace.Vendors.MessagePack
{
internal static partial class LZ4MessagePackSerializer
{
internal static class NonGeneric
{
static readonly Func<Type, CompiledMethods> CreateCompiledMethods;
static readonly MessagePack.Internal.ThreadsafeTypeKeyHashTable<CompiledMethods> serializes = new MessagePack.Internal.ThreadsafeTypeKeyHashTable<CompiledMethods>(capacity: 64);
static NonGeneric()
{
CreateCompiledMethods = t => new CompiledMethods(t);
}
public static byte[] Serialize(Type type, object obj)
{
return GetOrAdd(type).serialize1.Invoke(obj);
}
public static byte[] Serialize(Type type, object obj, IFormatterResolver resolver)
{
return GetOrAdd(type).serialize2.Invoke(obj, resolver);
}
public static void Serialize(Type type, Stream stream, object obj)
{
GetOrAdd(type).serialize3.Invoke(stream, obj);
}
public static void Serialize(Type type, Stream stream, object obj, IFormatterResolver resolver)
{
GetOrAdd(type).serialize4.Invoke(stream, obj, resolver);
}
public static object Deserialize(Type type, byte[] bytes)
{
return GetOrAdd(type).deserialize1.Invoke(bytes);
}
public static object Deserialize(Type type, byte[] bytes, IFormatterResolver resolver)
{
return GetOrAdd(type).deserialize2.Invoke(bytes, resolver);
}
public static object Deserialize(Type type, Stream stream)
{
return GetOrAdd(type).deserialize3.Invoke(stream);
}
public static object Deserialize(Type type, Stream stream, IFormatterResolver resolver)
{
return GetOrAdd(type).deserialize4.Invoke(stream, resolver);
}
public static object Deserialize(Type type, Stream stream, bool readStrict)
{
return GetOrAdd(type).deserialize5.Invoke(stream, readStrict);
}
public static object Deserialize(Type type, Stream stream, IFormatterResolver resolver, bool readStrict)
{
return GetOrAdd(type).deserialize6.Invoke(stream, resolver, readStrict);
}
public static object Deserialize(Type type, ArraySegment<byte> bytes)
{
return GetOrAdd(type).deserialize7.Invoke(bytes);
}
public static object Deserialize(Type type, ArraySegment<byte> bytes, IFormatterResolver resolver)
{
return GetOrAdd(type).deserialize8.Invoke(bytes, resolver);
}
static CompiledMethods GetOrAdd(Type type)
{
return serializes.GetOrAdd(type, CreateCompiledMethods);
}
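// Illustrative usage sketch (added; not part of the vendored code): serializing when the
// concrete type is only known at runtime. `payload` and `GetPayload` are hypothetical.
//
//     object payload = GetPayload(); // hypothetical source of a runtime-typed value
//     var bytes = LZ4MessagePackSerializer.NonGeneric.Serialize(payload.GetType(), payload);
//     var clone = LZ4MessagePackSerializer.NonGeneric.Deserialize(payload.GetType(), bytes);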
class CompiledMethods
{
public readonly Func<object, byte[]> serialize1;
public readonly Func<object, IFormatterResolver, byte[]> serialize2;
public readonly Action<Stream, object> serialize3;
public readonly Action<Stream, object, IFormatterResolver> serialize4;
public readonly Func<byte[], object> deserialize1;
public readonly Func<byte[], IFormatterResolver, object> deserialize2;
public readonly Func<Stream, object> deserialize3;
public readonly Func<Stream, IFormatterResolver, object> deserialize4;
public readonly Func<Stream, bool, object> deserialize5;
public readonly Func<Stream, IFormatterResolver, bool, object> deserialize6;
public readonly Func<ArraySegment<byte>, object> deserialize7;
public readonly Func<ArraySegment<byte>, IFormatterResolver, object> deserialize8;
public CompiledMethods(Type type)
{
var ti = type.GetTypeInfo();
{
// public static byte[] Serialize<T>(T obj)
var serialize = GetMethod(type, new Type[] { null });
var param1 = Expression.Parameter(typeof(object), "obj");
var body = Expression.Call(serialize, ti.IsValueType
? Expression.Unbox(param1, type)
: Expression.Convert(param1, type));
var lambda = Expression.Lambda<Func<object, byte[]>>(body, param1).Compile();
this.serialize1 = lambda;
}
{
// public static byte[] Serialize<T>(T obj, IFormatterResolver resolver)
var serialize = GetMethod(type, new Type[] { null, typeof(IFormatterResolver) });
var param1 = Expression.Parameter(typeof(object), "obj");
var param2 = Expression.Parameter(typeof(IFormatterResolver), "formatterResolver");
var body = Expression.Call(serialize, ti.IsValueType
? Expression.Unbox(param1, type)
: Expression.Convert(param1, type), param2);
var lambda = Expression.Lambda<Func<object, IFormatterResolver, byte[]>>(body, param1, param2).Compile();
this.serialize2 = lambda;
}
{
// public static void Serialize<T>(Stream stream, T obj)
var serialize = GetMethod(type, new Type[] { typeof(Stream), null });
var param1 = Expression.Parameter(typeof(Stream), "stream");
var param2 = Expression.Parameter(typeof(object), "obj");
var body = Expression.Call(serialize, param1, ti.IsValueType
? Expression.Unbox(param2, type)
: Expression.Convert(param2, type));
var lambda = Expression.Lambda<Action<Stream, object>>(body, param1, param2).Compile();
this.serialize3 = lambda;
}
{
// public static void Serialize<T>(Stream stream, T obj, IFormatterResolver resolver)
var serialize = GetMethod(type, new Type[] { typeof(Stream), null, typeof(IFormatterResolver) });
var param1 = Expression.Parameter(typeof(Stream), "stream");
var param2 = Expression.Parameter(typeof(object), "obj");
var param3 = Expression.Parameter(typeof(IFormatterResolver), "formatterResolver");
var body = Expression.Call(serialize, param1, ti.IsValueType
? Expression.Unbox(param2, type)
: Expression.Convert(param2, type), param3);
var lambda = Expression.Lambda<Action<Stream, object, IFormatterResolver>>(body, param1, param2, param3).Compile();
this.serialize4 = lambda;
}
{
// public static T Deserialize<T>(byte[] bytes)
var deserialize = GetMethod(type, new Type[] { typeof(byte[]) });
var param1 = Expression.Parameter(typeof(byte[]), "bytes");
var body = Expression.Convert(Expression.Call(deserialize, param1), typeof(object));
var lambda = Expression.Lambda<Func<byte[], object>>(body, param1).Compile();
this.deserialize1 = lambda;
}
{
// public static T Deserialize<T>(byte[] bytes, IFormatterResolver resolver)
var deserialize = GetMethod(type, new Type[] { typeof(byte[]), typeof(IFormatterResolver) });
var param1 = Expression.Parameter(typeof(byte[]), "bytes");
var param2 = Expression.Parameter(typeof(IFormatterResolver), "resolver");
var body = Expression.Convert(Expression.Call(deserialize, param1, param2), typeof(object));
var lambda = Expression.Lambda<Func<byte[], IFormatterResolver, object>>(body, param1, param2).Compile();
this.deserialize2 = lambda;
}
{
// public static T Deserialize<T>(Stream stream)
var deserialize = GetMethod(type, new Type[] { typeof(Stream) });
var param1 = Expression.Parameter(typeof(Stream), "stream");
var body = Expression.Convert(Expression.Call(deserialize, param1), typeof(object));
var lambda = Expression.Lambda<Func<Stream, object>>(body, param1).Compile();
this.deserialize3 = lambda;
}
{
// public static T Deserialize<T>(Stream stream, IFormatterResolver resolver)
var deserialize = GetMethod(type, new Type[] { typeof(Stream), typeof(IFormatterResolver) });
var param1 = Expression.Parameter(typeof(Stream), "stream");
var param2 = Expression.Parameter(typeof(IFormatterResolver), "resolver");
var body = Expression.Convert(Expression.Call(deserialize, param1, param2), typeof(object));
var lambda = Expression.Lambda<Func<Stream, IFormatterResolver, object>>(body, param1, param2).Compile();
this.deserialize4 = lambda;
}
{
// public static T Deserialize<T>(Stream stream, bool readStrict)
var deserialize = GetMethod(type, new Type[] { typeof(Stream), typeof(bool) });
var param1 = Expression.Parameter(typeof(Stream), "stream");
var param2 = Expression.Parameter(typeof(bool), "readStrict");
var body = Expression.Convert(Expression.Call(deserialize, param1, param2), typeof(object));
var lambda = Expression.Lambda<Func<Stream, bool, object>>(body, param1, param2).Compile();
this.deserialize5 = lambda;
}
{
// public static T Deserialize<T>(Stream stream, IFormatterResolver resolver, bool readStrict)
var deserialize = GetMethod(type, new Type[] { typeof(Stream), typeof(IFormatterResolver), typeof(bool) });
var param1 = Expression.Parameter(typeof(Stream), "stream");
var param2 = Expression.Parameter(typeof(IFormatterResolver), "resolver");
var param3 = Expression.Parameter(typeof(bool), "readStrict");
var body = Expression.Convert(Expression.Call(deserialize, param1, param2, param3), typeof(object));
var lambda = Expression.Lambda<Func<Stream, IFormatterResolver, bool, object>>(body, param1, param2, param3).Compile();
this.deserialize6 = lambda;
}
{
// public static T Deserialize<T>(ArraySegment<byte> bytes)
var deserialize = GetMethod(type, new Type[] { typeof(ArraySegment<byte>) });
var param1 = Expression.Parameter(typeof(ArraySegment<byte>), "bytes");
var body = Expression.Convert(Expression.Call(deserialize, param1), typeof(object));
var lambda = Expression.Lambda<Func<ArraySegment<byte>, object>>(body, param1).Compile();
this.deserialize7 = lambda;
}
{
// public static T Deserialize<T>(ArraySegment<byte> bytes, IFormatterResolver resolver)
var deserialize = GetMethod(type, new Type[] { typeof(ArraySegment<byte>), typeof(IFormatterResolver) });
var param1 = Expression.Parameter(typeof(ArraySegment<byte>), "bytes");
var param2 = Expression.Parameter(typeof(IFormatterResolver), "resolver");
var body = Expression.Convert(Expression.Call(deserialize, param1, param2), typeof(object));
var lambda = Expression.Lambda<Func<ArraySegment<byte>, IFormatterResolver, object>>(body, param1, param2).Compile();
this.deserialize8 = lambda;
}
}
// A null entry marks the generic type parameter position.
static MethodInfo GetMethod(Type type, Type[] parameters)
{
return typeof(LZ4MessagePackSerializer).GetRuntimeMethods().Where(x =>
{
if (!(x.Name == "Serialize" || x.Name == "Deserialize")) return false;
var ps = x.GetParameters();
if (ps.Length != parameters.Length) return false;
for (int i = 0; i < ps.Length; i++)
{
if (parameters[i] == null && ps[i].ParameterType.IsGenericParameter) continue;
if (ps[i].ParameterType != parameters[i]) return false;
}
return true;
})
.Single()
.MakeGenericMethod(type);
}
}
}
}
}
#endif

View File

@ -0,0 +1,365 @@
//------------------------------------------------------------------------------
// <auto-generated />
// This file was automatically generated by the UpdateVendors tool.
//------------------------------------------------------------------------------
using Datadog.Trace.Vendors.MessagePack.Internal;
using System;
using System.IO;
using Datadog.Trace.Vendors.MessagePack.LZ4;
namespace Datadog.Trace.Vendors.MessagePack
{
/// <summary>
/// Serializer that LZ4-compresses the MessagePack payload.
/// </summary>
internal static partial class LZ4MessagePackSerializer
{
public const sbyte ExtensionTypeCode = 99;
public const int NotCompressionSize = 64;
/// <summary>
/// Serialize to binary with default resolver.
/// </summary>
public static byte[] Serialize<T>(T obj)
{
return Serialize(obj, null);
}
/// <summary>
/// Serialize to binary with specified resolver.
/// </summary>
public static byte[] Serialize<T>(T obj, IFormatterResolver resolver)
{
if (resolver == null) resolver = MessagePackSerializer.DefaultResolver;
var buffer = SerializeCore(obj, resolver);
return MessagePackBinary.FastCloneWithResize(buffer.Array, buffer.Count);
}
/// <summary>
/// Serialize to stream.
/// </summary>
public static void Serialize<T>(Stream stream, T obj)
{
Serialize(stream, obj, null);
}
/// <summary>
/// Serialize to stream with specified resolver.
/// </summary>
public static void Serialize<T>(Stream stream, T obj, IFormatterResolver resolver)
{
if (resolver == null) resolver = MessagePackSerializer.DefaultResolver;
var buffer = SerializeCore(obj, resolver);
stream.Write(buffer.Array, 0, buffer.Count);
}
public static int SerializeToBlock<T>(ref byte[] bytes, int offset, T obj, IFormatterResolver resolver)
{
var serializedData = MessagePackSerializer.SerializeUnsafe(obj, resolver);
if (serializedData.Count < NotCompressionSize)
{
// payload is below the compression threshold; copy it through as-is (the extra copy is unavoidable here)
MessagePackBinary.EnsureCapacity(ref bytes, offset, serializedData.Count);
Buffer.BlockCopy(serializedData.Array, serializedData.Offset, bytes, offset, serializedData.Count);
return serializedData.Count;
}
else
{
var maxOutCount = LZ4Codec.MaximumOutputLength(serializedData.Count);
MessagePackBinary.EnsureCapacity(ref bytes, offset, 6 + 5 + maxOutCount); // (ext header size + fixed length size)
// acquire ext header position
var extHeaderOffset = offset;
offset += (6 + 5);
// write body
var lz4Length = LZ4Codec.Encode(serializedData.Array, serializedData.Offset, serializedData.Count, bytes, offset, bytes.Length - offset);
// write extension header(always 6 bytes)
extHeaderOffset += MessagePackBinary.WriteExtensionFormatHeaderForceExt32Block(ref bytes, extHeaderOffset, (sbyte)ExtensionTypeCode, lz4Length + 5);
// write length(always 5 bytes)
MessagePackBinary.WriteInt32ForceInt32Block(ref bytes, extHeaderOffset, serializedData.Count);
return 6 + 5 + lz4Length;
}
}
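// Added note (derived from the code above): a compressed block is laid out as
// [ext32 header, 6 bytes: 0xc9 marker + 4-byte data length + type code 99]
// [int32 block, 5 bytes: 0xd2 marker + original (uncompressed) length]
// [LZ4-compressed body, lz4Length bytes]
// which is why both writers reserve 6 + 5 bytes before the body and report 6 + 5 + lz4Length.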
public static byte[] ToLZ4Binary(ArraySegment<byte> messagePackBinary)
{
var buffer = ToLZ4BinaryCore(messagePackBinary);
return MessagePackBinary.FastCloneWithResize(buffer.Array, buffer.Count);
}
static ArraySegment<byte> SerializeCore<T>(T obj, IFormatterResolver resolver)
{
var serializedData = MessagePackSerializer.SerializeUnsafe(obj, resolver);
return ToLZ4BinaryCore(serializedData);
}
static ArraySegment<byte> ToLZ4BinaryCore(ArraySegment<byte> serializedData)
{
if (serializedData.Count < NotCompressionSize)
{
return serializedData;
}
else
{
var offset = 0;
var buffer = LZ4MemoryPool.GetBuffer();
var maxOutCount = LZ4Codec.MaximumOutputLength(serializedData.Count);
if (buffer.Length < 6 + 5 + maxOutCount) // (ext header size + fixed length size)
{
buffer = new byte[6 + 5 + maxOutCount];
}
// acquire ext header position
var extHeaderOffset = offset;
offset += (6 + 5);
// write body
var lz4Length = LZ4Codec.Encode(serializedData.Array, serializedData.Offset, serializedData.Count, buffer, offset, buffer.Length - offset);
// write extension header(always 6 bytes)
extHeaderOffset += MessagePackBinary.WriteExtensionFormatHeaderForceExt32Block(ref buffer, extHeaderOffset, (sbyte)ExtensionTypeCode, lz4Length + 5);
// write length(always 5 bytes)
MessagePackBinary.WriteInt32ForceInt32Block(ref buffer, extHeaderOffset, serializedData.Count);
return new ArraySegment<byte>(buffer, 0, 6 + 5 + lz4Length);
}
}
public static T Deserialize<T>(byte[] bytes)
{
return Deserialize<T>(bytes, null);
}
public static T Deserialize<T>(byte[] bytes, IFormatterResolver resolver)
{
return DeserializeCore<T>(new ArraySegment<byte>(bytes, 0, bytes.Length), resolver);
}
public static T Deserialize<T>(ArraySegment<byte> bytes)
{
return DeserializeCore<T>(bytes, null);
}
public static T Deserialize<T>(ArraySegment<byte> bytes, IFormatterResolver resolver)
{
return DeserializeCore<T>(bytes, resolver);
}
public static T Deserialize<T>(Stream stream)
{
return Deserialize<T>(stream, null);
}
public static T Deserialize<T>(Stream stream, IFormatterResolver resolver)
{
return Deserialize<T>(stream, resolver, false);
}
public static T Deserialize<T>(Stream stream, bool readStrict)
{
return Deserialize<T>(stream, MessagePackSerializer.DefaultResolver, readStrict);
}
public static T Deserialize<T>(Stream stream, IFormatterResolver resolver, bool readStrict)
{
if (!readStrict)
{
var buffer = MessagePack.Internal.InternalMemoryPool.GetBuffer(); // use MessagePackSerializer.Pool!
var len = FillFromStream(stream, ref buffer);
return DeserializeCore<T>(new ArraySegment<byte>(buffer, 0, len), resolver);
}
else
{
int blockSize;
var bytes = MessagePackBinary.ReadMessageBlockFromStreamUnsafe(stream, false, out blockSize);
return DeserializeCore<T>(new ArraySegment<byte>(bytes, 0, blockSize), resolver);
}
}
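// Illustrative sketch (added; not part of the vendored library): a minimal round trip.
// Serialize compresses only when the MessagePack payload reaches NotCompressionSize (64 bytes);
// Deserialize recognizes the ext type code 99 wrapper and decompresses before formatting.
private static T RoundTripExample<T>(T value)
{
var bytes = Serialize(value);
return Deserialize<T>(bytes);
}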
public static byte[] Decode(Stream stream, bool readStrict = false)
{
if (!readStrict)
{
var buffer = MessagePack.Internal.InternalMemoryPool.GetBuffer(); // use MessagePackSerializer.Pool!
var len = FillFromStream(stream, ref buffer);
return Decode(new ArraySegment<byte>(buffer, 0, len));
}
else
{
int blockSize;
var bytes = MessagePackBinary.ReadMessageBlockFromStreamUnsafe(stream, false, out blockSize);
return Decode(new ArraySegment<byte>(bytes, 0, blockSize));
}
}
public static byte[] Decode(byte[] bytes)
{
return Decode(new ArraySegment<byte>(bytes, 0, bytes.Length));
}
public static byte[] Decode(ArraySegment<byte> bytes)
{
int readSize;
if (MessagePackBinary.GetMessagePackType(bytes.Array, bytes.Offset) == MessagePackType.Extension)
{
var header = MessagePackBinary.ReadExtensionFormatHeader(bytes.Array, bytes.Offset, out readSize);
if (header.TypeCode == ExtensionTypeCode)
{
// decode lz4
var offset = bytes.Offset + readSize;
var length = MessagePackBinary.ReadInt32(bytes.Array, offset, out readSize);
offset += readSize;
var buffer = new byte[length]; // use new buffer.
// LZ4 Decode
var len = bytes.Count + bytes.Offset - offset;
LZ4Codec.Decode(bytes.Array, offset, len, buffer, 0, length);
return buffer;
}
}
if (bytes.Offset == 0 && bytes.Array.Length == bytes.Count)
{
// return same reference
return bytes.Array;
}
else
{
var result = new byte[bytes.Count];
Buffer.BlockCopy(bytes.Array, bytes.Offset, result, 0, result.Length);
return result;
}
}
/// <summary>
/// Gets the raw memory pool byte[]. The result must not be shared across threads or held onto, and no further LZ4 deserialization may be performed before the buffer is consumed.
/// </summary>
public static byte[] DecodeUnsafe(byte[] bytes)
{
return DecodeUnsafe(new ArraySegment<byte>(bytes, 0, bytes.Length));
}
/// <summary>
/// Gets the raw memory pool byte[]. The result must not be shared across threads or held onto, and no further LZ4 deserialization may be performed before the buffer is consumed.
/// </summary>
public static byte[] DecodeUnsafe(ArraySegment<byte> bytes)
{
int readSize;
if (MessagePackBinary.GetMessagePackType(bytes.Array, bytes.Offset) == MessagePackType.Extension)
{
var header = MessagePackBinary.ReadExtensionFormatHeader(bytes.Array, bytes.Offset, out readSize);
if (header.TypeCode == ExtensionTypeCode)
{
// decode lz4
var offset = bytes.Offset + readSize;
var length = MessagePackBinary.ReadInt32(bytes.Array, offset, out readSize);
offset += readSize;
var buffer = LZ4MemoryPool.GetBuffer(); // use LZ4 Pool(Unsafe)
if (buffer.Length < length)
{
buffer = new byte[length];
}
// LZ4 Decode
var len = bytes.Count + bytes.Offset - offset;
LZ4Codec.Decode(bytes.Array, offset, len, buffer, 0, length);
return buffer; // return pooled bytes.
}
}
if (bytes.Offset == 0 && bytes.Array.Length == bytes.Count)
{
// return same reference
return bytes.Array;
}
else
{
var result = new byte[bytes.Count];
Buffer.BlockCopy(bytes.Array, bytes.Offset, result, 0, result.Length);
return result;
}
}
static T DeserializeCore<T>(ArraySegment<byte> bytes, IFormatterResolver resolver)
{
if (resolver == null) resolver = MessagePackSerializer.DefaultResolver;
var formatter = resolver.GetFormatterWithVerify<T>();
int readSize;
if (MessagePackBinary.GetMessagePackType(bytes.Array, bytes.Offset) == MessagePackType.Extension)
{
var header = MessagePackBinary.ReadExtensionFormatHeader(bytes.Array, bytes.Offset, out readSize);
if (header.TypeCode == ExtensionTypeCode)
{
// decode lz4
var offset = bytes.Offset + readSize;
var length = MessagePackBinary.ReadInt32(bytes.Array, offset, out readSize);
offset += readSize;
var buffer = LZ4MemoryPool.GetBuffer(); // use LZ4 Pool
if (buffer.Length < length)
{
buffer = new byte[length];
}
// LZ4 Decode
var len = bytes.Count + bytes.Offset - offset;
LZ4Codec.Decode(bytes.Array, offset, len, buffer, 0, length);
return formatter.Deserialize(buffer, 0, resolver, out readSize);
}
}
return formatter.Deserialize(bytes.Array, bytes.Offset, resolver, out readSize);
}
static int FillFromStream(Stream input, ref byte[] buffer)
{
int length = 0;
int read;
while ((read = input.Read(buffer, length, buffer.Length - length)) > 0)
{
length += read;
if (length == buffer.Length)
{
MessagePackBinary.FastResize(ref buffer, length * 2);
}
}
return length;
}
}
}
namespace Datadog.Trace.Vendors.MessagePack.Internal
{
internal static class LZ4MemoryPool
{
[ThreadStatic]
static byte[] lz4buffer = null;
public static byte[] GetBuffer()
{
if (lz4buffer == null)
{
lz4buffer = new byte[LZ4.LZ4Codec.MaximumOutputLength(65536)];
}
return lz4buffer;
}
}
}

View File

@ -0,0 +1,98 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFrameworks>netstandard1.6;netstandard2.0;net45;net47</TargetFrameworks>
<NoWarn>$(NoWarn);CS0649</NoWarn>
<AllowUnsafeBlocks>True</AllowUnsafeBlocks>
<DefineConstants>$(DefineConstants);ENABLE_UNSAFE_MSGPACK</DefineConstants>
<CheckForOverflowUnderflow>True</CheckForOverflowUnderflow>
<IsPackable>true</IsPackable>
<Title>MessagePack for C#</Title>
<Description>Extremely Fast MessagePack(MsgPack) Serializer for C#(.NET, .NET Core, Unity, Xamarin).</Description>
<PackageTags>MsgPack;MessagePack;Serialization;Formatter;Serializer;Unity;Xamarin</PackageTags>
<AssemblyName>MessagePack</AssemblyName>
</PropertyGroup>
<ItemGroup>
<Compile Remove="bin\**" />
<Compile Remove="obj\**" />
<EmbeddedResource Remove="bin\**" />
<EmbeddedResource Remove="obj\**" />
<None Remove="*.meta" />
<None Remove="bin\**" />
<None Remove="obj\**" />
<None Remove="Resolvers\*.meta" />
<None Remove="Internal\*.meta" />
<None Remove="Formatters\*.meta" />
<None Remove="LZ4\Codec\*.meta" />
</ItemGroup>
<ItemGroup>
<None Include="LZ4\Codec\LZ4Codec.Safe.cs" />
<None Include="MessagePackSerializer.Typeless.cs" />
</ItemGroup>
<ItemGroup>
<PackageReference Include="System.Reflection.Emit" Version="4.3.0" />
<PackageReference Include="System.Reflection.Emit.Lightweight" Version="4.3.0" />
<PackageReference Include="System.Threading.Tasks.Extensions" Version="4.5.3" />
<PackageReference Include="System.Runtime.Serialization.Primitives" Version="4.3.0" />
<PackageReference Include="System.ValueTuple" Version="4.5.0" />
</ItemGroup>
<ItemGroup>
<None Update="Formatters\ForceSizePrimitiveFormatter.tt">
<LastGenOutput>ForceSizePrimitiveFormatter.cs</LastGenOutput>
<Generator>TextTemplatingFileGenerator</Generator>
</None>
<None Update="Formatters\PrimitiveFormatter.tt">
<Generator>TextTemplatingFileGenerator</Generator>
<LastGenOutput>PrimitiveFormatter.cs</LastGenOutput>
</None>
<None Update="Formatters\TupleFormatter.tt">
<Generator>TextTemplatingFileGenerator</Generator>
<LastGenOutput>TupleFormatter.cs</LastGenOutput>
</None>
<None Update="Formatters\ValueTupleFormatter.tt">
<Generator>TextTemplatingFileGenerator</Generator>
<LastGenOutput>ValueTupleFormatter.cs</LastGenOutput>
</None>
<None Update="Internal\UnsafeMemory.tt">
<LastGenOutput>UnsafeMemory.cs</LastGenOutput>
<Generator>TextTemplatingFileGenerator</Generator>
</None>
</ItemGroup>
<ItemGroup>
<Compile Update="Formatters\ForceSizePrimitiveFormatter.cs">
<DesignTime>True</DesignTime>
<AutoGen>True</AutoGen>
<DependentUpon>ForceSizePrimitiveFormatter.tt</DependentUpon>
</Compile>
<Compile Update="Formatters\PrimitiveFormatter.cs">
<DesignTime>True</DesignTime>
<AutoGen>True</AutoGen>
<DependentUpon>PrimitiveFormatter.tt</DependentUpon>
</Compile>
<Compile Update="Formatters\TupleFormatter.cs">
<DesignTime>True</DesignTime>
<AutoGen>True</AutoGen>
<DependentUpon>TupleFormatter.tt</DependentUpon>
</Compile>
<Compile Update="Formatters\ValueTupleFormatter.cs">
<DesignTime>True</DesignTime>
<AutoGen>True</AutoGen>
<DependentUpon>ValueTupleFormatter.tt</DependentUpon>
</Compile>
<Compile Update="Internal\UnsafeMemory.cs">
<DependentUpon>UnsafeMemory.tt</DependentUpon>
<DesignTime>True</DesignTime>
<AutoGen>True</AutoGen>
</Compile>
</ItemGroup>
<ItemGroup>
<Service Include="{508349b6-6b84-4df5-91f0-309beebad82d}" />
</ItemGroup>
</Project>

File diff suppressed because it is too large

View File

@ -0,0 +1,203 @@
//------------------------------------------------------------------------------
// <auto-generated />
// This file was automatically generated by the UpdateVendors tool.
//------------------------------------------------------------------------------
using System;
using System.Collections.Generic;
using System.Text;
namespace Datadog.Trace.Vendors.MessagePack
{
/// <summary>
/// https://github.com/msgpack/msgpack/blob/master/spec.md#serialization-type-to-format-conversion
/// </summary>
internal enum MessagePackType : byte
{
Unknown = 0,
Integer = 1,
Nil = 2,
Boolean = 3,
Float = 4,
String = 5,
Binary = 6,
Array = 7,
Map = 8,
Extension = 9,
}
/// <summary>
/// https://github.com/msgpack/msgpack/blob/master/spec.md#overview
/// </summary>
internal static class MessagePackCode
{
public const byte MinFixInt = 0x00; // 0
public const byte MaxFixInt = 0x7f; // 127
public const byte MinFixMap = 0x80; // 128
public const byte MaxFixMap = 0x8f; // 143
public const byte MinFixArray = 0x90; // 144
public const byte MaxFixArray = 0x9f; // 159
public const byte MinFixStr = 0xa0; // 160
public const byte MaxFixStr = 0xbf; // 191
public const byte Nil = 0xc0;
public const byte NeverUsed = 0xc1;
public const byte False = 0xc2;
public const byte True = 0xc3;
public const byte Bin8 = 0xc4;
public const byte Bin16 = 0xc5;
public const byte Bin32 = 0xc6;
public const byte Ext8 = 0xc7;
public const byte Ext16 = 0xc8;
public const byte Ext32 = 0xc9;
public const byte Float32 = 0xca;
public const byte Float64 = 0xcb;
public const byte UInt8 = 0xcc;
public const byte UInt16 = 0xcd;
public const byte UInt32 = 0xce;
public const byte UInt64 = 0xcf;
public const byte Int8 = 0xd0;
public const byte Int16 = 0xd1;
public const byte Int32 = 0xd2;
public const byte Int64 = 0xd3;
public const byte FixExt1 = 0xd4;
public const byte FixExt2 = 0xd5;
public const byte FixExt4 = 0xd6;
public const byte FixExt8 = 0xd7;
public const byte FixExt16 = 0xd8;
public const byte Str8 = 0xd9;
public const byte Str16 = 0xda;
public const byte Str32 = 0xdb;
public const byte Array16 = 0xdc;
public const byte Array32 = 0xdd;
public const byte Map16 = 0xde;
public const byte Map32 = 0xdf;
public const byte MinNegativeFixInt = 0xe0; // 224
public const byte MaxNegativeFixInt = 0xff; // 255
static readonly MessagePackType[] typeLookupTable = new MessagePackType[256];
static readonly string[] formatNameTable = new string[256];
static MessagePackCode()
{
// Init Lookup Table
for (int i = MinFixInt; i <= MaxFixInt; i++)
{
typeLookupTable[i] = MessagePackType.Integer;
formatNameTable[i] = "positive fixint";
}
for (int i = MinFixMap; i <= MaxFixMap; i++)
{
typeLookupTable[i] = MessagePackType.Map;
formatNameTable[i] = "fixmap";
}
for (int i = MinFixArray; i <= MaxFixArray; i++)
{
typeLookupTable[i] = MessagePackType.Array;
formatNameTable[i] = "fixarray";
}
for (int i = MinFixStr; i <= MaxFixStr; i++)
{
typeLookupTable[i] = MessagePackType.String;
formatNameTable[i] = "fixstr";
}
typeLookupTable[Nil] = MessagePackType.Nil;
typeLookupTable[NeverUsed] = MessagePackType.Unknown;
typeLookupTable[False] = MessagePackType.Boolean;
typeLookupTable[True] = MessagePackType.Boolean;
typeLookupTable[Bin8] = MessagePackType.Binary;
typeLookupTable[Bin16] = MessagePackType.Binary;
typeLookupTable[Bin32] = MessagePackType.Binary;
typeLookupTable[Ext8] = MessagePackType.Extension;
typeLookupTable[Ext16] = MessagePackType.Extension;
typeLookupTable[Ext32] = MessagePackType.Extension;
typeLookupTable[Float32] = MessagePackType.Float;
typeLookupTable[Float64] = MessagePackType.Float;
typeLookupTable[UInt8] = MessagePackType.Integer;
typeLookupTable[UInt16] = MessagePackType.Integer;
typeLookupTable[UInt32] = MessagePackType.Integer;
typeLookupTable[UInt64] = MessagePackType.Integer;
typeLookupTable[Int8] = MessagePackType.Integer;
typeLookupTable[Int16] = MessagePackType.Integer;
typeLookupTable[Int32] = MessagePackType.Integer;
typeLookupTable[Int64] = MessagePackType.Integer;
typeLookupTable[FixExt1] = MessagePackType.Extension;
typeLookupTable[FixExt2] = MessagePackType.Extension;
typeLookupTable[FixExt4] = MessagePackType.Extension;
typeLookupTable[FixExt8] = MessagePackType.Extension;
typeLookupTable[FixExt16] = MessagePackType.Extension;
typeLookupTable[Str8] = MessagePackType.String;
typeLookupTable[Str16] = MessagePackType.String;
typeLookupTable[Str32] = MessagePackType.String;
typeLookupTable[Array16] = MessagePackType.Array;
typeLookupTable[Array32] = MessagePackType.Array;
typeLookupTable[Map16] = MessagePackType.Map;
typeLookupTable[Map32] = MessagePackType.Map;
formatNameTable[Nil] = "nil";
formatNameTable[NeverUsed] = "(never used)";
formatNameTable[False] = "false";
formatNameTable[True] = "true";
formatNameTable[Bin8] = "bin 8";
formatNameTable[Bin16] = "bin 16";
formatNameTable[Bin32] = "bin 32";
formatNameTable[Ext8] = "ext 8";
formatNameTable[Ext16] = "ext 16";
formatNameTable[Ext32] = "ext 32";
formatNameTable[Float32] = "float 32";
formatNameTable[Float64] = "float 64";
formatNameTable[UInt8] = "uint 8";
formatNameTable[UInt16] = "uint 16";
formatNameTable[UInt32] = "uint 32";
formatNameTable[UInt64] = "uint 64";
formatNameTable[Int8] = "int 8";
formatNameTable[Int16] = "int 16";
formatNameTable[Int32] = "int 32";
formatNameTable[Int64] = "int 64";
formatNameTable[FixExt1] = "fixext 1";
formatNameTable[FixExt2] = "fixext 2";
formatNameTable[FixExt4] = "fixext 4";
formatNameTable[FixExt8] = "fixext 8";
formatNameTable[FixExt16] = "fixext 16";
formatNameTable[Str8] = "str 8";
formatNameTable[Str16] = "str 16";
formatNameTable[Str32] = "str 32";
formatNameTable[Array16] = "array 16";
formatNameTable[Array32] = "array 32";
formatNameTable[Map16] = "map 16";
formatNameTable[Map32] = "map 32";
for (int i = MinNegativeFixInt; i <= MaxNegativeFixInt; i++)
{
typeLookupTable[i] = MessagePackType.Integer;
formatNameTable[i] = "negative fixint";
}
}
public static MessagePackType ToMessagePackType(byte code)
{
return typeLookupTable[code];
}
public static string ToFormatName(byte code)
{
return formatNameTable[code];
}
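// Illustrative usage sketch (added; not part of the vendored code): classifying the lead byte
// of a msgpack payload without decoding it. `bytes` is a hypothetical msgpack buffer.
//
//     byte lead = bytes[0];
//     MessagePackType kind = MessagePackCode.ToMessagePackType(lead); // e.g. MessagePackType.Integer
//     string name = MessagePackCode.ToFormatName(lead);               // e.g. "positive fixint"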
}
internal static class ReservedMessagePackExtensionTypeCode
{
public const sbyte DateTime = -1;
}
internal static class MessagePackRange
{
public const int MinFixNegativeInt = -32;
public const int MaxFixNegativeInt = -1;
public const int MaxFixPositiveInt = 127;
public const int MinFixStringLength = 0;
public const int MaxFixStringLength = 31;
public const int MaxFixMapCount = 15;
public const int MaxFixArrayCount = 15;
}
}

View File

@ -0,0 +1,364 @@
//------------------------------------------------------------------------------
// <auto-generated />
// This file was automatically generated by the UpdateVendors tool.
//------------------------------------------------------------------------------
// Copyright (c) All contributors. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for full license information.
using System;
using System.Collections;
using System.Collections.Generic;
using System.Linq;
using System.Reflection;
using System.Runtime.ExceptionServices;
using System.Threading;
using Datadog.Trace.Vendors.MessagePack.Formatters;
using Datadog.Trace.Vendors.MessagePack.Internal;
namespace Datadog.Trace.Vendors.MessagePack
{
/// <summary>
/// Settings related to security, particularly relevant when deserializing data from untrusted sources.
/// </summary>
internal class MessagePackSecurity
{
/// <summary>
/// The thread-local value tracking recursion for an ongoing deserialization operation.
/// </summary>
private static readonly ThreadLocal<int> ObjectGraphDepth = new ThreadLocal<int>();
/// <summary>
/// Gets an instance preconfigured with settings that omit all protections. Useful for deserializing fully-trusted and valid msgpack sequences.
/// </summary>
public static readonly MessagePackSecurity TrustedData = new MessagePackSecurity();
/// <summary>
/// Gets an instance preconfigured with protections applied with reasonable settings for deserializing untrusted msgpack sequences.
/// </summary>
public static readonly MessagePackSecurity UntrustedData = new MessagePackSecurity
{
HashCollisionResistant = true,
MaximumObjectGraphDepth = 500,
};
/// <summary>
/// The <see cref="MessagePackSecurity"/> instance that is active for all deserialization operations within this AppDomain or process.
/// </summary>
/// <value>Defaults to <see cref="TrustedData"/>.</value>
public static MessagePackSecurity Active = TrustedData;
private readonly ObjectFallbackEqualityComparer objectFallbackEqualityComparer;
private MessagePackSecurity()
{
this.objectFallbackEqualityComparer = new ObjectFallbackEqualityComparer(this);
}
/// <summary>
/// Initializes a new instance of the <see cref="MessagePackSecurity"/> class
/// with properties copied from a provided template.
/// </summary>
/// <param name="copyFrom">The template to copy from.</param>
protected MessagePackSecurity(MessagePackSecurity copyFrom)
{
if (copyFrom is null)
{
throw new ArgumentNullException(nameof(copyFrom));
}
this.HashCollisionResistant = copyFrom.HashCollisionResistant;
this.MaximumObjectGraphDepth = copyFrom.MaximumObjectGraphDepth;
}
/// <summary>
/// Gets a value indicating whether data to be deserialized is untrusted and thus should not be allowed to create
/// dictionaries or other hash-based collections unless the hashed type has a hash collision resistant implementation available.
/// This can mitigate some denial of service attacks when deserializing untrusted code.
/// </summary>
/// <value>
/// The value is <c>false</c> for <see cref="TrustedData"/> and <c>true</c> for <see cref="UntrustedData"/>.
/// </value>
public bool HashCollisionResistant { get; private set; }
/// <summary>
/// Gets the maximum depth of an object graph that may be deserialized.
/// </summary>
/// <remarks>
/// <para>
/// This value can be reduced to avoid a stack overflow that would crash the process when deserializing a msgpack sequence designed to cause deep recursion.
/// A very short callstack on a thread with 1MB of total stack space might deserialize ~2000 nested arrays before crashing due to a stack overflow.
/// Since stack space occupied may vary by the kind of object deserialized, a conservative value for this property to defend against stack overflow attacks might be 500.
/// </para>
/// </remarks>
public int MaximumObjectGraphDepth { get; private set; } = int.MaxValue;
/// <summary>
/// Gets a copy of these options with the <see cref="MaximumObjectGraphDepth"/> property set to a new value.
/// </summary>
/// <param name="maximumObjectGraphDepth">The new value for the <see cref="MaximumObjectGraphDepth"/> property.</param>
/// <returns>The new instance; or the original if the value is unchanged.</returns>
public MessagePackSecurity WithMaximumObjectGraphDepth(int maximumObjectGraphDepth)
{
if (this.MaximumObjectGraphDepth == maximumObjectGraphDepth)
{
return this;
}
var clone = this.Clone();
clone.MaximumObjectGraphDepth = maximumObjectGraphDepth;
return clone;
}
/// <summary>
/// Gets a copy of these options with the <see cref="HashCollisionResistant"/> property set to a new value.
/// </summary>
/// <param name="hashCollisionResistant">The new value for the <see cref="HashCollisionResistant"/> property.</param>
/// <returns>The new instance; or the original if the value is unchanged.</returns>
public MessagePackSecurity WithHashCollisionResistant(bool hashCollisionResistant)
{
if (this.HashCollisionResistant == hashCollisionResistant)
{
return this;
}
var clone = this.Clone();
clone.HashCollisionResistant = hashCollisionResistant;
return clone;
}
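// Illustrative usage sketch (added; not part of the vendored code): tightening the
// process-wide settings before reading untrusted input, using the With* copies above.
//
//     MessagePackSecurity.Active = MessagePackSecurity.UntrustedData.WithMaximumObjectGraphDepth(100);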
/// <summary>
/// Gets an <see cref="IEqualityComparer{T}"/> that is suitable to use with a hash-based collection.
/// </summary>
/// <typeparam name="T">The type of key that will be hashed in the collection.</typeparam>
/// <returns>The <see cref="IEqualityComparer{T}"/> to use.</returns>
/// <remarks>
/// When <see cref="HashCollisionResistant"/> is active, this will be a collision resistant instance which may reject certain key types.
/// When <see cref="HashCollisionResistant"/> is not active, this will be <see cref="EqualityComparer{T}.Default"/>.
/// </remarks>
public IEqualityComparer<T> GetEqualityComparer<T>()
{
return this.HashCollisionResistant ? GetHashCollisionResistantEqualityComparer<T>() : EqualityComparer<T>.Default;
}
/// <summary>
/// Gets an <see cref="IEqualityComparer"/> that is suitable to use with a hash-based collection.
/// </summary>
/// <returns>The <see cref="IEqualityComparer"/> to use.</returns>
/// <remarks>
/// When <see cref="HashCollisionResistant"/> is active, this will be a collision resistant instance which may reject certain key types.
/// When <see cref="HashCollisionResistant"/> is not active, this will be <see cref="EqualityComparer{T}.Default"/>.
/// </remarks>
public IEqualityComparer GetEqualityComparer()
{
return this.HashCollisionResistant ? GetHashCollisionResistantEqualityComparer() : EqualityComparer<object>.Default;
}
/// <summary>
/// Returns a hash collision resistant equality comparer.
/// </summary>
/// <typeparam name="T">The type of key that will be hashed in the collection.</typeparam>
/// <returns>A hash collision resistant equality comparer.</returns>
protected virtual IEqualityComparer<T> GetHashCollisionResistantEqualityComparer<T>()
{
// For anything 32-bits and under, our fallback base secure hasher is usually adequate since it makes the hash unpredictable.
// We should have special implementations for any value that is larger than 32-bits in order to make sure
// that all the data gets hashed securely rather than trivially and predictably compressed into 32-bits before being hashed.
// We also have to specially handle some 32-bit types (e.g. float) where multiple in-memory representations should hash to the same value.
// Any type supported by the PrimitiveObjectFormatter should be added here if supporting it as a key in a collection makes sense.
return
// 32-bits or smaller:
typeof(T) == typeof(bool) ? CollisionResistantHasher<T>.Instance :
typeof(T) == typeof(char) ? CollisionResistantHasher<T>.Instance :
typeof(T) == typeof(sbyte) ? CollisionResistantHasher<T>.Instance :
typeof(T) == typeof(byte) ? CollisionResistantHasher<T>.Instance :
typeof(T) == typeof(short) ? CollisionResistantHasher<T>.Instance :
typeof(T) == typeof(ushort) ? CollisionResistantHasher<T>.Instance :
typeof(T) == typeof(int) ? CollisionResistantHasher<T>.Instance :
typeof(T) == typeof(uint) ? CollisionResistantHasher<T>.Instance :
// Larger than 32-bits (or otherwise require special handling):
typeof(T) == typeof(long) ? (IEqualityComparer<T>)Int64EqualityComparer.Instance :
typeof(T) == typeof(ulong) ? (IEqualityComparer<T>)UInt64EqualityComparer.Instance :
typeof(T) == typeof(string) ? (IEqualityComparer<T>)StringEqualityComparer.Instance :
typeof(T) == typeof(object) ? (IEqualityComparer<T>)this.objectFallbackEqualityComparer :
// Any type we don't explicitly whitelist here shouldn't be allowed as the key in a hash-based collection, since it isn't known to be hash collision resistant.
// This method can of course be overridden to add more hash collision resistant type support, or the deserializing party can indicate that the data is Trusted
// so that this method doesn't even get called.
throw new TypeAccessException($"No hash-resistant equality comparer available for type: {typeof(T)}");
}
/// <summary>
/// Should be called within the expression of a <c>using</c> statement around which a <see cref="IMessagePackFormatter{T}.Deserialize"/> method
/// deserializes a sub-element.
/// </summary>
/// <returns>A value to be disposed of when deserializing the sub-element is complete.</returns>
/// <exception cref="InsufficientExecutionStackException">Thrown when the depth of the object graph being deserialized exceeds <see cref="MaximumObjectGraphDepth"/>.</exception>
/// <remarks>
/// Rather than wrap the body of every <see cref="IMessagePackFormatter{T}.Deserialize"/> method,
/// this should wrap *calls* to these methods. They need not appear in pure "thunk" methods that simply delegate the deserialization to another formatter.
/// In this way, we can avoid repeatedly incrementing and decrementing the counter when deserializing each element of a collection.
/// </remarks>
public static ObjectGraphDepthStep DepthStep()
{
int max = Active.MaximumObjectGraphDepth;
if (max < int.MaxValue)
{
int current = ObjectGraphDepth.Value;
if (current >= max)
{
throw new InsufficientExecutionStackException($"This msgpack sequence has an object graph that exceeds the maximum depth allowed of {max}.");
}
ObjectGraphDepth.Value = current + 1;
return new ObjectGraphDepthStep(true);
}
return default;
}
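// Illustrative usage sketch (added; not part of the vendored code): how a call site that
// deserializes a sub-element guards recursion, per the remarks above. The formatter call
// and its arguments are hypothetical.
//
//     using (MessagePackSecurity.DepthStep())
//     {
//         element = formatter.Deserialize(bytes, offset, resolver, out readSize);
//     }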
/// <summary>
/// Returns a hash collision resistant equality comparer.
/// </summary>
/// <returns>A hash collision resistant equality comparer.</returns>
protected virtual IEqualityComparer GetHashCollisionResistantEqualityComparer() => (IEqualityComparer)this.GetHashCollisionResistantEqualityComparer<object>();
/// <summary>
/// Creates a new instance that is a copy of this one.
/// </summary>
/// <remarks>
/// Derived types should override this method to instantiate their own derived type.
/// </remarks>
protected virtual MessagePackSecurity Clone() => new MessagePackSecurity(this);
/// <summary>
/// The struct returned from <see cref="DepthStep"/>
/// that when disposed will decrement the object graph depth counter to reverse
/// the effect of the call to <see cref="DepthStep"/>.
/// </summary>
internal struct ObjectGraphDepthStep : IDisposable
{
private readonly bool active;
internal ObjectGraphDepthStep(bool active)
{
this.active = active;
}
/// <inheritdoc />
public void Dispose()
{
if (this.active)
{
ObjectGraphDepth.Value--;
}
}
}
/// <summary>
/// A hash collision resistant implementation of <see cref="IEqualityComparer{T}"/>.
/// </summary>
/// <typeparam name="T">The type of key that will be hashed.</typeparam>
private class CollisionResistantHasher<T> : IEqualityComparer<T>, IEqualityComparer
{
internal static readonly CollisionResistantHasher<T> Instance = new CollisionResistantHasher<T>();
public bool Equals(T x, T y) => EqualityComparer<T>.Default.Equals(x, y);
bool IEqualityComparer.Equals(object x, object y) => ((IEqualityComparer)EqualityComparer<T>.Default).Equals(x, y);
public int GetHashCode(object obj) => this.GetHashCode((T)obj);
public virtual int GetHashCode(T value) => HashCode.Combine(value);
}
/// <summary>
/// A special hash-resistant equality comparer that defers picking the actual implementation
/// until it can check the runtime type of each value to be hashed.
/// </summary>
private class ObjectFallbackEqualityComparer : IEqualityComparer<object>, IEqualityComparer
{
private static readonly object[] EmptyObjectArray = new object[0];
private static readonly MethodInfo GetHashCollisionResistantEqualityComparerOpenGenericMethod = typeof(MessagePackSecurity).GetTypeInfo().DeclaredMethods.Single(m => m.Name == nameof(MessagePackSecurity.GetHashCollisionResistantEqualityComparer) && m.IsGenericMethod);
private readonly MessagePackSecurity security;
private readonly ThreadsafeTypeKeyHashTable<IEqualityComparer> equalityComparerCache = new ThreadsafeTypeKeyHashTable<IEqualityComparer>();
internal ObjectFallbackEqualityComparer(MessagePackSecurity security)
{
this.security = security ?? throw new ArgumentNullException(nameof(security));
}
bool IEqualityComparer<object>.Equals(object x, object y) => EqualityComparer<object>.Default.Equals(x, y);
bool IEqualityComparer.Equals(object x, object y) => ((IEqualityComparer)EqualityComparer<object>.Default).Equals(x, y);
public int GetHashCode(object value)
{
if (value is null)
{
return 0;
}
Type valueType = value.GetType();
// Take care to avoid recursion.
if (valueType == typeof(object))
{
// We can trust object.GetHashCode() to be collision resistant.
return value.GetHashCode();
}
if (!equalityComparerCache.TryGetValue(valueType, out IEqualityComparer equalityComparer))
{
try
{
equalityComparer = (IEqualityComparer)GetHashCollisionResistantEqualityComparerOpenGenericMethod.MakeGenericMethod(valueType).Invoke(this.security, EmptyObjectArray);
}
catch (TargetInvocationException ex)
{
ExceptionDispatchInfo.Capture(ex.InnerException).Throw();
}
equalityComparerCache.TryAdd(valueType, equalityComparer);
}
return equalityComparer.GetHashCode(value);
}
}
private class UInt64EqualityComparer : CollisionResistantHasher<ulong>
{
internal static new readonly UInt64EqualityComparer Instance = new UInt64EqualityComparer();
public override int GetHashCode(ulong value) => HashCode.Combine((uint)(value >> 32), unchecked((uint)value));
}
private class Int64EqualityComparer : CollisionResistantHasher<long>
{
internal static new readonly Int64EqualityComparer Instance = new Int64EqualityComparer();
public override int GetHashCode(long value) => HashCode.Combine((int)(value >> 32), unchecked((int)value));
}
private class StringEqualityComparer : CollisionResistantHasher<string>
{
internal static new readonly StringEqualityComparer Instance = new StringEqualityComparer();
public override int GetHashCode(string value)
{
#if NETCOREAPP
// .NET Core already has a secure string hashing function. Just use it.
return value?.GetHashCode() ?? 0;
#else
var hash = default(HashCode);
for (int i = 0; i < value.Length; i++)
{
hash.Add(value[i]);
}
return hash.ToHashCode();
#endif
}
}
}
}
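
As an illustration of the depth guard above, here is a minimal sketch of how a recursive formatter inside the Datadog.Trace assembly (where these vendored internal types are visible) could use DepthStep(); ExampleNode and ExampleNodeFormatter are hypothetical and exist only for this sketch.

using Datadog.Trace.Vendors.MessagePack;
using Datadog.Trace.Vendors.MessagePack.Formatters;

// Hypothetical recursive type used only to illustrate the depth guard.
internal class ExampleNode
{
    public ExampleNode Child;
}

internal class ExampleNodeFormatter : IMessagePackFormatter<ExampleNode>
{
    public int Serialize(ref byte[] bytes, int offset, ExampleNode value, IFormatterResolver formatterResolver)
    {
        if (value == null)
        {
            return MessagePackBinary.WriteNil(ref bytes, offset);
        }

        var startOffset = offset;
        offset += MessagePackBinary.WriteArrayHeader(ref bytes, offset, 1);
        offset += Serialize(ref bytes, offset, value.Child, formatterResolver);
        return offset - startOffset;
    }

    public ExampleNode Deserialize(byte[] bytes, int offset, IFormatterResolver formatterResolver, out int readSize)
    {
        if (MessagePackBinary.GetMessagePackType(bytes, offset) == MessagePackType.Nil)
        {
            readSize = 1;
            return null;
        }

        var startOffset = offset;
        MessagePackBinary.ReadArrayHeader(bytes, offset, out readSize); // single-element array
        offset += readSize;

        // The guard wraps the *call* into the nested deserialization; Dispose() undoes the increment.
        // Exceeding MaximumObjectGraphDepth throws InsufficientExecutionStackException.
        ExampleNode child;
        using (MessagePackSecurity.DepthStep())
        {
            child = Deserialize(bytes, offset, formatterResolver, out readSize);
            offset += readSize;
        }

        readSize = offset - startOffset;
        return new ExampleNode { Child = child };
    }
}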


@ -0,0 +1,339 @@
//------------------------------------------------------------------------------
// <auto-generated />
// This file was automatically generated by the UpdateVendors tool.
//------------------------------------------------------------------------------
using Datadog.Trace.Vendors.MessagePack.Formatters;
using Datadog.Trace.Vendors.MessagePack.Internal;
using System;
using System.Globalization;
using System.IO;
using System.Text;
namespace Datadog.Trace.Vendors.MessagePack
{
// JSON API
internal static partial class MessagePackSerializer
{
/// <summary>
/// Dump to JSON string.
/// </summary>
public static string ToJson<T>(T obj)
{
return ToJson(Serialize(obj));
}
/// <summary>
/// Dump to JSON string.
/// </summary>
public static string ToJson<T>(T obj, IFormatterResolver resolver)
{
return ToJson(Serialize(obj, resolver));
}
/// <summary>
/// Dump message-pack binary to JSON string.
/// </summary>
public static string ToJson(byte[] bytes)
{
if (bytes == null || bytes.Length == 0) return "";
var sb = new StringBuilder();
ToJsonCore(bytes, 0, sb);
return sb.ToString();
}
public static byte[] FromJson(string str)
{
using (var sr = new StringReader(str))
{
return FromJson(sr);
}
}
/// <summary>
/// Converts JSON text (read from the given TextReader) to MessagePack binary.
/// </summary>
public static byte[] FromJson(TextReader reader)
{
var offset = 0;
byte[] binary = null;
using (var jr = new TinyJsonReader(reader, false))
{
FromJsonCore(jr, ref binary, ref offset);
}
MessagePackBinary.FastResize(ref binary, offset);
return binary;
}
/// <summary>
/// The returned buffer comes from the shared memory pool; consume it immediately and do not hold a reference to it.
/// </summary>
internal static ArraySegment<byte> FromJsonUnsafe(TextReader reader)
{
var offset = 0;
byte[] binary = InternalMemoryPool.GetBuffer(); // from memory pool.
using (var jr = new TinyJsonReader(reader, false))
{
FromJsonCore(jr, ref binary, ref offset);
}
return new ArraySegment<byte>(binary, 0, offset);
}
static uint FromJsonCore(TinyJsonReader jr, ref byte[] binary, ref int offset)
{
uint count = 0;
while (jr.Read())
{
switch (jr.TokenType)
{
case TinyJsonToken.None:
break;
case TinyJsonToken.StartObject:
{
var startOffset = offset;
offset += 5;
var mapCount = FromJsonCore(jr, ref binary, ref offset);
mapCount = mapCount / 2; // property-name keys were counted as elements, so halve to get the map entry count.
MessagePackBinary.WriteMapHeaderForceMap32Block(ref binary, startOffset, mapCount);
count++;
break;
}
case TinyJsonToken.EndObject:
return count; // break
case TinyJsonToken.StartArray:
{
var startOffset = offset;
offset += 5;
var arrayCount = FromJsonCore(jr, ref binary, ref offset);
MessagePackBinary.WriteArrayHeaderForceArray32Block(ref binary, startOffset, arrayCount);
count++;
break;
}
case TinyJsonToken.EndArray:
return count; // break
case TinyJsonToken.Number:
var v = jr.ValueType;
if (v == ValueType.Double)
{
offset += MessagePackBinary.WriteDouble(ref binary, offset, jr.DoubleValue);
}
else if (v == ValueType.Long)
{
offset += MessagePackBinary.WriteInt64(ref binary, offset, jr.LongValue);
}
else if (v == ValueType.ULong)
{
offset += MessagePackBinary.WriteUInt64(ref binary, offset, jr.ULongValue);
}
else if (v == ValueType.Decimal)
{
offset += DecimalFormatter.Instance.Serialize(ref binary, offset, jr.DecimalValue, null);
}
count++;
break;
case TinyJsonToken.String:
offset += MessagePackBinary.WriteString(ref binary, offset, jr.StringValue);
count++;
break;
case TinyJsonToken.True:
offset += MessagePackBinary.WriteBoolean(ref binary, offset, true);
count++;
break;
case TinyJsonToken.False:
offset += MessagePackBinary.WriteBoolean(ref binary, offset, false);
count++;
break;
case TinyJsonToken.Null:
offset += MessagePackBinary.WriteNil(ref binary, offset);
count++;
break;
default:
break;
}
}
return count;
}
static int ToJsonCore(byte[] bytes, int offset, StringBuilder builder)
{
var readSize = 0;
var type = MessagePackBinary.GetMessagePackType(bytes, offset);
switch (type)
{
case MessagePackType.Integer:
var code = bytes[offset];
if (MessagePackCode.MinNegativeFixInt <= code && code <= MessagePackCode.MaxNegativeFixInt) builder.Append(MessagePackBinary.ReadSByte(bytes, offset, out readSize).ToString(System.Globalization.CultureInfo.InvariantCulture));
else if (MessagePackCode.MinFixInt <= code && code <= MessagePackCode.MaxFixInt) builder.Append(MessagePackBinary.ReadByte(bytes, offset, out readSize).ToString(System.Globalization.CultureInfo.InvariantCulture));
else if (code == MessagePackCode.Int8) builder.Append(MessagePackBinary.ReadSByte(bytes, offset, out readSize).ToString(System.Globalization.CultureInfo.InvariantCulture));
else if (code == MessagePackCode.Int16) builder.Append(MessagePackBinary.ReadInt16(bytes, offset, out readSize).ToString(System.Globalization.CultureInfo.InvariantCulture));
else if (code == MessagePackCode.Int32) builder.Append(MessagePackBinary.ReadInt32(bytes, offset, out readSize).ToString(System.Globalization.CultureInfo.InvariantCulture));
else if (code == MessagePackCode.Int64) builder.Append(MessagePackBinary.ReadInt64(bytes, offset, out readSize).ToString(System.Globalization.CultureInfo.InvariantCulture));
else if (code == MessagePackCode.UInt8) builder.Append(MessagePackBinary.ReadByte(bytes, offset, out readSize).ToString(System.Globalization.CultureInfo.InvariantCulture));
else if (code == MessagePackCode.UInt16) builder.Append(MessagePackBinary.ReadUInt16(bytes, offset, out readSize).ToString(System.Globalization.CultureInfo.InvariantCulture));
else if (code == MessagePackCode.UInt32) builder.Append(MessagePackBinary.ReadUInt32(bytes, offset, out readSize).ToString(System.Globalization.CultureInfo.InvariantCulture));
else if (code == MessagePackCode.UInt64) builder.Append(MessagePackBinary.ReadUInt64(bytes, offset, out readSize).ToString(System.Globalization.CultureInfo.InvariantCulture));
break;
case MessagePackType.Boolean:
builder.Append(MessagePackBinary.ReadBoolean(bytes, offset, out readSize) ? "true" : "false");
break;
case MessagePackType.Float:
var floatCode = bytes[offset];
if (floatCode == MessagePackCode.Float32)
{
builder.Append(MessagePackBinary.ReadSingle(bytes, offset, out readSize).ToString(System.Globalization.CultureInfo.InvariantCulture));
}
else
{
builder.Append(MessagePackBinary.ReadDouble(bytes, offset, out readSize).ToString(System.Globalization.CultureInfo.InvariantCulture));
}
break;
case MessagePackType.String:
WriteJsonString(MessagePackBinary.ReadString(bytes, offset, out readSize), builder);
break;
case MessagePackType.Binary:
builder.Append("\"" + Convert.ToBase64String(MessagePackBinary.ReadBytes(bytes, offset, out readSize)) + "\"");
break;
case MessagePackType.Array:
{
var length = MessagePackBinary.ReadArrayHeaderRaw(bytes, offset, out readSize);
var totalReadSize = readSize;
offset += readSize;
using (MessagePackSecurity.DepthStep())
{
builder.Append("[");
for (int i = 0; i < length; i++)
{
readSize = ToJsonCore(bytes, offset, builder);
offset += readSize;
totalReadSize += readSize;
if (i != length - 1)
{
builder.Append(",");
}
}
builder.Append("]");
}
return totalReadSize;
}
case MessagePackType.Map:
{
var length = MessagePackBinary.ReadMapHeaderRaw(bytes, offset, out readSize);
var totalReadSize = readSize;
offset += readSize;
using (MessagePackSecurity.DepthStep())
{
builder.Append("{");
for (int i = 0; i < length; i++)
{
// write key
{
var keyType = MessagePackBinary.GetMessagePackType(bytes, offset);
if (keyType == MessagePackType.String || keyType == MessagePackType.Binary)
{
readSize = ToJsonCore(bytes, offset, builder);
}
else
{
builder.Append("\"");
readSize = ToJsonCore(bytes, offset, builder);
builder.Append("\"");
}
offset += readSize;
totalReadSize += readSize;
}
builder.Append(":");
// write body
{
readSize = ToJsonCore(bytes, offset, builder);
offset += readSize;
totalReadSize += readSize;
}
if (i != length - 1)
{
builder.Append(",");
}
}
builder.Append("}");
}
return totalReadSize;
}
case MessagePackType.Extension:
var extHeader = MessagePackBinary.ReadExtensionFormatHeader(bytes, offset, out readSize);
if (extHeader.TypeCode == ReservedMessagePackExtensionTypeCode.DateTime)
{
var dt = MessagePackBinary.ReadDateTime(bytes, offset, out readSize);
builder.Append("\"");
builder.Append(dt.ToString("o", CultureInfo.InvariantCulture));
builder.Append("\"");
}
else
{
var ext = MessagePackBinary.ReadExtensionFormat(bytes, offset, out readSize);
builder.Append("[");
builder.Append(ext.TypeCode);
builder.Append(",");
builder.Append("\"");
builder.Append(Convert.ToBase64String(ext.Data));
builder.Append("\"");
builder.Append("]");
}
break;
case MessagePackType.Unknown:
case MessagePackType.Nil:
default:
readSize = 1;
builder.Append("null");
break;
}
return readSize;
}
// escape string
static void WriteJsonString(string value, StringBuilder builder)
{
builder.Append('\"');
var len = value.Length;
for (int i = 0; i < len; i++)
{
var c = value[i];
switch (c)
{
case '"':
builder.Append("\\\"");
break;
case '\\':
builder.Append("\\\\");
break;
case '\b':
builder.Append("\\b");
break;
case '\f':
builder.Append("\\f");
break;
case '\n':
builder.Append("\\n");
break;
case '\r':
builder.Append("\\r");
break;
case '\t':
builder.Append("\\t");
break;
default:
builder.Append(c);
break;
}
}
builder.Append('\"');
}
}
}
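
As a usage note for the JSON bridge above, a minimal sketch (assuming it runs inside the Datadog.Trace assembly, since the vendored types are internal) that round-trips JSON text through MessagePack binary and back:

using System;
using Datadog.Trace.Vendors.MessagePack;

internal static class JsonBridgeExample
{
    internal static void Run()
    {
        // MessagePack binary for the map {"service":"web","duration":42}, built from JSON text.
        byte[] msgpack = MessagePackSerializer.FromJson("{\"service\":\"web\",\"duration\":42}");

        // ...and back to a JSON string, e.g. for logging or diagnostics.
        string json = MessagePackSerializer.ToJson(msgpack);
        Console.WriteLine(json); // expected: {"service":"web","duration":42}
    }
}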


@ -0,0 +1,324 @@
//------------------------------------------------------------------------------
// <auto-generated />
// This file was automatically generated by the UpdateVendors tool.
//------------------------------------------------------------------------------
#if NETSTANDARD || NETFRAMEWORK
using System;
using System.IO;
using System.Linq;
using System.Linq.Expressions;
using System.Reflection;
namespace Datadog.Trace.Vendors.MessagePack
{
// NonGeneric API
internal static partial class MessagePackSerializer
{
internal static class NonGeneric
{
delegate int RawFormatterSerialize(ref byte[] bytes, int offset, object value, IFormatterResolver formatterResolver);
delegate object RawFormatterDeserialize(byte[] bytes, int offset, IFormatterResolver formatterResolver, out int readSize);
static readonly Func<Type, CompiledMethods> CreateCompiledMethods;
static readonly MessagePack.Internal.ThreadsafeTypeKeyHashTable<CompiledMethods> serializes = new MessagePack.Internal.ThreadsafeTypeKeyHashTable<CompiledMethods>(capacity: 64);
static NonGeneric()
{
CreateCompiledMethods = t => new CompiledMethods(t);
}
public static byte[] Serialize(Type type, object obj)
{
return GetOrAdd(type).serialize1.Invoke(obj);
}
public static byte[] Serialize(Type type, object obj, IFormatterResolver resolver)
{
return GetOrAdd(type).serialize2.Invoke(obj, resolver);
}
public static void Serialize(Type type, Stream stream, object obj)
{
GetOrAdd(type).serialize3.Invoke(stream, obj);
}
public static void Serialize(Type type, Stream stream, object obj, IFormatterResolver resolver)
{
GetOrAdd(type).serialize4.Invoke(stream, obj, resolver);
}
public static int Serialize(Type type, ref byte[] bytes, int offset, object value, IFormatterResolver resolver)
{
return GetOrAdd(type).serialize5.Invoke(ref bytes, offset, value, resolver);
}
public static object Deserialize(Type type, byte[] bytes)
{
return GetOrAdd(type).deserialize1.Invoke(bytes);
}
public static object Deserialize(Type type, byte[] bytes, IFormatterResolver resolver)
{
return GetOrAdd(type).deserialize2.Invoke(bytes, resolver);
}
public static object Deserialize(Type type, Stream stream)
{
return GetOrAdd(type).deserialize3.Invoke(stream);
}
public static object Deserialize(Type type, Stream stream, IFormatterResolver resolver)
{
return GetOrAdd(type).deserialize4.Invoke(stream, resolver);
}
public static object Deserialize(Type type, Stream stream, bool readStrict)
{
return GetOrAdd(type).deserialize5.Invoke(stream, readStrict);
}
public static object Deserialize(Type type, Stream stream, IFormatterResolver resolver, bool readStrict)
{
return GetOrAdd(type).deserialize6.Invoke(stream, resolver, readStrict);
}
public static object Deserialize(Type type, ArraySegment<byte> bytes)
{
return GetOrAdd(type).deserialize7.Invoke(bytes);
}
public static object Deserialize(Type type, ArraySegment<byte> bytes, IFormatterResolver resolver)
{
return GetOrAdd(type).deserialize8.Invoke(bytes, resolver);
}
public static object Deserialize(Type type, byte[] bytes, int offset, IFormatterResolver resolver, out int readSize)
{
return GetOrAdd(type).deserialize9.Invoke(bytes, offset, resolver, out readSize);
}
static CompiledMethods GetOrAdd(Type type)
{
return serializes.GetOrAdd(type, CreateCompiledMethods);
}
class CompiledMethods
{
public readonly Func<object, byte[]> serialize1;
public readonly Func<object, IFormatterResolver, byte[]> serialize2;
public readonly Action<Stream, object> serialize3;
public readonly Action<Stream, object, IFormatterResolver> serialize4;
public readonly RawFormatterSerialize serialize5;
public readonly Func<byte[], object> deserialize1;
public readonly Func<byte[], IFormatterResolver, object> deserialize2;
public readonly Func<Stream, object> deserialize3;
public readonly Func<Stream, IFormatterResolver, object> deserialize4;
public readonly Func<Stream, bool, object> deserialize5;
public readonly Func<Stream, IFormatterResolver, bool, object> deserialize6;
public readonly Func<ArraySegment<byte>, object> deserialize7;
public readonly Func<ArraySegment<byte>, IFormatterResolver, object> deserialize8;
public readonly RawFormatterDeserialize deserialize9;
public CompiledMethods(Type type)
{
var ti = type.GetTypeInfo();
{
// public static byte[] Serialize<T>(T obj)
var serialize = GetMethod(type, new Type[] { null });
var param1 = Expression.Parameter(typeof(object), "obj");
var body = Expression.Call(serialize, ti.IsValueType
? Expression.Unbox(param1, type)
: Expression.Convert(param1, type));
var lambda = Expression.Lambda<Func<object, byte[]>>(body, param1).Compile();
this.serialize1 = lambda;
}
{
// public static byte[] Serialize<T>(T obj, IFormatterResolver resolver)
var serialize = GetMethod(type, new Type[] { null, typeof(IFormatterResolver) });
var param1 = Expression.Parameter(typeof(object), "obj");
var param2 = Expression.Parameter(typeof(IFormatterResolver), "formatterResolver");
var body = Expression.Call(serialize, ti.IsValueType
? Expression.Unbox(param1, type)
: Expression.Convert(param1, type), param2);
var lambda = Expression.Lambda<Func<object, IFormatterResolver, byte[]>>(body, param1, param2).Compile();
this.serialize2 = lambda;
}
{
// public static void Serialize<T>(Stream stream, T obj)
var serialize = GetMethod(type, new Type[] { typeof(Stream), null });
var param1 = Expression.Parameter(typeof(Stream), "stream");
var param2 = Expression.Parameter(typeof(object), "obj");
var body = Expression.Call(serialize, param1, ti.IsValueType
? Expression.Unbox(param2, type)
: Expression.Convert(param2, type));
var lambda = Expression.Lambda<Action<Stream, object>>(body, param1, param2).Compile();
this.serialize3 = lambda;
}
{
// public static void Serialize<T>(Stream stream, T obj, IFormatterResolver resolver)
var serialize = GetMethod(type, new Type[] { typeof(Stream), null, typeof(IFormatterResolver) });
var param1 = Expression.Parameter(typeof(Stream), "stream");
var param2 = Expression.Parameter(typeof(object), "obj");
var param3 = Expression.Parameter(typeof(IFormatterResolver), "formatterResolver");
var body = Expression.Call(serialize, param1, ti.IsValueType
? Expression.Unbox(param2, type)
: Expression.Convert(param2, type), param3);
var lambda = Expression.Lambda<Action<Stream, object, IFormatterResolver>>(body, param1, param2, param3).Compile();
this.serialize4 = lambda;
}
{
// delegate int RawFormatterSerialize(ref byte[] bytes, int offset, object value, IFormatterResolver formatterResolver);
var serialize = GetMethod(type, new Type[] { typeof(byte[]).MakeByRefType(), typeof(int), null, typeof(IFormatterResolver) });
var param1 = Expression.Parameter(typeof(byte[]).MakeByRefType(), "bytes");
var param2 = Expression.Parameter(typeof(int), "offset");
var param3 = Expression.Parameter(typeof(object), "value");
var param4 = Expression.Parameter(typeof(IFormatterResolver), "formatterResolver");
var body = Expression.Call(serialize, param1, param2, ti.IsValueType
? Expression.Unbox(param3, type)
: Expression.Convert(param3, type), param4);
var lambda = Expression.Lambda<RawFormatterSerialize>(body, param1, param2, param3, param4).Compile();
this.serialize5 = lambda;
}
{
// public static T Deserialize<T>(byte[] bytes)
var deserialize = GetMethod(type, new Type[] { typeof(byte[]) });
var param1 = Expression.Parameter(typeof(byte[]), "bytes");
var body = Expression.Convert(Expression.Call(deserialize, param1), typeof(object));
var lambda = Expression.Lambda<Func<byte[], object>>(body, param1).Compile();
this.deserialize1 = lambda;
}
{
// public static T Deserialize<T>(byte[] bytes, IFormatterResolver resolver)
var deserialize = GetMethod(type, new Type[] { typeof(byte[]), typeof(IFormatterResolver) });
var param1 = Expression.Parameter(typeof(byte[]), "bytes");
var param2 = Expression.Parameter(typeof(IFormatterResolver), "resolver");
var body = Expression.Convert(Expression.Call(deserialize, param1, param2), typeof(object));
var lambda = Expression.Lambda<Func<byte[], IFormatterResolver, object>>(body, param1, param2).Compile();
this.deserialize2 = lambda;
}
{
// public static T Deserialize<T>(Stream stream)
var deserialize = GetMethod(type, new Type[] { typeof(Stream) });
var param1 = Expression.Parameter(typeof(Stream), "stream");
var body = Expression.Convert(Expression.Call(deserialize, param1), typeof(object));
var lambda = Expression.Lambda<Func<Stream, object>>(body, param1).Compile();
this.deserialize3 = lambda;
}
{
// public static T Deserialize<T>(Stream stream, IFormatterResolver resolver)
var deserialize = GetMethod(type, new Type[] { typeof(Stream), typeof(IFormatterResolver) });
var param1 = Expression.Parameter(typeof(Stream), "stream");
var param2 = Expression.Parameter(typeof(IFormatterResolver), "resolver");
var body = Expression.Convert(Expression.Call(deserialize, param1, param2), typeof(object));
var lambda = Expression.Lambda<Func<Stream, IFormatterResolver, object>>(body, param1, param2).Compile();
this.deserialize4 = lambda;
}
{
// public static T Deserialize<T>(Stream stream, bool readStrict)
var deserialize = GetMethod(type, new Type[] { typeof(Stream), typeof(bool) });
var param1 = Expression.Parameter(typeof(Stream), "stream");
var param2 = Expression.Parameter(typeof(bool), "readStrict");
var body = Expression.Convert(Expression.Call(deserialize, param1, param2), typeof(object));
var lambda = Expression.Lambda<Func<Stream, bool, object>>(body, param1, param2).Compile();
this.deserialize5 = lambda;
}
{
// public static T Deserialize<T>(Stream stream, IFormatterResolver resolver, bool readStrict)
var deserialize = GetMethod(type, new Type[] { typeof(Stream), typeof(IFormatterResolver), typeof(bool) });
var param1 = Expression.Parameter(typeof(Stream), "stream");
var param2 = Expression.Parameter(typeof(IFormatterResolver), "resolver");
var param3 = Expression.Parameter(typeof(bool), "readStrict");
var body = Expression.Convert(Expression.Call(deserialize, param1, param2, param3), typeof(object));
var lambda = Expression.Lambda<Func<Stream, IFormatterResolver, bool, object>>(body, param1, param2, param3).Compile();
this.deserialize6 = lambda;
}
{
// public static T Deserialize<T>(ArraySegment<byte> bytes)
var deserialize = GetMethod(type, new Type[] { typeof(ArraySegment<byte>) });
var param1 = Expression.Parameter(typeof(ArraySegment<byte>), "bytes");
var body = Expression.Convert(Expression.Call(deserialize, param1), typeof(object));
var lambda = Expression.Lambda<Func<ArraySegment<byte>, object>>(body, param1).Compile();
this.deserialize7 = lambda;
}
{
// public static T Deserialize<T>(ArraySegment<byte> bytes, IFormatterResolver resolver)
var deserialize = GetMethod(type, new Type[] { typeof(ArraySegment<byte>), typeof(IFormatterResolver) });
var param1 = Expression.Parameter(typeof(ArraySegment<byte>), "bytes");
var param2 = Expression.Parameter(typeof(IFormatterResolver), "resolver");
var body = Expression.Convert(Expression.Call(deserialize, param1, param2), typeof(object));
var lambda = Expression.Lambda<Func<ArraySegment<byte>, IFormatterResolver, object>>(body, param1, param2).Compile();
this.deserialize8 = lambda;
}
{
// public static T Deserialize<T>(byte[] bytes, int offset, IFormatterResolver resolver, out int readSize)
var deserialize = GetMethod(type, new Type[] { typeof(byte[]), typeof(int), typeof(IFormatterResolver), typeof(int).MakeByRefType() });
var param1 = Expression.Parameter(typeof(byte[]), "bytes");
var param2 = Expression.Parameter(typeof(int), "offset");
var param3 = Expression.Parameter(typeof(IFormatterResolver), "resolver");
var param4 = Expression.Parameter(typeof(int).MakeByRefType(), "readSize");
var body = Expression.Convert(Expression.Call(deserialize, param1, param2, param3, param4), typeof(object));
var lambda = Expression.Lambda<RawFormatterDeserialize>(body, param1, param2, param3, param4).Compile();
this.deserialize9 = lambda;
}
}
// A null entry marks the generic type parameter.
static MethodInfo GetMethod(Type type, Type[] parameters)
{
return typeof(MessagePackSerializer).GetRuntimeMethods().Where(x =>
{
if (!(x.Name == "Serialize" || x.Name == "Deserialize")) return false;
var ps = x.GetParameters();
if (ps.Length != parameters.Length) return false;
for (int i = 0; i < ps.Length; i++)
{
if (parameters[i] == null && ps[i].ParameterType.IsGenericParameter) continue;
if (ps[i].ParameterType != parameters[i]) return false;
}
return true;
})
.Single()
.MakeGenericMethod(type);
}
}
}
}
}
#endif
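
A short sketch of the NonGeneric API above, which is the entry point when the payload type is only known at runtime; the RoundTrip helper is hypothetical and assumes the runtime type has a formatter available to the default resolver:

using System;
using Datadog.Trace.Vendors.MessagePack;

internal static class NonGenericExample
{
    internal static object RoundTrip(object value)
    {
        // The concrete type is discovered at runtime; compiled delegates are cached per type.
        Type runtimeType = value.GetType();
        byte[] bytes = MessagePackSerializer.NonGeneric.Serialize(runtimeType, value);
        return MessagePackSerializer.NonGeneric.Deserialize(runtimeType, bytes);
    }
}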


@ -0,0 +1,331 @@
//------------------------------------------------------------------------------
// <auto-generated />
// This file was automatically generated by the UpdateVendors tool.
//------------------------------------------------------------------------------
using Datadog.Trace.Vendors.MessagePack.Internal;
using System;
using System.IO;
namespace Datadog.Trace.Vendors.MessagePack
{
/// <summary>
/// High-Level API of MessagePack for C#.
/// </summary>
internal static partial class MessagePackSerializer
{
static IFormatterResolver defaultResolver;
/// <summary>
/// FormatterResolver used by the resolver-less overloads. If not set explicitly, StandardResolver is used.
/// </summary>
public static IFormatterResolver DefaultResolver
{
get
{
if (defaultResolver == null)
{
defaultResolver = MessagePack.Resolvers.StandardResolver.Instance;
}
return defaultResolver;
}
}
/// <summary>
/// Gets a value indicating whether the default resolver has been set.
/// </summary>
public static bool IsInitialized
{
get
{
return defaultResolver != null;
}
}
/// <summary>
/// Sets the default resolver used by the MessagePackSerializer APIs.
/// </summary>
/// <param name="resolver">The resolver to use as the default.</param>
public static void SetDefaultResolver(IFormatterResolver resolver)
{
defaultResolver = resolver;
}
/// <summary>
/// Serialize to binary with default resolver.
/// </summary>
public static byte[] Serialize<T>(T obj)
{
return Serialize(obj, defaultResolver);
}
/// <summary>
/// Serialize to binary with specified resolver.
/// </summary>
public static byte[] Serialize<T>(T obj, IFormatterResolver resolver)
{
if (resolver == null) resolver = DefaultResolver;
var formatter = resolver.GetFormatterWithVerify<T>();
var buffer = InternalMemoryPool.GetBuffer();
var len = formatter.Serialize(ref buffer, 0, obj, resolver);
// do not return MemoryPool.Buffer.
return MessagePackBinary.FastCloneWithResize(buffer, len);
}
/// <summary>
/// Serialize to binary and return the raw memory-pool byte[]. The result cannot be shared across threads or retained, so consume it immediately.
/// </summary>
public static ArraySegment<byte> SerializeUnsafe<T>(T obj)
{
return SerializeUnsafe(obj, defaultResolver);
}
/// <summary>
/// Serialize to binary with the specified resolver and return the raw memory-pool byte[]. The result cannot be shared across threads or retained, so consume it immediately.
/// </summary>
public static ArraySegment<byte> SerializeUnsafe<T>(T obj, IFormatterResolver resolver)
{
if (resolver == null) resolver = DefaultResolver;
var formatter = resolver.GetFormatterWithVerify<T>();
var buffer = InternalMemoryPool.GetBuffer();
var len = formatter.Serialize(ref buffer, 0, obj, resolver);
// return raw memory pool, unsafe!
return new ArraySegment<byte>(buffer, 0, len);
}
/// <summary>
/// Serialize to stream.
/// </summary>
public static void Serialize<T>(Stream stream, T obj)
{
Serialize(stream, obj, defaultResolver);
}
/// <summary>
/// Serialize to stream with specified resolver.
/// </summary>
public static void Serialize<T>(Stream stream, T obj, IFormatterResolver resolver)
{
if (resolver == null) resolver = DefaultResolver;
var formatter = resolver.GetFormatterWithVerify<T>();
var buffer = InternalMemoryPool.GetBuffer();
var len = formatter.Serialize(ref buffer, 0, obj, resolver);
// do not need resize.
stream.Write(buffer, 0, len);
}
/// <summary>
/// Equivalent to resolver.GetFormatterWithVerify[T].Serialize.
/// </summary>
public static int Serialize<T>(ref byte[] bytes, int offset, T value, IFormatterResolver resolver)
{
return resolver.GetFormatterWithVerify<T>().Serialize(ref bytes, offset, value, resolver);
}
#if NETSTANDARD || NETFRAMEWORK
/// <summary>
/// Serialize to stream (async).
/// </summary>
public static System.Threading.Tasks.Task SerializeAsync<T>(Stream stream, T obj)
{
return SerializeAsync(stream, obj, defaultResolver);
}
/// <summary>
/// Serialize to stream (async) with the specified resolver.
/// </summary>
public static async System.Threading.Tasks.Task SerializeAsync<T>(Stream stream, T obj, IFormatterResolver resolver)
{
if (resolver == null) resolver = DefaultResolver;
var formatter = resolver.GetFormatterWithVerify<T>();
var rentBuffer = BufferPool.Default.Rent();
try
{
var buffer = rentBuffer;
var len = formatter.Serialize(ref buffer, 0, obj, resolver);
// do not need resize.
await stream.WriteAsync(buffer, 0, len).ConfigureAwait(false);
}
finally
{
BufferPool.Default.Return(rentBuffer);
}
}
#endif
public static T Deserialize<T>(byte[] bytes)
{
return Deserialize<T>(bytes, defaultResolver);
}
public static T Deserialize<T>(byte[] bytes, IFormatterResolver resolver)
{
if (resolver == null) resolver = DefaultResolver;
var formatter = resolver.GetFormatterWithVerify<T>();
int readSize;
return formatter.Deserialize(bytes, 0, resolver, out readSize);
}
public static T Deserialize<T>(ArraySegment<byte> bytes)
{
return Deserialize<T>(bytes, defaultResolver);
}
public static T Deserialize<T>(ArraySegment<byte> bytes, IFormatterResolver resolver)
{
if (resolver == null) resolver = DefaultResolver;
var formatter = resolver.GetFormatterWithVerify<T>();
int readSize;
return formatter.Deserialize(bytes.Array, bytes.Offset, resolver, out readSize);
}
public static T Deserialize<T>(Stream stream)
{
return Deserialize<T>(stream, defaultResolver);
}
public static T Deserialize<T>(Stream stream, IFormatterResolver resolver)
{
return Deserialize<T>(stream, resolver, false);
}
public static T Deserialize<T>(Stream stream, bool readStrict)
{
return Deserialize<T>(stream, defaultResolver, readStrict);
}
public static T Deserialize<T>(Stream stream, IFormatterResolver resolver, bool readStrict)
{
if (resolver == null) resolver = DefaultResolver;
var formatter = resolver.GetFormatterWithVerify<T>();
if (!readStrict)
{
#if NETSTANDARD && !NET45
var ms = stream as MemoryStream;
if (ms != null)
{
// optimize for MemoryStream
ArraySegment<byte> buffer;
if (ms.TryGetBuffer(out buffer))
{
int readSize;
return formatter.Deserialize(buffer.Array, buffer.Offset, resolver, out readSize);
}
}
#endif
// no else.
{
var buffer = InternalMemoryPool.GetBuffer();
FillFromStream(stream, ref buffer);
int readSize;
return formatter.Deserialize(buffer, 0, resolver, out readSize);
}
}
else
{
int _;
var bytes = MessagePackBinary.ReadMessageBlockFromStreamUnsafe(stream, false, out _);
int readSize;
return formatter.Deserialize(bytes, 0, resolver, out readSize);
}
}
/// <summary>
/// Equivalent to resolver.GetFormatterWithVerify[T].Deserialize.
/// </summary>
public static T Deserialize<T>(byte[] bytes, int offset, IFormatterResolver resolver, out int readSize)
{
return resolver.GetFormatterWithVerify<T>().Deserialize(bytes, offset, resolver, out readSize);
}
#if NETSTANDARD || NETFRAMEWORK
public static System.Threading.Tasks.Task<T> DeserializeAsync<T>(Stream stream)
{
return DeserializeAsync<T>(stream, defaultResolver);
}
// A readStrict async read is too slow (it creates a lot of Task garbage), so no async readStrict overload is provided.
public static async System.Threading.Tasks.Task<T> DeserializeAsync<T>(Stream stream, IFormatterResolver resolver)
{
var rentBuffer = BufferPool.Default.Rent();
var buf = rentBuffer;
try
{
int length = 0;
int read;
while ((read = await stream.ReadAsync(buf, length, buf.Length - length).ConfigureAwait(false)) > 0)
{
length += read;
if (length == buf.Length)
{
MessagePackBinary.FastResize(ref buf, length * 2);
}
}
return Deserialize<T>(buf, resolver);
}
finally
{
BufferPool.Default.Return(rentBuffer);
}
}
#endif
static int FillFromStream(Stream input, ref byte[] buffer)
{
int length = 0;
int read;
while ((read = input.Read(buffer, length, buffer.Length - length)) > 0)
{
length += read;
if (length == buffer.Length)
{
MessagePackBinary.FastResize(ref buffer, length * 2);
}
}
return length;
}
}
}
namespace Datadog.Trace.Vendors.MessagePack.Internal
{
internal static class InternalMemoryPool
{
[ThreadStatic]
static byte[] buffer = null;
public static byte[] GetBuffer()
{
if (buffer == null)
{
buffer = new byte[65536];
}
return buffer;
}
}
}
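
A minimal usage sketch of the high-level API above; string[] is used because it is covered by the built-in formatters, and the snippet assumes it runs inside the Datadog.Trace assembly with the default resolver including those built-in formatters:

using System.IO;
using Datadog.Trace.Vendors.MessagePack;

internal static class SerializerExample
{
    internal static void Run(Stream output)
    {
        string[] tags = { "env:prod", "version:1.9.3" };

        // Binary round trip with the default resolver.
        byte[] bytes = MessagePackSerializer.Serialize(tags);
        string[] roundTripped = MessagePackSerializer.Deserialize<string[]>(bytes);

        // Stream-based overload used the same way.
        MessagePackSerializer.Serialize(output, roundTripped);
    }
}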


@ -0,0 +1,77 @@
//------------------------------------------------------------------------------
// <auto-generated />
// This file was automatically generated by the UpdateVendors tool.
//------------------------------------------------------------------------------
using System;
namespace Datadog.Trace.Vendors.MessagePack
{
internal struct Nil : IEquatable<Nil>
{
public static readonly Nil Default = new Nil();
public override bool Equals(object obj)
{
return obj is Nil;
}
public bool Equals(Nil other)
{
return true;
}
public override int GetHashCode()
{
return 0;
}
public override string ToString()
{
return "()";
}
}
}
namespace Datadog.Trace.Vendors.MessagePack.Formatters
{
internal class NilFormatter : IMessagePackFormatter<Nil>
{
public static readonly IMessagePackFormatter<Nil> Instance = new NilFormatter();
NilFormatter()
{
}
public int Serialize(ref byte[] bytes, int offset, Nil value, IFormatterResolver typeResolver)
{
return MessagePackBinary.WriteNil(ref bytes, offset);
}
public Nil Deserialize(byte[] bytes, int offset, IFormatterResolver typeResolver, out int readSize)
{
return MessagePackBinary.ReadNil(bytes, offset, out readSize);
}
}
// Nullable Nil is handled the same way as Nil.
internal class NullableNilFormatter : IMessagePackFormatter<Nil?>
{
public static readonly IMessagePackFormatter<Nil?> Instance = new NullableNilFormatter();
NullableNilFormatter()
{
}
public int Serialize(ref byte[] bytes, int offset, Nil? value, IFormatterResolver typeResolver)
{
return MessagePackBinary.WriteNil(ref bytes, offset);
}
public Nil? Deserialize(byte[] bytes, int offset, IFormatterResolver typeResolver, out int readSize)
{
return MessagePackBinary.ReadNil(bytes, offset, out readSize);
}
}
}
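
For illustration, a tiny sketch showing Nil as the explicit representation of the MessagePack nil value (same internal-visibility assumption as above; NilExample is hypothetical):

using Datadog.Trace.Vendors.MessagePack;

internal static class NilExample
{
    internal static bool NilRoundTrip()
    {
        var bytes = new byte[1];
        MessagePackBinary.WriteNil(ref bytes, 0);

        int readSize;
        Nil value = MessagePackBinary.ReadNil(bytes, 0, out readSize);
        return value.Equals(Nil.Default); // always true: any Nil equals any other Nil
    }
}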


@ -0,0 +1,56 @@
//------------------------------------------------------------------------------
// <auto-generated />
// This file was automatically generated by the UpdateVendors tool.
//------------------------------------------------------------------------------
using Datadog.Trace.Vendors.MessagePack.Formatters;
using System;
using System.Reflection;
using System.Linq; // required for UNITY_WSA
namespace Datadog.Trace.Vendors.MessagePack.Resolvers
{
/// <summary>
/// Gets the formatter specified by the [MessagePackFormatter] attribute.
/// </summary>
internal sealed class AttributeFormatterResolver : IFormatterResolver
{
public static IFormatterResolver Instance = new AttributeFormatterResolver();
AttributeFormatterResolver()
{
}
public IMessagePackFormatter<T> GetFormatter<T>()
{
return FormatterCache<T>.formatter;
}
static class FormatterCache<T>
{
public static readonly IMessagePackFormatter<T> formatter;
static FormatterCache()
{
#if UNITY_WSA && !NETFX_CORE
var attr = (MessagePackFormatterAttribute)typeof(T).GetCustomAttributes(typeof(MessagePackFormatterAttribute), true).FirstOrDefault();
#else
var attr = typeof(T).GetTypeInfo().GetCustomAttribute<MessagePackFormatterAttribute>();
#endif
if (attr == null)
{
return;
}
if (attr.Arguments == null)
{
formatter = (IMessagePackFormatter<T>)Activator.CreateInstance(attr.FormatterType);
}
else
{
formatter = (IMessagePackFormatter<T>)Activator.CreateInstance(attr.FormatterType, attr.Arguments);
}
}
}
}
}
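
A sketch of the attribute-driven resolution path above; ExampleId and ExampleIdFormatter are hypothetical, and the attribute's (Type) constructor is assumed from its FormatterType property:

using Datadog.Trace.Vendors.MessagePack;
using Datadog.Trace.Vendors.MessagePack.Formatters;
using Datadog.Trace.Vendors.MessagePack.Resolvers;

// The attribute points the resolver at a dedicated formatter type.
[MessagePackFormatter(typeof(ExampleIdFormatter))]
internal struct ExampleId
{
    public ulong Value;
}

internal class ExampleIdFormatter : IMessagePackFormatter<ExampleId>
{
    public int Serialize(ref byte[] bytes, int offset, ExampleId value, IFormatterResolver formatterResolver)
        => MessagePackBinary.WriteUInt64(ref bytes, offset, value.Value);

    public ExampleId Deserialize(byte[] bytes, int offset, IFormatterResolver formatterResolver, out int readSize)
        => new ExampleId { Value = MessagePackBinary.ReadUInt64(bytes, offset, out readSize) };
}

internal static class AttributeResolverExample
{
    // AttributeFormatterResolver finds ExampleIdFormatter through the attribute.
    internal static IMessagePackFormatter<ExampleId> Resolve()
        => AttributeFormatterResolver.Instance.GetFormatter<ExampleId>();
}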


@ -0,0 +1,155 @@
//------------------------------------------------------------------------------
// <auto-generated />
// This file was automatically generated by the UpdateVendors tool.
//------------------------------------------------------------------------------
using Datadog.Trace.Vendors.MessagePack.Formatters;
using Datadog.Trace.Vendors.MessagePack.Internal;
using Datadog.Trace.Vendors.MessagePack.Resolvers;
using System;
using System.Collections;
using System.Collections.Generic;
using System.Reflection;
using System.Text;
namespace Datadog.Trace.Vendors.MessagePack.Resolvers
{
internal sealed class BuiltinResolver : IFormatterResolver
{
public static readonly IFormatterResolver Instance = new BuiltinResolver();
BuiltinResolver()
{
}
public IMessagePackFormatter<T> GetFormatter<T>()
{
return FormatterCache<T>.formatter;
}
static class FormatterCache<T>
{
public static readonly IMessagePackFormatter<T> formatter;
static FormatterCache()
{
// Reduce IL2CPP generated code size (don't write long code inside <T>)
formatter = (IMessagePackFormatter<T>)BuiltinResolverGetFormatterHelper.GetFormatter(typeof(T));
}
}
}
}
namespace Datadog.Trace.Vendors.MessagePack.Internal
{
internal static class BuiltinResolverGetFormatterHelper
{
static readonly Dictionary<Type, object> formatterMap = new Dictionary<Type, object>()
{
// Primitive
{typeof(Int16), Int16Formatter.Instance},
{typeof(Int32), Int32Formatter.Instance},
{typeof(Int64), Int64Formatter.Instance},
{typeof(UInt16), UInt16Formatter.Instance},
{typeof(UInt32), UInt32Formatter.Instance},
{typeof(UInt64), UInt64Formatter.Instance},
{typeof(Single), SingleFormatter.Instance},
{typeof(Double), DoubleFormatter.Instance},
{typeof(bool), BooleanFormatter.Instance},
{typeof(byte), ByteFormatter.Instance},
{typeof(sbyte), SByteFormatter.Instance},
{typeof(DateTime), DateTimeFormatter.Instance},
{typeof(char), CharFormatter.Instance},
// Nullable Primitive
{typeof(Nullable<Int16>), NullableInt16Formatter.Instance},
{typeof(Nullable<Int32>), NullableInt32Formatter.Instance},
{typeof(Nullable<Int64>), NullableInt64Formatter.Instance},
{typeof(Nullable<UInt16>), NullableUInt16Formatter.Instance},
{typeof(Nullable<UInt32>), NullableUInt32Formatter.Instance},
{typeof(Nullable<UInt64>), NullableUInt64Formatter.Instance},
{typeof(Nullable<Single>), NullableSingleFormatter.Instance},
{typeof(Nullable<Double>), NullableDoubleFormatter.Instance},
{typeof(Nullable<bool>), NullableBooleanFormatter.Instance},
{typeof(Nullable<byte>), NullableByteFormatter.Instance},
{typeof(Nullable<sbyte>), NullableSByteFormatter.Instance},
{typeof(Nullable<DateTime>), NullableDateTimeFormatter.Instance},
{typeof(Nullable<char>), NullableCharFormatter.Instance},
// StandardClassLibraryFormatter
{typeof(string), NullableStringFormatter.Instance},
{typeof(decimal), DecimalFormatter.Instance},
{typeof(decimal?), new StaticNullableFormatter<decimal>(DecimalFormatter.Instance)},
{typeof(TimeSpan), TimeSpanFormatter.Instance},
{typeof(TimeSpan?), new StaticNullableFormatter<TimeSpan>(TimeSpanFormatter.Instance)},
{typeof(DateTimeOffset), DateTimeOffsetFormatter.Instance},
{typeof(DateTimeOffset?), new StaticNullableFormatter<DateTimeOffset>(DateTimeOffsetFormatter.Instance)},
{typeof(Guid), GuidFormatter.Instance},
{typeof(Guid?), new StaticNullableFormatter<Guid>(GuidFormatter.Instance)},
{typeof(Uri), UriFormatter.Instance},
{typeof(Version), VersionFormatter.Instance},
{typeof(StringBuilder), StringBuilderFormatter.Instance},
{typeof(BitArray), BitArrayFormatter.Instance},
// special primitive
{typeof(byte[]), ByteArrayFormatter.Instance},
// Nil
{typeof(Nil), NilFormatter.Instance},
{typeof(Nil?), NullableNilFormatter.Instance},
// optimized primitive array formatters
{typeof(Int16[]), Int16ArrayFormatter.Instance},
{typeof(Int32[]), Int32ArrayFormatter.Instance},
{typeof(Int64[]), Int64ArrayFormatter.Instance},
{typeof(UInt16[]), UInt16ArrayFormatter.Instance},
{typeof(UInt32[]), UInt32ArrayFormatter.Instance},
{typeof(UInt64[]), UInt64ArrayFormatter.Instance},
{typeof(Single[]), SingleArrayFormatter.Instance},
{typeof(Double[]), DoubleArrayFormatter.Instance},
{typeof(Boolean[]), BooleanArrayFormatter.Instance},
{typeof(SByte[]), SByteArrayFormatter.Instance},
{typeof(DateTime[]), DateTimeArrayFormatter.Instance},
{typeof(Char[]), CharArrayFormatter.Instance},
{typeof(string[]), NullableStringArrayFormatter.Instance},
// well known collections
{typeof(List<Int16>), new ListFormatter<Int16>()},
{typeof(List<Int32>), new ListFormatter<Int32>()},
{typeof(List<Int64>), new ListFormatter<Int64>()},
{typeof(List<UInt16>), new ListFormatter<UInt16>()},
{typeof(List<UInt32>), new ListFormatter<UInt32>()},
{typeof(List<UInt64>), new ListFormatter<UInt64>()},
{typeof(List<Single>), new ListFormatter<Single>()},
{typeof(List<Double>), new ListFormatter<Double>()},
{typeof(List<Boolean>), new ListFormatter<Boolean>()},
{typeof(List<byte>), new ListFormatter<byte>()},
{typeof(List<SByte>), new ListFormatter<SByte>()},
{typeof(List<DateTime>), new ListFormatter<DateTime>()},
{typeof(List<Char>), new ListFormatter<Char>()},
{typeof(List<string>), new ListFormatter<string>()},
{ typeof(ArraySegment<byte>), ByteArraySegmentFormatter.Instance },
{ typeof(ArraySegment<byte>?),new StaticNullableFormatter<ArraySegment<byte>>(ByteArraySegmentFormatter.Instance) },
#if NETSTANDARD || NETFRAMEWORK
{typeof(System.Numerics.BigInteger), BigIntegerFormatter.Instance},
{typeof(System.Numerics.BigInteger?), new StaticNullableFormatter<System.Numerics.BigInteger>(BigIntegerFormatter.Instance)},
{typeof(System.Numerics.Complex), ComplexFormatter.Instance},
{typeof(System.Numerics.Complex?), new StaticNullableFormatter<System.Numerics.Complex>(ComplexFormatter.Instance)},
{typeof(System.Threading.Tasks.Task), TaskUnitFormatter.Instance},
#endif
};
internal static object GetFormatter(Type t)
{
object formatter;
if (formatterMap.TryGetValue(t, out formatter))
{
return formatter;
}
return null;
}
}
}
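
A quick illustration of the builtin resolver above, which is just a static type-to-formatter lookup (hypothetical example class, same internal-visibility assumption):

using System;
using Datadog.Trace.Vendors.MessagePack.Formatters;
using Datadog.Trace.Vendors.MessagePack.Internal;
using Datadog.Trace.Vendors.MessagePack.Resolvers;

internal static class BuiltinResolverExample
{
    internal static bool Run()
    {
        // Both calls end up at the same dictionary entry: {typeof(Int32), Int32Formatter.Instance}.
        IMessagePackFormatter<int> viaResolver = BuiltinResolver.Instance.GetFormatter<int>();
        object viaHelper = BuiltinResolverGetFormatterHelper.GetFormatter(typeof(int));
        return ReferenceEquals(viaResolver, viaHelper); // expected: true
    }
}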


@ -0,0 +1,111 @@
//------------------------------------------------------------------------------
// <auto-generated />
// This file was automatically generated by the UpdateVendors tool.
//------------------------------------------------------------------------------
using Datadog.Trace.Vendors.MessagePack.Formatters;
using System;
using System.Reflection;
namespace Datadog.Trace.Vendors.MessagePack.Resolvers
{
internal sealed class CompositeResolver : IFormatterResolver
{
public static readonly CompositeResolver Instance = new CompositeResolver();
static bool isFreezed = false;
static IMessagePackFormatter[] formatters = new IMessagePackFormatter[0];
static IFormatterResolver[] resolvers = new IFormatterResolver[0];
CompositeResolver()
{
}
public static void Register(params IFormatterResolver[] resolvers)
{
if (isFreezed)
{
throw new InvalidOperationException("Register must be called at startup (before the first GetFormatter<T> call).");
}
CompositeResolver.resolvers = resolvers;
}
public static void Register(params IMessagePackFormatter[] formatters)
{
if (isFreezed)
{
throw new InvalidOperationException("Register must be called at startup (before the first GetFormatter<T> call).");
}
CompositeResolver.formatters = formatters;
}
public static void Register(IMessagePackFormatter[] formatters, IFormatterResolver[] resolvers)
{
if (isFreezed)
{
throw new InvalidOperationException("Register must be called at startup (before the first GetFormatter<T> call).");
}
CompositeResolver.resolvers = resolvers;
CompositeResolver.formatters = formatters;
}
public static void RegisterAndSetAsDefault(params IFormatterResolver[] resolvers)
{
Register(resolvers);
MessagePack.MessagePackSerializer.SetDefaultResolver(CompositeResolver.Instance);
}
public static void RegisterAndSetAsDefault(params IMessagePackFormatter[] formatters)
{
Register(formatters);
MessagePack.MessagePackSerializer.SetDefaultResolver(CompositeResolver.Instance);
}
public static void RegisterAndSetAsDefault(IMessagePackFormatter[] formatters, IFormatterResolver[] resolvers)
{
Register(formatters);
Register(resolvers);
MessagePack.MessagePackSerializer.SetDefaultResolver(CompositeResolver.Instance);
}
public IMessagePackFormatter<T> GetFormatter<T>()
{
return FormatterCache<T>.formatter;
}
static class FormatterCache<T>
{
public static readonly IMessagePackFormatter<T> formatter;
static FormatterCache()
{
isFreezed = true;
foreach (var item in formatters)
{
foreach (var implInterface in item.GetType().GetTypeInfo().ImplementedInterfaces)
{
var ti = implInterface.GetTypeInfo();
if (ti.IsGenericType && ti.GenericTypeArguments[0] == typeof(T))
{
formatter = (IMessagePackFormatter<T>)item;
return;
}
}
}
foreach (var item in resolvers)
{
var f = item.GetFormatter<T>();
if (f != null)
{
formatter = f;
return;
}
}
}
}
}
}
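
A registration sketch reflecting the register-before-first-use constraint enforced above; the resolver order shown is illustrative only, not the tracer's actual configuration:

using Datadog.Trace.Vendors.MessagePack.Resolvers;

internal static class CompositeResolverSetup
{
    // Call once at startup, before any GetFormatter<T> use freezes the composite resolver.
    internal static void Configure()
    {
        CompositeResolver.RegisterAndSetAsDefault(
            BuiltinResolver.Instance,            // fixed table of primitive/collection formatters
            AttributeFormatterResolver.Instance, // honors [MessagePackFormatter] attributes
            DynamicGenericResolver.Instance);    // closes generic collection formatters at runtime
    }
}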


@ -0,0 +1,309 @@
//------------------------------------------------------------------------------
// <auto-generated />
// This file was automatically generated by the UpdateVendors tool.
//------------------------------------------------------------------------------
#if NETSTANDARD || NETFRAMEWORK
using Datadog.Trace.Vendors.MessagePack.Formatters;
using Datadog.Trace.Vendors.MessagePack.Internal;
using System;
using System.Collections.Generic;
using System.Text;
namespace Datadog.Trace.Vendors.MessagePack.Resolvers
{
// MEMO: Not completely implemented.
//internal static class ContractlessReflectionObjectResolver
//{
// // TODO:CamelCase Option? AllowPrivate?
// public static readonly IFormatterResolver Default = new DefaultResolver();
// public static readonly IFormatterResolver Contractless = new ContractlessResolver();
// public static readonly IFormatterResolver ContractlessForceStringKey = new ContractlessForceStringResolver();
// class DefaultResolver : IFormatterResolver
// {
// const bool ForceStringKey = false;
// const bool Contractless = false;
// const bool AllowPrivate = false;
// public IMessagePackFormatter<T> GetFormatter<T>()
// {
// return Cache<T>.formatter;
// }
// static class Cache<T>
// {
// public static readonly IMessagePackFormatter<T> formatter;
// static Cache()
// {
// var metaInfo = ObjectSerializationInfo.CreateOrNull(typeof(T), ForceStringKey, Contractless, AllowPrivate);
// if (metaInfo != null)
// {
// formatter = new ReflectionObjectFormatter<T>(metaInfo);
// }
// }
// }
// }
// class ContractlessResolver : IFormatterResolver
// {
// const bool ForceStringKey = false;
// const bool Contractless = true;
// const bool AllowPrivate = false;
// public IMessagePackFormatter<T> GetFormatter<T>()
// {
// return Cache<T>.formatter;
// }
// static class Cache<T>
// {
// public static readonly IMessagePackFormatter<T> formatter;
// static Cache()
// {
// var metaInfo = ObjectSerializationInfo.CreateOrNull(typeof(T), ForceStringKey, Contractless, AllowPrivate);
// if (metaInfo != null)
// {
// formatter = new ReflectionObjectFormatter<T>(metaInfo);
// }
// }
// }
// }
// class ContractlessForceStringResolver : IFormatterResolver
// {
// const bool ForceStringKey = true;
// const bool Contractless = true;
// const bool AllowPrivate = false;
// public IMessagePackFormatter<T> GetFormatter<T>()
// {
// return Cache<T>.formatter;
// }
// static class Cache<T>
// {
// public static readonly IMessagePackFormatter<T> formatter;
// static Cache()
// {
// var metaInfo = ObjectSerializationInfo.CreateOrNull(typeof(T), ForceStringKey, Contractless, AllowPrivate);
// if (metaInfo != null)
// {
// formatter = new ReflectionObjectFormatter<T>(metaInfo);
// }
// }
// }
// }
//}
//internal class ReflectionObjectFormatter<T> : IMessagePackFormatter<T>
//{
// readonly ObjectSerializationInfo metaInfo;
// // for write
// readonly byte[][] writeMemberNames;
// readonly ObjectSerializationInfo.EmittableMember[] writeMembers;
// // for read
// readonly int[] constructorParameterIndexes;
// readonly AutomataDictionary mapMemberDictionary;
// readonly ObjectSerializationInfo.EmittableMember[] readMembers;
// internal ReflectionObjectFormatter(ObjectSerializationInfo metaInfo)
// {
// this.metaInfo = metaInfo;
// // for write
// {
// var memberNameList = new List<byte[]>(metaInfo.Members.Length);
// var emmitableMemberList = new List<ObjectSerializationInfo.EmittableMember>(metaInfo.Members.Length);
// foreach (var item in metaInfo.Members)
// {
// if (item.IsWritable)
// {
// emmitableMemberList.Add(item);
// memberNameList.Add(Encoding.UTF8.GetBytes(item.Name));
// }
// }
// this.writeMemberNames = memberNameList.ToArray();
// this.writeMembers = emmitableMemberList.ToArray();
// }
// // for read
// {
// var automata = new AutomataDictionary();
// var emmitableMemberList = new List<ObjectSerializationInfo.EmittableMember>(metaInfo.Members.Length);
// int index = 0;
// foreach (var item in metaInfo.Members)
// {
// if (item.IsReadable)
// {
// emmitableMemberList.Add(item);
// automata.Add(item.Name, index++);
// }
// }
// this.readMembers = emmitableMemberList.ToArray();
// this.mapMemberDictionary = automata;
// }
// }
// public int Serialize(ref byte[] bytes, int offset, T value, IFormatterResolver formatterResolver)
// {
// // reduce generic method size, avoid write code in <T> type.
// if (metaInfo.IsIntKey)
// {
// return ReflectionObjectFormatterHelper.WriteArraySerialize(metaInfo, writeMembers, ref bytes, offset, value, formatterResolver);
// }
// else
// {
// return ReflectionObjectFormatterHelper.WriteMapSerialize(metaInfo, writeMembers, writeMemberNames, ref bytes, offset, value, formatterResolver);
// }
// }
// public T Deserialize(byte[] bytes, int offset, IFormatterResolver formatterResolver, out int readSize)
// {
// return (T)ReflectionObjectFormatterHelper.Deserialize(metaInfo, readMembers, constructorParameterIndexes, mapMemberDictionary, bytes, offset, formatterResolver, out readSize);
// }
//}
//internal static class ReflectionObjectFormatterHelper
//{
// internal static int WriteArraySerialize(ObjectSerializationInfo metaInfo, ObjectSerializationInfo.EmittableMember[] writeMembers, ref byte[] bytes, int offset, object value, IFormatterResolver formatterResolver)
// {
// var startOffset = offset;
// offset += MessagePackBinary.WriteArrayHeader(ref bytes, offset, writeMembers.Length);
// foreach (var item in metaInfo.Members)
// {
// if (item == null)
// {
// offset += MessagePackBinary.WriteNil(ref bytes, offset);
// }
// else
// {
// var memberValue = item.ReflectionLoadValue(value);
// offset += MessagePackSerializer.NonGeneric.Serialize(item.Type, ref bytes, offset, memberValue, formatterResolver);
// }
// }
// return offset - startOffset;
// }
// internal static int WriteMapSerialize(ObjectSerializationInfo metaInfo, ObjectSerializationInfo.EmittableMember[] writeMembers, byte[][] memberNames, ref byte[] bytes, int offset, object value, IFormatterResolver formatterResolver)
// {
// var startOffset = offset;
// offset += MessagePackBinary.WriteMapHeader(ref bytes, offset, writeMembers.Length);
// for (int i = 0; i < writeMembers.Length; i++)
// {
// offset += MessagePackBinary.WriteStringBytes(ref bytes, offset, memberNames[i]);
// var memberValue = writeMembers[i].ReflectionLoadValue(value);
// offset += MessagePackSerializer.NonGeneric.Serialize(writeMembers[i].Type, ref bytes, offset, memberValue, formatterResolver);
// }
// return offset - startOffset;
// }
// internal static object Deserialize(ObjectSerializationInfo metaInfo, ObjectSerializationInfo.EmittableMember[] readMembers, int[] constructorParameterIndexes, AutomataDictionary mapMemberDictionary, byte[] bytes, int offset, IFormatterResolver formatterResolver, out int readSize)
// {
// var startOffset = offset;
// object[] parameters = null;
// var headerType = MessagePackBinary.GetMessagePackType(bytes, offset);
// if (headerType == MessagePackType.Nil)
// {
// readSize = 1;
// return null;
// }
// else if (headerType == MessagePackType.Array)
// {
// var arraySize = MessagePackBinary.ReadArrayHeader(bytes, offset, out readSize);
// offset += readSize;
// // ReadValues
// parameters = new object[arraySize];
// for (int i = 0; i < arraySize; i++)
// {
// var info = readMembers[i];
// if (info != null)
// {
// parameters[i] = MessagePackSerializer.NonGeneric.Deserialize(info.Type, bytes, offset, formatterResolver, out readSize);
// offset += readSize;
// }
// else
// {
// offset += MessagePackBinary.ReadNextBlock(bytes, offset);
// }
// }
// }
// else if (headerType == MessagePackType.Map)
// {
// var mapSize = MessagePackBinary.ReadMapHeader(bytes, offset, out readSize);
// offset += readSize;
// // ReadValues
// parameters = new object[mapSize];
// for (int i = 0; i < mapSize; i++)
// {
// var rawPropName = MessagePackBinary.ReadStringSegment(bytes, offset, out readSize);
// offset += readSize;
// int index;
// if (mapMemberDictionary.TryGetValue(rawPropName.Array, rawPropName.Offset, rawPropName.Count, out index))
// {
// var info = readMembers[index];
// parameters[index] = MessagePackSerializer.NonGeneric.Deserialize(info.Type, bytes, offset, formatterResolver, out readSize);
// offset += readSize;
// }
// else
// {
// offset += MessagePackBinary.ReadNextBlock(bytes, offset);
// }
// }
// }
// else
// {
// throw new InvalidOperationException("Invalid MessagePackType:" + MessagePackCode.ToFormatName(bytes[offset]));
// }
// // CreateObject
// object result = null;
// if (constructorParameterIndexes.Length == 0)
// {
// result = Activator.CreateInstance(metaInfo.Type);
// }
// else
// {
// var args = new object[constructorParameterIndexes.Length];
// for (int i = 0; i < constructorParameterIndexes.Length; i++)
// {
// args[i] = parameters[constructorParameterIndexes[i]];
// }
// result = Activator.CreateInstance(metaInfo.Type, args);
// }
// // SetMembers
// for (int i = 0; i < readMembers.Length; i++)
// {
// var info = readMembers[i];
// if (info != null)
// {
// info.ReflectionStoreValue(result, parameters[i]);
// }
// }
// readSize = offset - startOffset;
// return result;
// }
//}
}
#endif


@ -0,0 +1,64 @@
//------------------------------------------------------------------------------
// <auto-generated />
// This file was automatically generated by the UpdateVendors tool.
//------------------------------------------------------------------------------
#if !UNITY_WSA
using Datadog.Trace.Vendors.MessagePack.Formatters;
using Datadog.Trace.Vendors.MessagePack.Internal;
using System;
using System.Reflection;
namespace Datadog.Trace.Vendors.MessagePack.Resolvers
{
internal sealed class DynamicEnumAsStringResolver : IFormatterResolver
{
public static readonly IFormatterResolver Instance = new DynamicEnumAsStringResolver();
DynamicEnumAsStringResolver()
{
}
public IMessagePackFormatter<T> GetFormatter<T>()
{
return FormatterCache<T>.formatter;
}
static class FormatterCache<T>
{
public static readonly IMessagePackFormatter<T> formatter;
static FormatterCache()
{
var ti = typeof(T).GetTypeInfo();
if (ti.IsNullable())
{
// build underlying type and use wrapped formatter.
ti = ti.GenericTypeArguments[0].GetTypeInfo();
if (!ti.IsEnum)
{
return;
}
var innerFormatter = DynamicEnumAsStringResolver.Instance.GetFormatterDynamic(ti.AsType());
if (innerFormatter == null)
{
return;
}
formatter = (IMessagePackFormatter<T>)Activator.CreateInstance(typeof(StaticNullableFormatter<>).MakeGenericType(ti.AsType()), new object[] { innerFormatter });
return;
}
else if (!ti.IsEnum)
{
return;
}
formatter = (IMessagePackFormatter<T>)(object)new EnumAsStringFormatter<T>();
}
}
}
}
#endif
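
For completeness, a small sketch of the enum-as-string path; ExampleStatus is hypothetical, and the expected output assumes EnumAsStringFormatter writes the enum's name as a MessagePack string:

using Datadog.Trace.Vendors.MessagePack;
using Datadog.Trace.Vendors.MessagePack.Resolvers;

internal enum ExampleStatus
{
    Ok,
    Error,
}

internal static class EnumAsStringExample
{
    internal static string Show()
    {
        // With this resolver the enum is written as its name ("Error"), not its numeric value.
        byte[] bytes = MessagePackSerializer.Serialize(ExampleStatus.Error, DynamicEnumAsStringResolver.Instance);
        return MessagePackSerializer.ToJson(bytes); // expected: "Error" (a quoted JSON string)
    }
}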


@ -0,0 +1,266 @@
//------------------------------------------------------------------------------
// <auto-generated />
// This file was automatically generated by the UpdateVendors tool.
//------------------------------------------------------------------------------
#if !UNITY_WSA
using Datadog.Trace.Vendors.MessagePack.Formatters;
using System.Linq;
using Datadog.Trace.Vendors.MessagePack.Internal;
using System;
using System.Collections.Generic;
using System.Reflection;
using System.Collections.ObjectModel;
using System.Collections;
#if NETSTANDARD || NETFRAMEWORK
using System.Threading.Tasks;
#endif
namespace Datadog.Trace.Vendors.MessagePack.Resolvers
{
internal sealed class DynamicGenericResolver : IFormatterResolver
{
public static readonly IFormatterResolver Instance = new DynamicGenericResolver();
DynamicGenericResolver()
{
}
public IMessagePackFormatter<T> GetFormatter<T>()
{
return FormatterCache<T>.formatter;
}
static class FormatterCache<T>
{
public static readonly IMessagePackFormatter<T> formatter;
static FormatterCache()
{
formatter = (IMessagePackFormatter<T>)DynamicGenericResolverGetFormatterHelper.GetFormatter(typeof(T));
}
}
}
}
namespace Datadog.Trace.Vendors.MessagePack.Internal
{
internal static class DynamicGenericResolverGetFormatterHelper
{
static readonly Dictionary<Type, Type> formatterMap = new Dictionary<Type, Type>()
{
{typeof(List<>), typeof(ListFormatter<>)},
{typeof(LinkedList<>), typeof(LinkedListFormatter<>)},
{typeof(Queue<>), typeof(QeueueFormatter<>)},
{typeof(Stack<>), typeof(StackFormatter<>)},
{typeof(HashSet<>), typeof(HashSetFormatter<>)},
{typeof(ReadOnlyCollection<>), typeof(ReadOnlyCollectionFormatter<>)},
{typeof(IList<>), typeof(InterfaceListFormatter<>)},
{typeof(ICollection<>), typeof(InterfaceCollectionFormatter<>)},
{typeof(IEnumerable<>), typeof(InterfaceEnumerableFormatter<>)},
{typeof(Dictionary<,>), typeof(DictionaryFormatter<,>)},
{typeof(IDictionary<,>), typeof(InterfaceDictionaryFormatter<,>)},
{typeof(SortedDictionary<,>), typeof(SortedDictionaryFormatter<,>)},
{typeof(SortedList<,>), typeof(SortedListFormatter<,>)},
{typeof(ILookup<,>), typeof(InterfaceLookupFormatter<,>)},
{typeof(IGrouping<,>), typeof(InterfaceGroupingFormatter<,>)},
#if NETSTANDARD || NETFRAMEWORK
{typeof(ObservableCollection<>), typeof(ObservableCollectionFormatter<>)},
{typeof(ReadOnlyObservableCollection<>),(typeof(ReadOnlyObservableCollectionFormatter<>))},
{typeof(IReadOnlyList<>), typeof(InterfaceReadOnlyListFormatter<>)},
{typeof(IReadOnlyCollection<>), typeof(InterfaceReadOnlyCollectionFormatter<>)},
{typeof(ISet<>), typeof(InterfaceSetFormatter<>)},
{typeof(System.Collections.Concurrent.ConcurrentBag<>), typeof(ConcurrentBagFormatter<>)},
{typeof(System.Collections.Concurrent.ConcurrentQueue<>), typeof(ConcurrentQueueFormatter<>)},
{typeof(System.Collections.Concurrent.ConcurrentStack<>), typeof(ConcurrentStackFormatter<>)},
{typeof(ReadOnlyDictionary<,>), typeof(ReadOnlyDictionaryFormatter<,>)},
{typeof(IReadOnlyDictionary<,>), typeof(InterfaceReadOnlyDictionaryFormatter<,>)},
{typeof(System.Collections.Concurrent.ConcurrentDictionary<,>), typeof(ConcurrentDictionaryFormatter<,>)},
{typeof(Lazy<>), typeof(LazyFormatter<>)},
{typeof(Task<>), typeof(TaskValueFormatter<>)},
#endif
};
// Reduce IL2CPP generated code size (don't write long code in <T>)
internal static object GetFormatter(Type t)
{
var ti = t.GetTypeInfo();
if (t.IsArray)
{
var rank = t.GetArrayRank();
if (rank == 1)
{
if (t.GetElementType() == typeof(byte)) // byte[] is also handled by a builtin formatter.
{
return ByteArrayFormatter.Instance;
}
return Activator.CreateInstance(typeof(ArrayFormatter<>).MakeGenericType(t.GetElementType()));
}
else if (rank == 2)
{
return Activator.CreateInstance(typeof(TwoDimentionalArrayFormatter<>).MakeGenericType(t.GetElementType()));
}
else if (rank == 3)
{
return Activator.CreateInstance(typeof(ThreeDimentionalArrayFormatter<>).MakeGenericType(t.GetElementType()));
}
else if (rank == 4)
{
return Activator.CreateInstance(typeof(FourDimentionalArrayFormatter<>).MakeGenericType(t.GetElementType()));
}
else
{
return null; // not supported built-in
}
}
else if (ti.IsGenericType)
{
var genericType = ti.GetGenericTypeDefinition();
var genericTypeInfo = genericType.GetTypeInfo();
var isNullable = genericTypeInfo.IsNullable();
var nullableElementType = isNullable ? ti.GenericTypeArguments[0] : null;
if (genericType == typeof(KeyValuePair<,>))
{
return CreateInstance(typeof(KeyValuePairFormatter<,>), ti.GenericTypeArguments);
}
else if (isNullable && nullableElementType.GetTypeInfo().IsConstructedGenericType() && nullableElementType.GetGenericTypeDefinition() == typeof(KeyValuePair<,>))
{
return CreateInstance(typeof(NullableFormatter<>), new[] { nullableElementType });
}
#if NETSTANDARD || NETFRAMEWORK
// ValueTask
// Deleted, unneeded for Datadog.Trace
// Tuple
else if (ti.FullName.StartsWith("System.Tuple"))
{
Type tupleFormatterType = null;
switch (ti.GenericTypeArguments.Length)
{
case 1:
tupleFormatterType = typeof(TupleFormatter<>);
break;
case 2:
tupleFormatterType = typeof(TupleFormatter<,>);
break;
case 3:
tupleFormatterType = typeof(TupleFormatter<,,>);
break;
case 4:
tupleFormatterType = typeof(TupleFormatter<,,,>);
break;
case 5:
tupleFormatterType = typeof(TupleFormatter<,,,,>);
break;
case 6:
tupleFormatterType = typeof(TupleFormatter<,,,,,>);
break;
case 7:
tupleFormatterType = typeof(TupleFormatter<,,,,,,>);
break;
case 8:
tupleFormatterType = typeof(TupleFormatter<,,,,,,,>);
break;
default:
break;
}
return CreateInstance(tupleFormatterType, ti.GenericTypeArguments);
}
// ValueTuple
// Deleted, unneeded for Datadog.Trace
#endif
// ArraySegment
else if (genericType == typeof(ArraySegment<>))
{
if (ti.GenericTypeArguments[0] == typeof(byte))
{
return ByteArraySegmentFormatter.Instance;
}
else
{
return CreateInstance(typeof(ArraySegmentFormatter<>), ti.GenericTypeArguments);
}
}
else if (isNullable && nullableElementType.GetTypeInfo().IsConstructedGenericType() && nullableElementType.GetGenericTypeDefinition() == typeof(ArraySegment<>))
{
if (nullableElementType == typeof(ArraySegment<byte>))
{
return new StaticNullableFormatter<ArraySegment<byte>>(ByteArraySegmentFormatter.Instance);
}
else
{
return CreateInstance(typeof(NullableFormatter<>), new[] { nullableElementType });
}
}
// Mapped formatter
else
{
Type formatterType;
if (formatterMap.TryGetValue(genericType, out formatterType))
{
return CreateInstance(formatterType, ti.GenericTypeArguments);
}
// generic collection
else if (ti.GenericTypeArguments.Length == 1
&& ti.ImplementedInterfaces.Any(x => x.GetTypeInfo().IsConstructedGenericType() && x.GetGenericTypeDefinition() == typeof(ICollection<>))
&& ti.DeclaredConstructors.Any(x => x.GetParameters().Length == 0))
{
var elemType = ti.GenericTypeArguments[0];
return CreateInstance(typeof(GenericCollectionFormatter<,>), new[] { elemType, t });
}
// generic dictionary
else if (ti.GenericTypeArguments.Length == 2
&& ti.ImplementedInterfaces.Any(x => x.GetTypeInfo().IsConstructedGenericType() && x.GetGenericTypeDefinition() == typeof(IDictionary<,>))
&& ti.DeclaredConstructors.Any(x => x.GetParameters().Length == 0))
{
var keyType = ti.GenericTypeArguments[0];
var valueType = ti.GenericTypeArguments[1];
return CreateInstance(typeof(GenericDictionaryFormatter<,,>), new[] { keyType, valueType, t });
}
}
}
else
{
// NonGeneric Collection
if (t == typeof(IList))
{
return NonGenericInterfaceListFormatter.Instance;
}
else if (t == typeof(IDictionary))
{
return NonGenericInterfaceDictionaryFormatter.Instance;
}
if (typeof(IList).GetTypeInfo().IsAssignableFrom(ti) && ti.DeclaredConstructors.Any(x => x.GetParameters().Length == 0))
{
return Activator.CreateInstance(typeof(NonGenericListFormatter<>).MakeGenericType(t));
}
else if (typeof(IDictionary).GetTypeInfo().IsAssignableFrom(ti) && ti.DeclaredConstructors.Any(x => x.GetParameters().Length == 0))
{
return Activator.CreateInstance(typeof(NonGenericDictionaryFormatter<>).MakeGenericType(t));
}
}
return null;
}
static object CreateInstance(Type genericType, Type[] genericTypeArguments, params object[] arguments)
{
return Activator.CreateInstance(genericType.MakeGenericType(genericTypeArguments), arguments);
}
}
}
#endif
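
As context for the formatter map above, a minimal sketch (not part of the vendored source) of how an open-generic entry is closed over concrete type arguments; `List<int>` is an arbitrary example:

using System;
using Datadog.Trace.Vendors.MessagePack.Formatters;

internal static class GenericResolutionSketch
{
    internal static object ResolveListOfInt()
    {
        // formatterMap maps typeof(List<>) to typeof(ListFormatter<>); CreateInstance then
        // closes the open generic over the element type, producing a ListFormatter<int>.
        Type openFormatter = typeof(ListFormatter<>);
        return Activator.CreateInstance(openFormatter.MakeGenericType(typeof(int)));
    }
}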

View File

@ -0,0 +1,59 @@
//------------------------------------------------------------------------------
// <auto-generated />
// This file was automatically generated by the UpdateVendors tool.
//------------------------------------------------------------------------------
using Datadog.Trace.Vendors.MessagePack.Formatters;
using Datadog.Trace.Vendors.MessagePack.Internal;
using System;
namespace Datadog.Trace.Vendors.MessagePack.Resolvers
{
internal sealed class NativeDateTimeResolver : IFormatterResolver
{
public static readonly IFormatterResolver Instance = new NativeDateTimeResolver();
NativeDateTimeResolver()
{
}
public IMessagePackFormatter<T> GetFormatter<T>()
{
return FormatterCache<T>.formatter;
}
static class FormatterCache<T>
{
public static readonly IMessagePackFormatter<T> formatter;
static FormatterCache()
{
formatter = (IMessagePackFormatter<T>)NativeDateTimeResolverGetFormatterHelper.GetFormatter(typeof(T));
}
}
}
}
namespace Datadog.Trace.Vendors.MessagePack.Internal
{
internal static class NativeDateTimeResolverGetFormatterHelper
{
internal static object GetFormatter(Type t)
{
if (t == typeof(DateTime))
{
return NativeDateTimeFormatter.Instance;
}
else if (t == typeof(DateTime?))
{
return new StaticNullableFormatter<DateTime>(NativeDateTimeFormatter.Instance);
}
else if (t == typeof(DateTime[]))
{
return NativeDateTimeArrayFormatter.Instance;
}
return null;
}
}
}

View File

@ -0,0 +1,59 @@
//------------------------------------------------------------------------------
// <auto-generated />
// This file was automatically generated by the UpdateVendors tool.
//------------------------------------------------------------------------------
using Datadog.Trace.Vendors.MessagePack.Formatters;
using Datadog.Trace.Vendors.MessagePack.Internal;
using System;
namespace Datadog.Trace.Vendors.MessagePack.Resolvers
{
internal sealed class OldSpecResolver : IFormatterResolver
{
public static readonly IFormatterResolver Instance = new OldSpecResolver();
OldSpecResolver()
{
}
public IMessagePackFormatter<T> GetFormatter<T>()
{
return FormatterCache<T>.formatter;
}
static class FormatterCache<T>
{
public static readonly IMessagePackFormatter<T> formatter;
static FormatterCache()
{
formatter = (IMessagePackFormatter<T>)OldSpecResolverGetFormatterHelper.GetFormatter(typeof(T));
}
}
}
}
namespace Datadog.Trace.Vendors.MessagePack.Internal
{
internal static class OldSpecResolverGetFormatterHelper
{
internal static object GetFormatter(Type t)
{
if (t == typeof(string))
{
return OldSpecStringFormatter.Instance;
}
else if (t == typeof(string[]))
{
return new ArrayFormatter<string>();
}
else if (t == typeof(byte[]))
{
return OldSpecBinaryFormatter.Instance;
}
return null;
}
}
}

View File

@ -0,0 +1,69 @@
//------------------------------------------------------------------------------
// <auto-generated />
// This file was automatically generated by the UpdateVendors tool.
//------------------------------------------------------------------------------
using Datadog.Trace.Vendors.MessagePack.Formatters;
namespace Datadog.Trace.Vendors.MessagePack.Resolvers
{
internal sealed class PrimitiveObjectResolver : IFormatterResolver
{
public static IFormatterResolver Instance = new PrimitiveObjectResolver();
PrimitiveObjectResolver()
{
}
public IMessagePackFormatter<T> GetFormatter<T>()
{
return FormatterCache<T>.formatter;
}
static class FormatterCache<T>
{
public static readonly IMessagePackFormatter<T> formatter;
static FormatterCache()
{
formatter = (typeof(T) == typeof(object))
? (IMessagePackFormatter<T>)(object)PrimitiveObjectFormatter.Instance
: null;
}
}
}
//#if NETSTANDARD || NETFRAMEWORK
// /// <summary>
// /// In `object`, when serializing resolve by concrete type and when deserializing use primitive.
// /// </summary>
// internal sealed class DynamicObjectTypeFallbackResolver : IFormatterResolver
// {
// public static IFormatterResolver Instance = new DynamicObjectTypeFallbackResolver();
// DynamicObjectTypeFallbackResolver()
// {
// }
// public IMessagePackFormatter<T> GetFormatter<T>()
// {
// return FormatterCache<T>.formatter;
// }
// static class FormatterCache<T>
// {
// public static readonly IMessagePackFormatter<T> formatter;
// static FormatterCache()
// {
// formatter = (typeof(T) == typeof(object))
// ? (IMessagePackFormatter<T>)(object)DynamicObjectTypeFallbackFormatter.Instance
// : null;
// }
// }
// }
//#endif
}

View File

@ -0,0 +1,349 @@
//------------------------------------------------------------------------------
// <auto-generated />
// This file was automatically generated by the UpdateVendors tool.
//------------------------------------------------------------------------------
using Datadog.Trace.Vendors.MessagePack.Formatters;
using System.Linq;
using Datadog.Trace.Vendors.MessagePack.Internal;
using Datadog.Trace.Vendors.MessagePack.Resolvers;
namespace Datadog.Trace.Vendors.MessagePack.Resolvers
{
/// <summary>
/// Default composite resolver: builtin -> attribute -> dynamic enum -> dynamic generic -> dynamic union -> dynamic object -> primitive.
/// </summary>
internal sealed class StandardResolver : IFormatterResolver
{
public static readonly IFormatterResolver Instance = new StandardResolver();
#if NETSTANDARD || NETFRAMEWORK
public static readonly IMessagePackFormatter<object> ObjectFallbackFormatter = new DynamicObjectTypeFallbackFormatter(StandardResolverCore.Instance);
#endif
StandardResolver()
{
}
public IMessagePackFormatter<T> GetFormatter<T>()
{
return FormatterCache<T>.formatter;
}
static class FormatterCache<T>
{
public static readonly IMessagePackFormatter<T> formatter;
static FormatterCache()
{
if (typeof(T) == typeof(object))
{
// final fallback
#if NETSTANDARD || NETFRAMEWORK
formatter = (IMessagePackFormatter<T>)ObjectFallbackFormatter;
#else
formatter = PrimitiveObjectResolver.Instance.GetFormatter<T>();
#endif
}
else
{
formatter = StandardResolverCore.Instance.GetFormatter<T>();
}
}
}
}
internal sealed class ContractlessStandardResolver : IFormatterResolver
{
public static readonly IFormatterResolver Instance = new ContractlessStandardResolver();
#if NETSTANDARD || NETFRAMEWORK
public static readonly IMessagePackFormatter<object> ObjectFallbackFormatter = new DynamicObjectTypeFallbackFormatter(ContractlessStandardResolverCore.Instance);
#endif
ContractlessStandardResolver()
{
}
public IMessagePackFormatter<T> GetFormatter<T>()
{
return FormatterCache<T>.formatter;
}
static class FormatterCache<T>
{
public static readonly IMessagePackFormatter<T> formatter;
static FormatterCache()
{
if (typeof(T) == typeof(object))
{
// final fallback
#if NETSTANDARD || NETFRAMEWORK
formatter = (IMessagePackFormatter<T>)ObjectFallbackFormatter;
#else
formatter = PrimitiveObjectResolver.Instance.GetFormatter<T>();
#endif
}
else
{
formatter = ContractlessStandardResolverCore.Instance.GetFormatter<T>();
}
}
}
}
internal sealed class StandardResolverAllowPrivate : IFormatterResolver
{
public static readonly IFormatterResolver Instance = new StandardResolverAllowPrivate();
#if NETSTANDARD || NETFRAMEWORK
public static readonly IMessagePackFormatter<object> ObjectFallbackFormatter = new DynamicObjectTypeFallbackFormatter(StandardResolverAllowPrivateCore.Instance);
#endif
StandardResolverAllowPrivate()
{
}
public IMessagePackFormatter<T> GetFormatter<T>()
{
return FormatterCache<T>.formatter;
}
static class FormatterCache<T>
{
public static readonly IMessagePackFormatter<T> formatter;
static FormatterCache()
{
if (typeof(T) == typeof(object))
{
// final fallback
#if NETSTANDARD || NETFRAMEWORK
formatter = (IMessagePackFormatter<T>)ObjectFallbackFormatter;
#else
formatter = PrimitiveObjectResolver.Instance.GetFormatter<T>();
#endif
}
else
{
formatter = StandardResolverAllowPrivateCore.Instance.GetFormatter<T>();
}
}
}
}
internal sealed class ContractlessStandardResolverAllowPrivate : IFormatterResolver
{
public static readonly IFormatterResolver Instance = new ContractlessStandardResolverAllowPrivate();
#if NETSTANDARD || NETFRAMEWORK
public static readonly IMessagePackFormatter<object> ObjectFallbackFormatter = new DynamicObjectTypeFallbackFormatter(ContractlessStandardResolverAllowPrivateCore.Instance);
#endif
ContractlessStandardResolverAllowPrivate()
{
}
public IMessagePackFormatter<T> GetFormatter<T>()
{
return FormatterCache<T>.formatter;
}
static class FormatterCache<T>
{
public static readonly IMessagePackFormatter<T> formatter;
static FormatterCache()
{
if (typeof(T) == typeof(object))
{
// final fallback
#if NETSTANDARD || NETFRAMEWORK
formatter = (IMessagePackFormatter<T>)ObjectFallbackFormatter;
#else
formatter = PrimitiveObjectResolver.Instance.GetFormatter<T>();
#endif
}
else
{
formatter = ContractlessStandardResolverAllowPrivateCore.Instance.GetFormatter<T>();
}
}
}
}
}
namespace Datadog.Trace.Vendors.MessagePack.Internal
{
internal static class StandardResolverHelper
{
public static readonly IFormatterResolver[] DefaultResolvers = new[]
{
BuiltinResolver.Instance, // Try Builtin
AttributeFormatterResolver.Instance, // Try use [MessagePackFormatter]
#if !(NETSTANDARD || NETFRAMEWORK)
MessagePack.Unity.UnityResolver.Instance,
#endif
#if !ENABLE_IL2CPP && !UNITY_WSA && !NET_STANDARD_2_0
DynamicGenericResolver.Instance, // Try Array, Tuple, Collection
#endif
};
}
internal sealed class StandardResolverCore : IFormatterResolver
{
public static readonly IFormatterResolver Instance = new StandardResolverCore();
static readonly IFormatterResolver[] resolvers = StandardResolverHelper.DefaultResolvers.Concat(new IFormatterResolver[]
{
#if !ENABLE_IL2CPP && !UNITY_WSA && !NET_STANDARD_2_0
#endif
}).ToArray();
StandardResolverCore()
{
}
public IMessagePackFormatter<T> GetFormatter<T>()
{
return FormatterCache<T>.formatter;
}
static class FormatterCache<T>
{
public static readonly IMessagePackFormatter<T> formatter;
static FormatterCache()
{
foreach (var item in resolvers)
{
var f = item.GetFormatter<T>();
if (f != null)
{
formatter = f;
return;
}
}
}
}
}
internal sealed class ContractlessStandardResolverCore : IFormatterResolver
{
public static readonly IFormatterResolver Instance = new ContractlessStandardResolverCore();
static readonly IFormatterResolver[] resolvers = StandardResolverHelper.DefaultResolvers.Concat(new IFormatterResolver[]
{
#if !ENABLE_IL2CPP && !UNITY_WSA && !NET_STANDARD_2_0
#endif
}).ToArray();
ContractlessStandardResolverCore()
{
}
public IMessagePackFormatter<T> GetFormatter<T>()
{
return FormatterCache<T>.formatter;
}
static class FormatterCache<T>
{
public static readonly IMessagePackFormatter<T> formatter;
static FormatterCache()
{
foreach (var item in resolvers)
{
var f = item.GetFormatter<T>();
if (f != null)
{
formatter = f;
return;
}
}
}
}
}
internal sealed class StandardResolverAllowPrivateCore : IFormatterResolver
{
public static readonly IFormatterResolver Instance = new StandardResolverAllowPrivateCore();
static readonly IFormatterResolver[] resolvers = StandardResolverHelper.DefaultResolvers.Concat(new IFormatterResolver[]
{
#if !ENABLE_IL2CPP && !UNITY_WSA && !NET_STANDARD_2_0
#endif
}).ToArray();
StandardResolverAllowPrivateCore()
{
}
public IMessagePackFormatter<T> GetFormatter<T>()
{
return FormatterCache<T>.formatter;
}
static class FormatterCache<T>
{
public static readonly IMessagePackFormatter<T> formatter;
static FormatterCache()
{
foreach (var item in resolvers)
{
var f = item.GetFormatter<T>();
if (f != null)
{
formatter = f;
return;
}
}
}
}
}
internal sealed class ContractlessStandardResolverAllowPrivateCore : IFormatterResolver
{
public static readonly IFormatterResolver Instance = new ContractlessStandardResolverAllowPrivateCore();
static readonly IFormatterResolver[] resolvers = StandardResolverHelper.DefaultResolvers.Concat(new IFormatterResolver[]
{
#if !ENABLE_IL2CPP && !UNITY_WSA && !NET_STANDARD_2_0
#endif
}).ToArray();
ContractlessStandardResolverAllowPrivateCore()
{
}
public IMessagePackFormatter<T> GetFormatter<T>()
{
return FormatterCache<T>.formatter;
}
static class FormatterCache<T>
{
public static readonly IMessagePackFormatter<T> formatter;
static FormatterCache()
{
foreach (var item in resolvers)
{
var f = item.GetFormatter<T>();
if (f != null)
{
formatter = f;
return;
}
}
}
}
}
}
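
A short usage sketch (not part of the vendored file) of the composite lookup described above; the result is cached per closed generic type by the nested `FormatterCache<T>`:

using Datadog.Trace.Vendors.MessagePack.Formatters;
using Datadog.Trace.Vendors.MessagePack.Resolvers;

internal static class StandardResolverSketch
{
    internal static IMessagePackFormatter<string> ResolveStringFormatter()
    {
        // Walks StandardResolverHelper.DefaultResolvers in order (BuiltinResolver,
        // AttributeFormatterResolver, then DynamicGenericResolver where compiled in)
        // and returns the first non-null formatter.
        return StandardResolver.Instance.GetFormatter<string>();
    }
}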

View File

@ -0,0 +1,13 @@
//------------------------------------------------------------------------------
// <auto-generated />
// This file was automatically generated by the UpdateVendors tool.
//------------------------------------------------------------------------------
using System.Text;
namespace Datadog.Trace.Vendors.MessagePack
{
internal static class StringEncoding
{
public static readonly Encoding UTF8 = new UTF8Encoding(false);
}
}

View File

@ -44,6 +44,12 @@ namespace UpdateVendors
branchDownload: "https://github.com/DataDog/dogstatsd-csharp-client/archive/3.3.0.zip",
pathToSrc: new[] { "dogstatsd-csharp-client-3.3.0", "src", "StatsdClient" },
transform: filePath => RewriteCsFileWithStandardTransform(filePath, originalNamespace: "StatsdClient"));
UpdateVendor(
libraryName: "MessagePack",
branchDownload: "https://github.com/neuecc/MessagePack-CSharp/archive/v1.9.3.zip",
pathToSrc: new[] { "MessagePack-CSharp-1.9.3", "src", "MessagePack" },
transform: filePath => RewriteCsFileWithStandardTransform(filePath, originalNamespace: "MessagePack"));
}
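// Illustrative sketch only, not part of this change: a simplified, hypothetical version of the
// namespace prefixing that the transform below performs for the vendored sources (per this PR,
// "MessagePack" namespaces become "Datadog.Trace.Vendors.MessagePack").
private static void PrefixNamespaceSketch(string filePath, string originalNamespace)
{
    var text = System.IO.File.ReadAllText(filePath);
    text = text.Replace("namespace " + originalNamespace, "namespace Datadog.Trace.Vendors." + originalNamespace);
    text = text.Replace("using " + originalNamespace, "using Datadog.Trace.Vendors." + originalNamespace);
    System.IO.File.WriteAllText(filePath, text);
}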
private static void RewriteCsFileWithStandardTransform(string filePath, string originalNamespace, Func<string, string, string> extraTransform = null)