diff --git a/src/Block.cs b/src/Block.cs
index 84f804e..88ef86b 100644
--- a/src/Block.cs
+++ b/src/Block.cs
@@ -1,23 +1,19 @@
-using System.Runtime.Serialization;
+using System.Runtime.Serialization;
+using Newtonsoft.Json;
namespace Ipfs.Http
{
///
[DataContract]
- public class Block : IDataBlock
+ public record Block : IBlockStat
{
- ///
- /// The data of the block.
- ///
- public byte[] DataBytes { get; set; }
-
///
[DataMember]
+ [JsonProperty("Key")]
public required Cid Id { get; set; }
///
[DataMember]
- public required long Size { get; set; }
+ public required int Size { get; set; }
}
-
}
diff --git a/src/CoreApi/BlockApi.cs b/src/CoreApi/BlockApi.cs
index 7013eea..851f43e 100644
--- a/src/CoreApi/BlockApi.cs
+++ b/src/CoreApi/BlockApi.cs
@@ -1,11 +1,12 @@
using Ipfs.CoreApi;
using Newtonsoft.Json.Linq;
-using System.Collections.Generic;
using System.IO;
using System.Net.Http;
using System.Threading;
using System.Threading.Tasks;
+#nullable enable
+
namespace Ipfs.Http
{
class BlockApi : IBlockApi
@@ -17,92 +18,89 @@ internal BlockApi(IpfsClient ipfs)
this.ipfs = ipfs;
}
- public async Task GetAsync(Cid id, CancellationToken cancel = default(CancellationToken))
+ public async Task GetAsync(Cid id, CancellationToken cancel = default)
{
return await ipfs.DownloadBytesAsync("block/get", cancel, id);
}
- public async Task PutAsync(
+ public async Task PutAsync(
byte[] data,
- string contentType = Cid.DefaultContentType,
- string multiHash = MultiHash.DefaultAlgorithmName,
- string encoding = MultiBase.DefaultAlgorithmName,
- bool pin = false,
- CancellationToken cancel = default(CancellationToken))
+ string cidCodec = "raw",
+ MultiHash? hash = null,
+ bool? pin = null,
+ bool? allowBigBlock = null,
+ CancellationToken cancel = default)
{
- var options = new List();
- if (multiHash != MultiHash.DefaultAlgorithmName ||
- contentType != Cid.DefaultContentType ||
- encoding != MultiBase.DefaultAlgorithmName)
- {
- options.Add($"mhtype={multiHash}");
- options.Add($"format={contentType}");
- options.Add($"cid-base={encoding}");
- }
- var json = await ipfs.UploadAsync("block/put", cancel, data, options.ToArray());
- var info = JObject.Parse(json);
- Cid cid = (string)info["Key"];
-
- if (pin)
- {
- await ipfs.Pin.AddAsync(cid, recursive: false, cancel: cancel);
- }
-
- return cid;
+ using var stream = new MemoryStream(data);
+ return await PutAsync(stream, cidCodec, hash, pin, allowBigBlock, cancel);
}
- public async Task PutAsync(
+ public async Task PutAsync(
Stream data,
- string contentType = Cid.DefaultContentType,
- string multiHash = MultiHash.DefaultAlgorithmName,
- string encoding = MultiBase.DefaultAlgorithmName,
- bool pin = false,
- CancellationToken cancel = default(CancellationToken))
+ string cidCodec = "raw",
+ MultiHash? hash = null,
+ bool? pin = null,
+ bool? allowBigBlock = null,
+ CancellationToken cancel = default)
{
- var options = new List();
- if (multiHash != MultiHash.DefaultAlgorithmName ||
- contentType != Cid.DefaultContentType ||
- encoding != MultiBase.DefaultAlgorithmName)
- {
- options.Add($"mhtype={multiHash}");
- options.Add($"format={contentType}");
- options.Add($"cid-base={encoding}");
- }
- var json = await ipfs.UploadAsync("block/put", cancel, data, null, options.ToArray());
- var info = JObject.Parse(json);
- Cid cid = (string)info["Key"];
-
- if (pin)
- {
- await ipfs.Pin.AddAsync(cid, recursive: false, cancel: cancel);
- }
+ string[] options = [
+ $"cid-codec={cidCodec}"
+ ];
- return cid;
+ if (hash != null)
+ options = [.. options, $"mhtype={hash}", $"mhlen={hash.Algorithm.DigestSize}"];
+
+ if (pin != null)
+ options = [.. options, $"pin={pin.ToString().ToLowerInvariant()}"];
+
+ if (allowBigBlock != null)
+ options = [.. options, $"allow-big-block={allowBigBlock.ToString().ToLowerInvariant()}"];
+
+ var json = await ipfs.UploadAsync("block/put", cancel, data, null, options);
+ var res = JObject.Parse(json).ToObject();
+ if (res is null)
+ throw new InvalidDataException("The response did not contain a block.");
+
+ return res;
}
- public async Task StatAsync(Cid id, CancellationToken cancel = default(CancellationToken))
+ public async Task StatAsync(Cid id, CancellationToken cancel = default)
{
var json = await ipfs.DoCommandAsync("block/stat", cancel, id);
- var info = JObject.Parse(json);
- return new Block
- {
- Size = (long)info["Size"],
- Id = (string)info["Key"]
- };
+
+ var parsed = JObject.Parse(json);
+ if (parsed is null)
+ throw new InvalidDataException("The response could not be parsed.");
+
+ var error = (string?)parsed["Error"];
+ if (error != null)
+ throw new HttpRequestException(error);
+
+ var res = parsed.ToObject();
+ if (res is null)
+ throw new InvalidDataException("The response could not be deserialized.");
+
+ return res;
}
- public async Task RemoveAsync(Cid id, bool ignoreNonexistent = false, CancellationToken cancel = default(CancellationToken))
+ public async Task RemoveAsync(Cid id, bool ignoreNonexistent = false, CancellationToken cancel = default)
{
var json = await ipfs.DoCommandAsync("block/rm", cancel, id, "force=" + ignoreNonexistent.ToString().ToLowerInvariant());
- if (json.Length == 0)
- return null;
- var result = JObject.Parse(json);
- var error = (string)result["Error"];
+
+ var parsed = JObject.Parse(json);
+ if (parsed is null)
+ throw new InvalidDataException("The response could not be parsed.");
+
+ var error = (string?)parsed["Error"];
if (error != null)
throw new HttpRequestException(error);
- return (Cid)(string)result["Hash"];
- }
+ var cid = parsed["Hash"]?.ToObject();
+ if (cid is null)
+ throw new InvalidDataException("The response could not be deserialized.");
+
+ return cid;
+ }
}
}
diff --git a/src/CoreApi/DagApi.cs b/src/CoreApi/DagApi.cs
index 6561c95..fbdf4ee 100644
--- a/src/CoreApi/DagApi.cs
+++ b/src/CoreApi/DagApi.cs
@@ -1,31 +1,36 @@
-using Ipfs.CoreApi;
-using Newtonsoft.Json;
-using Newtonsoft.Json.Linq;
+using Ipfs.CoreApi;
+using Newtonsoft.Json;
+using Newtonsoft.Json.Linq;
+using System;
using System.Globalization;
-using System.IO;
-using System.Text;
-using System.Threading;
-using System.Threading.Tasks;
-
-namespace Ipfs.Http
-{
- class DagApi : IDagApi
- {
- private IpfsClient ipfs;
-
- internal DagApi(IpfsClient ipfs)
- {
- this.ipfs = ipfs;
- }
-
- public async Task PutAsync(
+using System.IO;
+using System.Text;
+using System.Threading;
+using System.Threading.Tasks;
+using static Ipfs.CoreApi.CarImportOutput;
+
+#nullable enable
+
+namespace Ipfs.Http
+{
+ class DagApi : IDagApi
+ {
+ private IpfsClient ipfs;
+
+ internal DagApi(IpfsClient ipfs)
+ {
+ this.ipfs = ipfs;
+ }
+
+ public async Task PutAsync(
JObject data,
- string contentType = "dag-cbor",
- string multiHash = MultiHash.DefaultAlgorithmName,
- string encoding = MultiBase.DefaultAlgorithmName,
- bool pin = true,
- CancellationToken cancel = default(CancellationToken))
- {
+ string storeCodec = "dag-cbor",
+ string inputCodec = "dag-json",
+ bool? pin = null,
+ MultiHash? hash = null,
+ bool? allowBigBlock = null,
+ CancellationToken cancel = default)
+ {
using (var ms = new MemoryStream())
{
using (var sw = new StreamWriter(ms, new UTF8Encoding(false), 4096, true) { AutoFlush = true })
@@ -38,65 +43,150 @@ public async Task PutAsync(
serializer.Serialize(jw, data);
}
ms.Position = 0;
- return await PutAsync(ms, contentType, multiHash, encoding, pin, cancel);
+ return await PutAsync(ms, storeCodec, inputCodec, pin, hash, allowBigBlock, cancel);
}
}
public async Task PutAsync(
object data,
- string contentType = "dag-cbor",
- string multiHash = MultiHash.DefaultAlgorithmName,
- string encoding = MultiBase.DefaultAlgorithmName,
- bool pin = true,
- CancellationToken cancel = default(CancellationToken))
+ string storeCodec = "dag-cbor",
+ string inputCodec = "dag-json",
+ bool? pin = null,
+ MultiHash? hash = null,
+ bool? allowBigBlock = null,
+ CancellationToken cancel = default)
{
using (var ms = new MemoryStream(
Encoding.UTF8.GetBytes(JsonConvert.SerializeObject(data)),
false))
{
- return await PutAsync(ms, contentType, multiHash, encoding, pin, cancel);
+ return await PutAsync(ms, storeCodec, inputCodec, pin, hash, allowBigBlock, cancel);
}
- }
+ }
public async Task PutAsync(
Stream data,
- string contentType = "dag-cbor",
- string multiHash = MultiHash.DefaultAlgorithmName,
- string encoding = MultiBase.DefaultAlgorithmName,
- bool pin = true,
- CancellationToken cancel = default(CancellationToken))
+ string storeCodec = "dag-cbor",
+ string inputCodec = "dag-json",
+ bool? pin = null,
+ MultiHash? hash = null,
+ bool? allowBigBlock = null,
+ CancellationToken cancel = default)
{
- var json = await ipfs.UploadAsync("dag/put", cancel,
- data, null,
- $"format={contentType}",
- $"pin={pin.ToString().ToLowerInvariant()}",
- $"hash={multiHash}",
- $"cid-base={encoding}");
- var result = JObject.Parse(json);
- return (Cid)(string)result["Cid"]["/"];
+ string[] options = [
+ $"store-codec={storeCodec}",
+ $"input-codec={inputCodec}"
+ ];
+
+ if (hash != null)
+ options = [.. options, $"hash={hash}"];
+
+ if (pin != null)
+ options = [.. options, $"pin={pin.ToString().ToLowerInvariant()}"];
+
+ if (allowBigBlock != null)
+ options = [.. options, $"allow-big-block={allowBigBlock.ToString().ToLowerInvariant()}"];
+
+ var json = await ipfs.UploadAsync("dag/put", cancel, data, null, options);
+
+ var parsed = JObject.Parse(json);
+ var cid = parsed["Cid"]?.ToObject();
+ if (cid is null)
+ throw new InvalidDataException("The response did not contain a CID.");
+
+ return (Cid)cid;
}
- public async Task GetAsync(
+ public async Task GetAsync(
Cid id,
- CancellationToken cancel = default(CancellationToken))
+ string outputCodec = "dag-json",
+ CancellationToken cancel = default)
{
- var json = await ipfs.DoCommandAsync("dag/get", cancel, id);
+ var json = await ipfs.DoCommandAsync("dag/get", cancel, id, $"output-codec={outputCodec}");
return JObject.Parse(json);
}
public async Task GetAsync(
string path,
- CancellationToken cancel = default(CancellationToken))
+ string outputCodec = "dag-json",
+ CancellationToken cancel = default)
{
- var json = await ipfs.DoCommandAsync("dag/get", cancel, path);
+ var json = await ipfs.DoCommandAsync("dag/get", cancel, path, $"output-codec={outputCodec}");
return JToken.Parse(json);
}
- public async Task GetAsync(Cid id, CancellationToken cancel = default(CancellationToken))
+ public async Task GetAsync(Cid id, string outputCodec = "dag-json", CancellationToken cancel = default)
+ {
+ var json = await ipfs.DoCommandAsync("dag/get", cancel, id, $"output-codec={outputCodec}");
+ var res = JsonConvert.DeserializeObject(json);
+ if (res is null)
+ throw new InvalidDataException($"The response did not deserialize to the provided type.");
+
+ return res;
+ }
+
+ public Task ResolveAsync(string path, CancellationToken cancel = default)
+ {
+ return ipfs.DoCommandAsync("dag/resolve", cancel, path);
+ }
+
+ public async Task StatAsync(string cid, IProgress? progress = null, CancellationToken cancel = default)
{
- var json = await ipfs.DoCommandAsync("dag/get", cancel, id);
- return JsonConvert.DeserializeObject(json);
+ using var stream = await ipfs.PostDownloadAsync("dag/stat", cancel, cid, $"progress={(progress is not null).ToString().ToLowerInvariant()}");
+ DagStatSummary? current = null;
+
+ // Read line-by-line
+ using var reader = new StreamReader(stream);
+ while (!reader.EndOfStream)
+ {
+ cancel.ThrowIfCancellationRequested();
+ var json = await reader.ReadLineAsync();
+
+ current = JsonConvert.DeserializeObject(json);
+
+ if (current is not null)
+ progress?.Report(current);
+ }
+
+ return current ?? throw new InvalidDataException("The response did not contain a DAG stat summary.");
+ }
+
+ public Task ExportAsync(string path, CancellationToken cancellationToken = default)
+ {
+ return ipfs.DownloadAsync("dag/export", cancellationToken, path);
+ }
+
+ public async Task ImportAsync(Stream stream, bool? pinRoots = null, bool stats = false, CancellationToken cancellationToken = default)
+ {
+ string[] options = [
+            $"pin-roots={(pinRoots ?? true).ToString().ToLowerInvariant()}",
+ $"stats={stats.ToString().ToLowerInvariant()}"
+ ];
+
+ using var resultStream = await ipfs.Upload2Async("dag/import", cancellationToken, stream, null, options);
+
+ // Read line-by-line
+ using var reader = new StreamReader(resultStream);
+
+ // First output is always of type CarImportOutput
+ var json = await reader.ReadLineAsync();
+ var res = JsonConvert.DeserializeObject(json);
+ if (res is null)
+ throw new InvalidDataException($"The response did not deserialize to {nameof(CarImportOutput)}.");
+
+ // Second output is always of type DagStatSummary
+ if (stats)
+ {
+ json = await reader.ReadLineAsync();
+ var importStats = JsonConvert.DeserializeObject(json);
+ if (importStats is null)
+ throw new InvalidDataException($"The response did not deserialize a {nameof(CarImportStats)}.");
+
+ res.Stats = importStats;
+ }
+
+ return res;
}
- }
-}
+ }
+}
diff --git a/src/CoreApi/DupsResponse.cs b/src/CoreApi/DupsResponse.cs
deleted file mode 100644
index b983de4..0000000
--- a/src/CoreApi/DupsResponse.cs
+++ /dev/null
@@ -1,21 +0,0 @@
-using Ipfs.CoreApi;
-
-namespace Ipfs.Http.CoreApi
-{
- ///
- /// Model holding response from Dups command.
- ///
- public class DupsResponse : IDupsResponse
- {
- ///
- /// Any error in the Dups response.
- ///
- public string Err { get; set; }
-
- ///
- /// The error in the Dups response.
- ///
- public string Ref { get; set; }
- }
-
-}
diff --git a/src/CoreApi/FileSystemApi.cs b/src/CoreApi/FileSystemApi.cs
index 227b636..50c0b75 100644
--- a/src/CoreApi/FileSystemApi.cs
+++ b/src/CoreApi/FileSystemApi.cs
@@ -5,24 +5,29 @@
using System.Collections.Generic;
using System.IO;
using System.Linq;
+using System.Net;
+using System.Net.Http.Headers;
+using System.Net.Http;
+using System.Runtime.CompilerServices;
using System.Text;
using System.Threading;
using System.Threading.Tasks;
+#nullable enable
+
namespace Ipfs.Http
{
+
class FileSystemApi : IFileSystemApi
- {
+ {
private IpfsClient ipfs;
- private Lazy emptyFolder;
internal FileSystemApi(IpfsClient ipfs)
{
this.ipfs = ipfs;
- this.emptyFolder = new Lazy(() => ipfs.Object.NewDirectoryAsync().Result);
}
- public async Task AddFileAsync(string path, AddFileOptions options = null, CancellationToken cancel = default)
+ public async Task AddFileAsync(string path, AddFileOptions? options = null, CancellationToken cancel = default)
{
using (var stream = new FileStream(path, FileMode.Open, FileAccess.Read, FileShare.Read))
{
@@ -31,123 +36,89 @@ public async Task AddFileAsync(string path, AddFileOptions opti
}
}
- public Task AddTextAsync(string text, AddFileOptions options = null, CancellationToken cancel = default)
+ public Task AddTextAsync(string text, AddFileOptions? options = null, CancellationToken cancel = default)
{
return AddAsync(new MemoryStream(Encoding.UTF8.GetBytes(text), false), "", options, cancel);
}
- public async Task AddAsync(Stream stream, string name = "", AddFileOptions options = null, CancellationToken cancel = default)
+ public async Task AddAsync(Stream stream, string name = "", AddFileOptions? options = null, CancellationToken cancel = default)
{
- if (options == null)
- options = new AddFileOptions();
+ var filePart = new FilePart { Name = name, Data = stream };
+ await foreach (var item in AddAsync([filePart], [], options, cancel))
+ return item;
- var opts = new List();
- if (!options.Pin)
- opts.Add("pin=false");
- if (options.Wrap)
- opts.Add("wrap-with-directory=true");
- if (options.RawLeaves)
- opts.Add("raw-leaves=true");
- if (options.OnlyHash)
- opts.Add("only-hash=true");
- if (options.Trickle)
- opts.Add("trickle=true");
- if (options.Progress != null)
- opts.Add("progress=true");
- if (options.Hash != MultiHash.DefaultAlgorithmName)
- opts.Add($"hash=${options.Hash}");
- if (options.Encoding != MultiBase.DefaultAlgorithmName)
- opts.Add($"cid-base=${options.Encoding}");
- if (!string.IsNullOrWhiteSpace(options.ProtectionKey))
- opts.Add($"protect={options.ProtectionKey}");
-
- opts.Add($"chunker=size-{options.ChunkSize}");
-
- var response = await ipfs.Upload2Async("add", cancel, stream, name, opts.ToArray());
-
- // The result is a stream of LDJSON objects.
- // See https://github.com/ipfs/go-ipfs/issues/4852
- FileSystemNode fsn = null;
- using (var sr = new StreamReader(response))
- using (var jr = new JsonTextReader(sr) { SupportMultipleContent = true })
- {
- while (jr.Read())
- {
- var r = await JObject.LoadAsync(jr, cancel);
-
- // If a progress report.
- if (r.ContainsKey("Bytes"))
- {
- options.Progress?.Report(new TransferProgress
- {
- Name = (string)r["Name"],
- Bytes = (ulong)r["Bytes"]
- });
- }
-
- // Else must be an added file.
- else
- {
- fsn = new FileSystemNode
- {
- Id = (string)r["Hash"],
- Size = long.Parse((string)r["Size"]),
- IsDirectory = false,
- Name = name,
- };
- }
- }
- }
-
- fsn.IsDirectory = options.Wrap;
- return fsn;
+ throw new InvalidOperationException("No file nodes were provided");
}
- public async Task AddDirectoryAsync(string path, bool recursive = true, AddFileOptions options = null, CancellationToken cancel = default)
+ public async IAsyncEnumerable AddAsync(FilePart[] fileParts, FolderPart[] folderParts, AddFileOptions? options = default, [EnumeratorCancellation] CancellationToken cancel = default)
{
- if (options == null)
- options = new AddFileOptions();
- options.Wrap = false;
-
- // Add the files and sub-directories.
- path = Path.GetFullPath(path);
- var files = Directory
- .EnumerateFiles(path)
- .Select(p => AddFileAsync(p, options, cancel));
- if (recursive)
- {
- var folders = Directory
- .EnumerateDirectories(path)
- .Select(dir => AddDirectoryAsync(dir, recursive, options, cancel));
- files = files.Union(folders);
- }
-
- // go-ipfs v0.4.14 sometimes fails when sending lots of 'add file'
- // requests. It's happy with adding one file at a time.
-#if true
- var links = new List();
- foreach (var file in files)
- {
- var node = await file;
- links.Add(node.ToLink());
- }
-#else
- var nodes = await Task.WhenAll(files);
- var links = nodes.Select(node => node.ToLink());
-#endif
- // Create the directory with links to the created files and sub-directories
- var folder = emptyFolder.Value.AddLinks(links);
- var directory = await ipfs.Object.PutAsync(folder, cancel);
-
- return new FileSystemNode
+ string boundary = $"{Guid.NewGuid()}";
+ var content = new OrderedMultipartFormDataContent(boundary);
+
+ foreach (var folderPart in folderParts)
+ AddApiHeader(content, folderPart);
+
+ foreach (var filePart in fileParts)
+ AddApiHeader(content, filePart);
+
+ var url = ipfs.BuildCommand("add", null, ToApiOptions(options));
+
+ // Create the request message
+ var request = new HttpRequestMessage(HttpMethod.Post, url)
{
- Id = directory.Id,
- Name = Path.GetFileName(path),
- Links = links,
- IsDirectory = true,
- Size = directory.Size,
+ Content = content
};
+ // Enable chunked transfer encoding
+ request.Headers.TransferEncodingChunked = true;
+
+ // Remove the Content-Length header if it exists
+ request.Content.Headers.ContentLength = null;
+
+ using var response = await ipfs.Api().SendAsync(request, cancel);
+ await ipfs.ThrowOnErrorAsync(response);
+
+ // The result is a stream of LDJSON objects.
+ // See https://github.com/ipfs/go-ipfs/issues/4852
+ using var stream = await response.Content.ReadAsStreamAsync();
+ using var sr = new StreamReader(stream);
+
+ using var jr = new JsonTextReader(sr) { SupportMultipleContent = true };
+
+ while (await jr.ReadAsync(cancel))
+ {
+ cancel.ThrowIfCancellationRequested();
+ var r = await JObject.LoadAsync(jr, cancel);
+
+ // For the filestore, the hash can be output instead of the bytes. Verified with small files.
+ var isFilestoreProgressOutput = !r.TryGetValue("Hash", out _);
+
+ // For uploads, bytes are output to report progress.
+ var isUploadProgressOutput = r.TryGetValue("Bytes", out var bytes);
+
+ if (isUploadProgressOutput)
+ {
+ options?.Progress?.Report(new TransferProgress
+ {
+ Name = r["Name"]?.ToString() ?? throw new InvalidDataException("The response did not contain a name."),
+ Bytes = bytes?.ToObject() ?? 0,
+ });
+ }
+ else if (!isFilestoreProgressOutput)
+ {
+ var name = r["Name"]?.ToString() ?? throw new InvalidDataException("The response did not contain a name.");
+ yield return new FileSystemNode
+ {
+ Name = name,
+ Id = r["Hash"]?.ToString() ??
+ throw new InvalidDataException("The response did not contain a hash."),
+ Size = r["Size"] is { } sz
+ ? sz.ToObject()
+ : throw new InvalidDataException("The response did not contain a size."),
+ IsDirectory = folderParts.Any(x => x.Name == name),
+ };
+ }
+ }
}
///
@@ -172,7 +143,6 @@ public async Task ReadAllTextAsync(string path, CancellationToken cancel
}
}
-
///
/// Opens an existing IPFS file for reading.
///
@@ -193,15 +163,7 @@ public Task ReadFileAsync(string path, CancellationToken cancel = defaul
public Task ReadFileAsync(string path, long offset, long length = 0, CancellationToken cancel = default)
{
- // https://github.com/ipfs/go-ipfs/issues/5380
- if (offset > int.MaxValue)
- throw new NotSupportedException("Only int offsets are currently supported.");
- if (length > int.MaxValue)
- throw new NotSupportedException("Only int lengths are currently supported.");
-
- if (length == 0)
- length = int.MaxValue; // go-ipfs only accepts int lengths
- return ipfs.PostDownloadAsync("cat", cancel, path,
+ return ipfs.PostDownloadAsync("cat", cancel, path,
$"offset={offset}",
$"length={length}");
}
@@ -226,11 +188,12 @@ public async Task ListAsync(string path, CancellationToken canc
{
var json = await ipfs.DoCommandAsync("ls", cancel, path);
var r = JObject.Parse(json);
- var o = (JObject)r["Objects"]?[0];
+ var o = (JObject?)r["Objects"]?[0];
+ var h = (o?["Hash"])?.ToString() ?? throw new InvalidDataException("The response did not contain a hash.");
var node = new FileSystemNode()
{
- Id = (string)o["Hash"],
+ Id = h,
IsDirectory = true,
Links = Array.Empty(),
};
@@ -240,9 +203,9 @@ public async Task ListAsync(string path, CancellationToken canc
node.Links = links
.Select(l => new FileSystemLink()
{
- Name = (string)l["Name"],
- Id = (string)l["Hash"],
- Size = (long)l["Size"],
+ Name = l["Name"]?.ToString() ?? throw new InvalidDataException("The response did not contain a name."),
+ Id = l["Hash"]?.ToString() ?? throw new InvalidDataException("The response did not contain a hash."),
+ Size = l["Size"] is { } sz ? sz.ToObject() : throw new InvalidDataException("The response did not contain a size."),
})
.ToArray();
}
@@ -250,9 +213,108 @@ public async Task ListAsync(string path, CancellationToken canc
return node;
}
- public Task GetAsync(string path, bool compress = false, CancellationToken cancel = default)
- {
+ public Task GetAsync(string path, bool compress = false, CancellationToken cancel = default)
+ {
return ipfs.PostDownloadAsync("get", cancel, path, $"compress={compress}");
}
+
+ public void AddApiHeader(MultipartFormDataContent content, FolderPart folderPart)
+ {
+ // Use a ByteArrayContent with an empty byte array to signify no content
+ var folderContent = new ByteArrayContent([]); // Empty content
+ folderContent.Headers.ContentType = new MediaTypeHeaderValue("application/x-directory");
+ folderContent.Headers.ContentDisposition = new ContentDispositionHeaderValue("form-data")
+ {
+ Name = "\"file\"",
+ FileName = $"\"{WebUtility.UrlEncode(folderPart.Name)}\""
+ };
+
+ // Add the content part to the multipart content
+ content.Add(folderContent);
+ }
+
+ public void AddApiHeader(MultipartFormDataContent content, FilePart filePart)
+ {
+ var streamContent = new StreamContent(filePart.Data ?? new MemoryStream());
+ streamContent.Headers.ContentType = new MediaTypeHeaderValue("application/octet-stream");
+
+ if (filePart.AbsolutePath is not null)
+ streamContent.Headers.Add("Abspath-Encoded", WebUtility.UrlEncode(filePart.AbsolutePath));
+
+ streamContent.Headers.ContentDisposition = new ContentDispositionHeaderValue("form-data")
+ {
+ Name = "\"file\"",
+ FileName = $"\"{WebUtility.UrlEncode(filePart.Name)}\""
+ };
+
+ content.Add(streamContent);
+ }
+
+ private string[] ToApiOptions(AddFileOptions? options)
+ {
+ var opts = new List();
+
+ if (options is null)
+ return opts.ToArray();
+
+ if (options.CidVersion is not null)
+ opts.Add($"cid-version={options.CidVersion}");
+
+ if (options.Inline is not null)
+ opts.Add($"inline={options.Inline.ToString().ToLowerInvariant()}");
+
+ if (options.InlineLimit is not null)
+ opts.Add($"inline-limit={options.InlineLimit}");
+
+ if (options.NoCopy is not null)
+ opts.Add($"nocopy={options.NoCopy.ToString().ToLowerInvariant()}");
+
+            if (options.Pin is not null)
+                opts.Add($"pin={options.Pin.ToString().ToLowerInvariant()}");
+
+ if (options.Wrap is not null)
+ opts.Add($"wrap-with-directory={options.Wrap.ToString().ToLowerInvariant()}");
+
+ if (options.RawLeaves is not null)
+ opts.Add($"raw-leaves={options.RawLeaves.ToString().ToLowerInvariant()}");
+
+ if (options.OnlyHash is not null)
+ opts.Add($"only-hash={options.OnlyHash.ToString().ToLowerInvariant()}");
+
+ if (options.Trickle is not null)
+ opts.Add($"trickle={options.Trickle.ToString().ToLowerInvariant()}");
+
+ if (options.Chunker is not null)
+ opts.Add($"chunker={options.Chunker}");
+
+ if (options.Progress is not null)
+ opts.Add("progress=true");
+
+ if (options.Hash is not null)
+                opts.Add($"hash={options.Hash}");
+
+ if (options.FsCache is not null)
+                opts.Add($"fscache={options.FsCache.ToString().ToLowerInvariant()}");
+
+ if (options.ToFiles is not null)
+ opts.Add($"to-files={options.ToFiles}");
+
+ if (options.PreserveMode is not null)
+ opts.Add($"preserve-mode={options.PreserveMode.ToString().ToLowerInvariant()}");
+
+ if (options.PreserveMtime is not null)
+ opts.Add($"preserve-mtime={options.PreserveMtime.ToString().ToLowerInvariant()}");
+
+ if (options.Mode is not null)
+ opts.Add($"mode={options.Mode}");
+
+ if (options.Mtime is not null)
+ opts.Add($"mtime={options.Mtime}");
+
+ if (options.MtimeNsecs is not null)
+ opts.Add($"mtime-nsecs={options.MtimeNsecs}");
+
+ return opts.ToArray();
+ }
}
}
diff --git a/src/CoreApi/FilestoreApi.cs b/src/CoreApi/FilestoreApi.cs
index 9476ffe..ddf6f90 100644
--- a/src/CoreApi/FilestoreApi.cs
+++ b/src/CoreApi/FilestoreApi.cs
@@ -1,13 +1,11 @@
using Ipfs.CoreApi;
-using Ipfs.Http.CoreApi;
using Newtonsoft.Json;
-using Newtonsoft.Json.Linq;
-using System;
using System.Collections.Generic;
using System.IO;
-using System.Text;
+using System.Runtime.CompilerServices;
using System.Threading;
-using System.Threading.Tasks;
+
+#nullable enable
namespace Ipfs.Http
{
@@ -23,27 +21,62 @@ internal FilestoreApi(IpfsClient ipfs)
}
///
- public async Task ListAsync(string cid, bool fileOrder, CancellationToken token)
+ public async IAsyncEnumerable ListAsync(string? cid = null, bool? fileOrder = null, [EnumeratorCancellation] CancellationToken token = default)
{
- var json = await ipfs.DoCommandAsync("filestore/ls", token, cid, fileOrder.ToString());
+ string[] options = [];
+
+ if (fileOrder is not null)
+ options = [..options, $"file-order={fileOrder.ToString().ToLowerInvariant()}"];
+
+ using var stream = await ipfs.PostDownloadAsync("filestore/ls", token, cid, options);
+
+ // Read line-by-line
+ using var reader = new StreamReader(stream);
+ while (!reader.EndOfStream)
+ {
+ token.ThrowIfCancellationRequested();
+ var json = await reader.ReadLineAsync();
- return JsonConvert.DeserializeObject(json);
+ var res = JsonConvert.DeserializeObject(json);
+ if (res is not null)
+ yield return res;
+ }
}
///
- public async Task VerifyObjectsAsync(string cid, bool fileOrder, CancellationToken token)
+ public async IAsyncEnumerable VerifyObjectsAsync(string? cid = null, bool? fileOrder = null, [EnumeratorCancellation] CancellationToken token = default)
{
- var json = await ipfs.DoCommandAsync("filestore/verify", token, cid, fileOrder.ToString());
+            using var stream = await ipfs.PostDownloadAsync("filestore/verify", token, cid, $"file-order={(fileOrder ?? false).ToString().ToLowerInvariant()}");
- return JsonConvert.DeserializeObject(json);
+ // Read line-by-line
+ using var reader = new StreamReader(stream);
+ while (!reader.EndOfStream)
+ {
+ token.ThrowIfCancellationRequested();
+ var json = await reader.ReadLineAsync();
+
+ var res = JsonConvert.DeserializeObject(json);
+ if (res is not null)
+ yield return res;
+ }
}
///
- public async Task DupsAsync(CancellationToken token)
+ public async IAsyncEnumerable DupsAsync([EnumeratorCancellation] CancellationToken token = default)
{
- var json = await ipfs.DoCommandAsync("filestore/dups", token);
+ using var stream = await ipfs.PostDownloadAsync("filestore/dups", token);
+
+ // Read line-by-line
+ using var reader = new StreamReader(stream);
+ while (!reader.EndOfStream)
+ {
+ token.ThrowIfCancellationRequested();
+ var json = await reader.ReadLineAsync();
- return JsonConvert.DeserializeObject(json);
+ var res = JsonConvert.DeserializeObject(json);
+ if (res is not null)
+ yield return res;
+ }
}
}
diff --git a/src/CoreApi/FilestoreKey.cs b/src/CoreApi/FilestoreKey.cs
deleted file mode 100644
index 5267294..0000000
--- a/src/CoreApi/FilestoreKey.cs
+++ /dev/null
@@ -1,18 +0,0 @@
-using Ipfs.CoreApi;
-using Newtonsoft.Json;
-
-namespace Ipfs.Http
-{
- ///
- /// Model for the hold filestore key
- ///
- public class FilestoreKey : IFilestoreKey
- {
- ///
- /// Key value.
- ///
- [JsonProperty("/")]
- public string _ { get; set; }
- }
-
-}
diff --git a/src/CoreApi/FilestoreObjectResponse.cs b/src/CoreApi/FilestoreObjectResponse.cs
deleted file mode 100644
index 4332633..0000000
--- a/src/CoreApi/FilestoreObjectResponse.cs
+++ /dev/null
@@ -1,41 +0,0 @@
-using Ipfs.CoreApi;
-
-namespace Ipfs.Http
-{
- ///
- /// Model holding response to .
- ///
- public class FilestoreObjectResponse : IFilestoreApiObjectResponse
- {
- ///
- /// Holds any error message.
- ///
- public string ErrorMsg { get; set; }
-
- ///
- /// Path to the file
- ///
- public string FilePath { get; set; }
-
- ///
- /// The key to the Filestore.
- ///
- public FilestoreKey Key { get; set; }
-
- ///
- /// The response offset.
- ///
- public string Offset { get; set; }
-
- ///
- /// The size of the object.
- ///
- public string Size { get; set; }
-
- ///
- /// The object status.k
- ///
- public string Status { get; set; }
- }
-
-}
diff --git a/src/CoreApi/GenericApi.cs b/src/CoreApi/GenericApi.cs
index e6ca5d2..69299c9 100644
--- a/src/CoreApi/GenericApi.cs
+++ b/src/CoreApi/GenericApi.cs
@@ -25,6 +25,7 @@ public partial class IpfsClient : IGenericApi
var stream = await PostDownloadAsync("ping", cancel,
peer.ToString(),
$"count={count.ToString(CultureInfo.InvariantCulture)}");
+
return PingResultFromStream(stream);
}
@@ -34,6 +35,7 @@ public partial class IpfsClient : IGenericApi
var stream = await PostDownloadAsync("ping", cancel,
address.ToString(),
$"count={count.ToString(CultureInfo.InvariantCulture)}");
+
return PingResultFromStream(stream);
}
diff --git a/src/CoreApi/MfsApi.cs b/src/CoreApi/MfsApi.cs
index 147855b..0a11778 100644
--- a/src/CoreApi/MfsApi.cs
+++ b/src/CoreApi/MfsApi.cs
@@ -59,7 +59,7 @@ public async Task> ListAsync(string path, bool? U =
{
Name = (string)l["Name"],
Id = (string)l["Hash"],
- Size = (long)l["Size"],
+ Size = (ulong)l["Size"],
IsDirectory = (int)l["Type"] == 1,
})
.ToArray();
diff --git a/src/CoreApi/ObjectApi.cs b/src/CoreApi/ObjectApi.cs
deleted file mode 100644
index 8ea0210..0000000
--- a/src/CoreApi/ObjectApi.cs
+++ /dev/null
@@ -1,93 +0,0 @@
-using Ipfs.CoreApi;
-using Newtonsoft.Json.Linq;
-using System.Collections.Generic;
-using System.IO;
-using System.Linq;
-using System.Text;
-using System.Threading;
-using System.Threading.Tasks;
-
-namespace Ipfs.Http
-{
- class ObjectApi : IObjectApi
- {
- private IpfsClient ipfs;
-
- internal ObjectApi(IpfsClient ipfs)
- {
- this.ipfs = ipfs;
- }
-
- public Task NewDirectoryAsync(CancellationToken cancel = default(CancellationToken))
- {
- return NewAsync("unixfs-dir", cancel);
- }
-
- public async Task NewAsync(string template = null, CancellationToken cancel = default(CancellationToken))
- {
- var json = await ipfs.DoCommandAsync("object/new", cancel, template);
- var hash = (string)(JObject.Parse(json)["Hash"]);
- return await GetAsync(hash);
- }
-
- public async Task GetAsync(Cid id, CancellationToken cancel = default(CancellationToken))
- {
- var json = await ipfs.DoCommandAsync("object/get", cancel, id);
- return GetDagFromJson(json);
- }
-
- public Task PutAsync(byte[] data, IEnumerable links = null, CancellationToken cancel = default(CancellationToken))
- {
- return PutAsync(new DagNode(data, links), cancel);
- }
-
- public async Task PutAsync(DagNode node, CancellationToken cancel = default(CancellationToken))
- {
- var json = await ipfs.UploadAsync("object/put", cancel, node.ToArray(), "inputenc=protobuf");
- return node;
- }
-
- public Task DataAsync(Cid id, CancellationToken cancel = default(CancellationToken))
- {
- return ipfs.PostDownloadAsync("object/data", cancel, id);
- }
-
- public async Task> LinksAsync(Cid id, CancellationToken cancel = default(CancellationToken))
- {
- var json = await ipfs.DoCommandAsync("object/links", cancel, id);
- return GetDagFromJson(json).Links;
- }
-
- // TOOD: patch sub API
-
- DagNode GetDagFromJson(string json)
- {
- var result = JObject.Parse(json);
- byte[] data = null;
- var stringData = (string)result["Data"];
- if (stringData != null)
- data = Encoding.UTF8.GetBytes(stringData);
- var links = ((JArray)result["Links"])
- .Select(link => new DagLink(
- (string)link["Name"],
- (string)link["Hash"],
- (long)link["Size"]));
- return new DagNode(data, links);
- }
-
- public async Task StatAsync(Cid id, CancellationToken cancel = default(CancellationToken))
- {
- var json = await ipfs.DoCommandAsync("object/stat", cancel, id);
- var r = JObject.Parse(json);
-
- return new ObjectStat
- {
- LinkCount = (int)r["NumLinks"],
- LinkSize = (long)r["LinksSize"],
- BlockSize = (long)r["BlockSize"],
- DataSize = (long)r["DataSize"],
- CumulativeSize = (long)r["CumulativeSize"]
- };
- }
- }
-}
diff --git a/src/CoreApi/OrderedMultipartFormDataContent.cs b/src/CoreApi/OrderedMultipartFormDataContent.cs
new file mode 100644
index 0000000..eb456a5
--- /dev/null
+++ b/src/CoreApi/OrderedMultipartFormDataContent.cs
@@ -0,0 +1,102 @@
+using System.Collections.Generic;
+using System.IO;
+using System.Linq;
+using System.Net;
+using System.Net.Http;
+using System.Text;
+using System.Threading;
+using System.Threading.Tasks;
+
+#nullable enable
+
+namespace Ipfs.Http
+{
+    /// <summary>
+    /// An ordered <see cref="MultipartFormDataContent"/>.
+    /// </summary>
+ internal class OrderedMultipartFormDataContent : MultipartFormDataContent
+ {
+ private const string CrLf = "\r\n";
+
+        /// <summary>
+        /// Creates a new instance of <see cref="OrderedMultipartFormDataContent"/>.
+        /// </summary>
+        /// <param name="boundary">The multipart boundary string.</param>
+ public OrderedMultipartFormDataContent(string boundary)
+ : base(boundary)
+ {
+ Boundary = boundary;
+
+ // Remove the default Content-Type header set by MultipartFormDataContent
+ Headers.Remove("Content-Type");
+
+ // Add the Content-Type header without quotes around the boundary
+ Headers.TryAddWithoutValidation("Content-Type", $"multipart/form-data; boundary={boundary}");
+ }
+
+        /// <summary>
+        /// The boundary for this <see cref="OrderedMultipartFormDataContent"/>.
+        /// </summary>
+ public string Boundary { get; set; }
+
+        /// <inheritdoc />
+        protected override async Task SerializeToStreamAsync(Stream stream, TransportContext? context)
+ {
+ // Write start boundary.
+ await EncodeStringToStreamAsync(stream, "--" + Boundary + CrLf, default).ConfigureAwait(false);
+
+ // Write each nested content.
+ var output = new StringBuilder();
+ var items = this.ToList();
+ for (var contentIndex = 0; contentIndex < items.Count; contentIndex++)
+ {
+ // Write divider, headers, and content.
+ var content = items[contentIndex];
+ await EncodeStringToStreamAsync(stream, SerializeHeadersToString(output, contentIndex, content), default).ConfigureAwait(false);
+ await content.CopyToAsync(stream, context).ConfigureAwait(false);
+ }
+
+ // Write footer boundary.
+ await EncodeStringToStreamAsync(stream, CrLf + "--" + Boundary + "--" + CrLf, default).ConfigureAwait(false);
+ }
+
+ private static ValueTask EncodeStringToStreamAsync(Stream stream, string input, CancellationToken cancellationToken)
+ {
+ byte[] buffer = Encoding.UTF8.GetBytes(input);
+ return new ValueTask(stream.WriteAsync(buffer, 0, buffer.Length, cancellationToken));
+ }
+
+ private string SerializeHeadersToString(StringBuilder scratch, int contentIndex, HttpContent content)
+ {
+ scratch.Clear();
+
+ // Add divider.
+ if (contentIndex != 0) // Write divider for all but the first content.
+ {
+ scratch.Append(CrLf + "--"); // const strings
+ scratch.Append(Boundary);
+ scratch.Append(CrLf);
+ }
+
+ // Add headers.
+            foreach (KeyValuePair<string, IEnumerable<string>> headerPair in content.Headers.OrderBy(x => x.Key))
+ {
+ scratch.Append(headerPair.Key);
+ scratch.Append(": ");
+ string delim = string.Empty;
+ foreach (string value in headerPair.Value)
+ {
+ scratch.Append(delim);
+ scratch.Append(value);
+ delim = ", ";
+ }
+ scratch.Append(CrLf);
+ }
+
+ // Extra CRLF to end headers (even if there are no headers).
+ scratch.Append(CrLf);
+
+ return scratch.ToString();
+ }
+ }
+}
diff --git a/src/FileSystemLink.cs b/src/FileSystemLink.cs
index 3315569..d087975 100644
--- a/src/FileSystemLink.cs
+++ b/src/FileSystemLink.cs
@@ -1,4 +1,6 @@
-namespace Ipfs.Http
+using Newtonsoft.Json;
+
+namespace Ipfs.Http
{
///
/// A link to another file system node in IPFS.
@@ -9,9 +11,10 @@ public class FileSystemLink : IFileSystemLink
public string Name { get; set; }
///
+ [JsonProperty("Hash")]
public Cid Id { get; set; }
///
- public long Size { get; set; }
+ public ulong Size { get; set; }
}
}
diff --git a/src/FileSystemNode.cs b/src/FileSystemNode.cs
index e1be657..0e5ec21 100644
--- a/src/FileSystemNode.cs
+++ b/src/FileSystemNode.cs
@@ -23,7 +23,7 @@ public class FileSystemNode : IFileSystemNode
/// of the block.
///
[DataMember]
- public long Size { get; set; }
+ public ulong Size { get; set; }
///
/// Determines if the link is a directory (folder).
diff --git a/src/IpfsClient.cs b/src/IpfsClient.cs
index 43d357a..9af8c98 100644
--- a/src/IpfsClient.cs
+++ b/src/IpfsClient.cs
@@ -71,9 +71,8 @@ public IpfsClient()
Dht = new DhtApi(this);
Swarm = new SwarmApi(this);
Dag = new DagApi(this);
- Object = new ObjectApi(this);
FileSystem = new FileSystemApi(this);
- FilestoreApi = new FilestoreApi(this);
+ Filestore = new FilestoreApi(this);
Mfs = new MfsApi(this);
PubSub = new PubSubApi(this);
Key = new KeyApi(this);
@@ -158,15 +157,11 @@ public IpfsClient(string host)
///
public ISwarmApi Swarm { get; private set; }
- ///
- public IObjectApi Object { get; private set; }
-
///
public IFileSystemApi FileSystem { get; private set; }
-
///
- public IFilestoreApi FilestoreApi { get; private set; }
+ public IFilestoreApi Filestore { get; private set; }
///
public IMfsApi Mfs { get; private set; }
@@ -177,7 +172,7 @@ public IpfsClient(string host)
///
public IKeyApi Key { get; private set; }
- Uri BuildCommand(string command, string arg = null, params string[] options)
+ internal Uri BuildCommand(string command, string arg = null, params string[] options)
{
var url = "/api/v0/" + command;
var q = new StringBuilder();
@@ -223,7 +218,7 @@ Uri BuildCommand(string command, string arg = null, params string[] options)
///
/// Only one client is needed. Its thread safe.
///
- HttpClient Api()
+ internal HttpClient Api()
{
if (api == null)
{
@@ -233,13 +228,12 @@ HttpClient Api()
{
if (HttpMessageHandler is HttpClientHandler handler && handler.SupportsAutomaticDecompression)
{
- handler.AutomaticDecompression = DecompressionMethods.GZip
- | DecompressionMethods.Deflate;
+ handler.AutomaticDecompression = DecompressionMethods.GZip;
}
api = new HttpClient(HttpMessageHandler)
{
- Timeout = Timeout.InfiniteTimeSpan
+ Timeout = Timeout.InfiniteTimeSpan,
};
api.DefaultRequestHeaders.Add("User-Agent", UserAgent);
@@ -500,6 +494,7 @@ public async Task UploadAsync(string command, CancellationToken cancel,
return json;
}
}
+
///
/// Perform an IPFS API command that
/// requires uploading of a "file".
@@ -579,7 +574,7 @@ public async Task UploadAsync(string command, CancellationToken cancel,
///
/// The API server returns an JSON error in the form { "Message": "...", "Code": ... }.
///
- async Task ThrowOnErrorAsync(HttpResponseMessage response)
+ internal async Task ThrowOnErrorAsync(HttpResponseMessage response)
{
if (response.IsSuccessStatusCode)
return true;
diff --git a/src/IpfsHttpClient.csproj b/src/IpfsHttpClient.csproj
index bf6ece9..2826886 100644
--- a/src/IpfsHttpClient.csproj
+++ b/src/IpfsHttpClient.csproj
@@ -9,7 +9,7 @@
true
- 0.5.1
+ 0.6.0
$(Version)
12.0
@@ -41,6 +41,24 @@
snupkg
.pdb;$(AllowedOutputExtensionsInPackageBuildOutputFolder)
+--- 0.6.0 ---
+[Breaking]
+Fixed WebRTC-Direct support added in Kubo 0.30.0.
+Removed the Object API completely, since Kubo replaced it with the DAG API and no longer offers it.
+Refactored the FileSystem API to:
+ - Enable proper directory uploads via a new `AddAsync` method that takes File and Folder parts separately.
+ - Bring `FileAddOptions` fully up-to-date with modern Kubo.
+ - Remove `AddDirectoryAsync` (used missing Object API).
+Updated several types to use int, long, or ulong for Size, matching the Kubo API.
+MerkleNode no longer has a static internal IpfsClient, and several properties that performed async calls synchronously were adjusted or removed.
+Block.DataBytes was removed as it was unused by any known interface or implementation.
+
+[New]
+Added FilestoreApi and the corresponding types.
+
+[Improvements]
+FileSystemApi.AddAsync now uses chunked transfer encoding, enabling uploading of very large files or groups of files.
+
--- 0.5.1 ---
[New]
Added support for MfsWriteOptions.Flush in MfsApi.WriteAsync.
@@ -84,7 +102,7 @@ Added missing IFileSystemApi.ListAsync. Doesn't fully replace the removed IFileS
-
+
@@ -93,5 +111,4 @@ Added missing IFileSystemApi.ListAsync. Doesn't fully replace the removed IFileS
runtime; build; native; contentfiles; analyzers; buildtransitive
-
diff --git a/src/MerkleNode.cs b/src/MerkleNode.cs
index 824a093..f1f6d65 100644
--- a/src/MerkleNode.cs
+++ b/src/MerkleNode.cs
@@ -1,12 +1,12 @@
using System;
using System.Collections.Generic;
-using System.IO;
using System.Runtime.Serialization;
namespace Ipfs.Http
{
///
- /// The IPFS MerkleDag is the datastructure at the heart of IPFS. It is an acyclic directed graph whose edges are hashes.
+ /// The IPFS MerkleDag is the datastructure at the heart of IPFS.
+ /// It is an acyclic directed graph whose edges are hashes.
///
///
/// Initially an MerkleNode is just constructed with its Cid.
@@ -14,11 +14,9 @@ namespace Ipfs.Http
[DataContract]
public class MerkleNode : IMerkleNode, IEquatable
{
- bool hasBlockStats;
- long blockSize;
+ ulong blockSize;
string name;
- IEnumerable links;
- IpfsClient ipfsClient;
+ IEnumerable links = [];
///
/// Creates a new instance of the with the specified
@@ -67,26 +65,6 @@ public MerkleNode(IMerkleLink link)
Id = link.Id;
Name = link.Name;
blockSize = link.Size;
- hasBlockStats = true;
- }
-
- internal IpfsClient IpfsClient
- {
- get
- {
- if (ipfsClient == null)
- {
- lock (this)
- {
- ipfsClient = new IpfsClient();
- }
- }
- return ipfsClient;
- }
- set
- {
- ipfsClient = value;
- }
}
///
@@ -103,67 +81,19 @@ public string Name
set { name = value ?? string.Empty; }
}
- ///
- /// Size of the raw, encoded node.
- ///
- [DataMember]
- public long BlockSize
- {
- get
- {
- GetBlockStats();
- return blockSize;
- }
- }
-
///
- ///
+ ///
[DataMember]
- public long Size
- {
- get
- {
- return BlockSize;
- }
- }
-
+ public ulong Size => blockSize;
///
[DataMember]
- public IEnumerable Links
- {
- get
- {
- if (links == null)
- {
- links = IpfsClient.Object.LinksAsync(Id).Result;
- }
-
- return links;
- }
- }
+ public IEnumerable Links => links;
///
public IMerkleLink ToLink(string name = null)
{
- return new DagLink(name ?? Name, Id, BlockSize);
- }
-
- ///
- /// Get block statistics about the node, ipfs block stat key
- ///
- ///
- /// The object stats include the block stats.
- ///
- void GetBlockStats()
- {
- if (hasBlockStats)
- return;
-
- var stats = IpfsClient.Block.StatAsync(Id).Result;
- blockSize = stats.Size;
-
- hasBlockStats = true;
+ return new DagLink(name ?? Name, Id, Size);
}
///
@@ -214,14 +144,5 @@ public override string ToString()
{
return "/ipfs/" + Id;
}
-
- ///
- /// TODO
- ///
- static public implicit operator MerkleNode(string hash)
- {
- return new MerkleNode(hash);
- }
-
}
}
diff --git a/src/PublishedMessage.cs b/src/PublishedMessage.cs
index ca257ca..609d640 100644
--- a/src/PublishedMessage.cs
+++ b/src/PublishedMessage.cs
@@ -29,9 +29,9 @@ public PublishedMessage(string json)
{
var o = JObject.Parse(json);
+ this.DataBytes = Multibase.Decode((string)o["data"], out MultibaseEncoding _);
this.Sender = (string)o["from"];
this.SequenceNumber = Multibase.Decode((string)o["seqno"], out MultibaseEncoding _);
- this.DataBytes = Multibase.Decode((string)o["data"], out MultibaseEncoding _);
var topics = (JArray) (o["topicIDs"]);
this.Topics = topics.Select(t => Encoding.UTF8.GetString(Multibase.Decode((string)t, out MultibaseEncoding _)));
@@ -62,12 +62,6 @@ public Stream DataStream
}
}
- ///
- [DataMember]
- public long Size
- {
- get { return DataBytes.Length; }
- }
///
/// Contents as a string.
///
diff --git a/test/CoreApi/BlockApiTest.cs b/test/CoreApi/BlockApiTest.cs
index 357f60d..9527d78 100644
--- a/test/CoreApi/BlockApiTest.cs
+++ b/test/CoreApi/BlockApiTest.cs
@@ -18,13 +18,13 @@ public class BlockApiTest
[TestMethod]
public async Task Put_Bytes()
{
- var cid = await ipfs.Block.PutAsync(blob);
- Assert.AreEqual(id, (string)cid);
+ var blockStat = await ipfs.Block.PutAsync(blob);
+ Assert.AreEqual(id, (string)blockStat.Id);
- var data = await ipfs.Block.GetAsync(cid);
+ var data = await ipfs.Block.GetAsync(blockStat.Id);
Assert.AreEqual(blob.Length, data.Length);
- var stream = await ipfs.FileSystem.ReadFileAsync(cid);
+ var stream = await ipfs.FileSystem.ReadFileAsync(blockStat.Id);
using var memoryStream = new MemoryStream();
await stream.CopyToAsync(memoryStream);
var bytes = memoryStream.ToArray();
@@ -35,10 +35,10 @@ public async Task Put_Bytes()
[TestMethod]
public void Put_Bytes_ContentType()
{
- var cid = ipfs.Block.PutAsync(blob, contentType: "raw").Result;
- Assert.AreEqual("bafkreiaxnnnb7qz2focittuqq3ya25q7rcv3bqynnczfzako47346wosmu", (string)cid);
+ var blockStat = ipfs.Block.PutAsync(blob).Result;
+ Assert.AreEqual("bafkreiaxnnnb7qz2focittuqq3ya25q7rcv3bqynnczfzako47346wosmu", (string)blockStat.Id);
- var data = ipfs.Block.GetAsync(cid).Result;
+ var data = ipfs.Block.GetAsync(blockStat.Id).Result;
Assert.AreEqual(blob.Length, data.Length);
CollectionAssert.AreEqual(blob, data);
}
@@ -46,10 +46,10 @@ public void Put_Bytes_ContentType()
[TestMethod]
public void Put_Bytes_Hash()
{
- var cid = ipfs.Block.PutAsync(blob, "raw", "sha2-512").Result;
- Assert.AreEqual("bafkrgqelljziv4qfg5mefz36m2y3h6voaralnw6lwb4f53xcnrf4mlsykkn7vt6eno547tw5ygcz62kxrle45wnbmpbofo5tvu57jvuaf7k7e", (string)cid);
+ var blockStat = ipfs.Block.PutAsync(blob, "raw", "sha2-512").Result;
+ Assert.AreEqual("bafkrgqelljziv4qfg5mefz36m2y3h6voaralnw6lwb4f53xcnrf4mlsykkn7vt6eno547tw5ygcz62kxrle45wnbmpbofo5tvu57jvuaf7k7e", (string)blockStat.Id);
- var data = ipfs.Block.GetAsync(cid).Result;
+ var data = ipfs.Block.GetAsync(blockStat.Id).Result;
Assert.AreEqual(blob.Length, data.Length);
CollectionAssert.AreEqual(blob, data);
}
@@ -58,23 +58,23 @@ public void Put_Bytes_Hash()
public void Put_Bytes_Pinned()
{
var data1 = new byte[] { 23, 24, 127 };
- var cid1 = ipfs.Block.PutAsync(data1, contentType: "raw", pin: true).Result;
+ var cid1 = ipfs.Block.PutAsync(data1, pin: true).Result;
var pins = ipfs.Pin.ListAsync().Result;
- Assert.IsTrue(pins.Any(pin => pin == cid1));
+ Assert.IsTrue(pins.Any(pin => pin == cid1.Id));
var data2 = new byte[] { 123, 124, 27 };
- var cid2 = ipfs.Block.PutAsync(data2, contentType: "raw", pin: false).Result;
+ var cid2 = ipfs.Block.PutAsync(data2, pin: false).Result;
pins = ipfs.Pin.ListAsync().Result;
- Assert.IsFalse(pins.Any(pin => pin == cid2));
+ Assert.IsFalse(pins.Any(pin => pin == cid2.Id));
}
[TestMethod]
public void Put_Stream()
{
- var cid = ipfs.Block.PutAsync(new MemoryStream(blob)).Result;
- Assert.AreEqual(id, (string)cid);
+ var blockStat = ipfs.Block.PutAsync(new MemoryStream(blob)).Result;
+ Assert.AreEqual(id, (string)blockStat.Id);
- var data = ipfs.Block.GetAsync(cid).Result;
+ var data = ipfs.Block.GetAsync(blockStat.Id).Result;
Assert.AreEqual(blob.Length, data.Length);
CollectionAssert.AreEqual(blob, data);
}
@@ -82,10 +82,10 @@ public void Put_Stream()
[TestMethod]
public void Put_Stream_ContentType()
{
- var cid = ipfs.Block.PutAsync(new MemoryStream(blob), contentType: "raw").Result;
- Assert.AreEqual("bafkreiaxnnnb7qz2focittuqq3ya25q7rcv3bqynnczfzako47346wosmu", (string)cid);
+ var blockStat = ipfs.Block.PutAsync(new MemoryStream(blob)).Result;
+ Assert.AreEqual("bafkreiaxnnnb7qz2focittuqq3ya25q7rcv3bqynnczfzako47346wosmu", (string)blockStat.Id);
- var data = ipfs.Block.GetAsync(cid).Result;
+ var data = ipfs.Block.GetAsync(blockStat.Id).Result;
Assert.AreEqual(blob.Length, data.Length);
CollectionAssert.AreEqual(blob, data);
}
@@ -93,10 +93,10 @@ public void Put_Stream_ContentType()
[TestMethod]
public void Put_Stream_Hash()
{
- var cid = ipfs.Block.PutAsync(new MemoryStream(blob), "raw", "sha2-512").Result;
- Assert.AreEqual("bafkrgqelljziv4qfg5mefz36m2y3h6voaralnw6lwb4f53xcnrf4mlsykkn7vt6eno547tw5ygcz62kxrle45wnbmpbofo5tvu57jvuaf7k7e", (string)cid);
+ var blockStat = ipfs.Block.PutAsync(new MemoryStream(blob), "raw", "sha2-512").Result;
+ Assert.AreEqual("bafkrgqelljziv4qfg5mefz36m2y3h6voaralnw6lwb4f53xcnrf4mlsykkn7vt6eno547tw5ygcz62kxrle45wnbmpbofo5tvu57jvuaf7k7e", (string)blockStat.Id);
- var data = ipfs.Block.GetAsync(cid).Result;
+ var data = ipfs.Block.GetAsync(blockStat.Id).Result;
Assert.AreEqual(blob.Length, data.Length);
CollectionAssert.AreEqual(blob, data);
}
@@ -105,14 +105,14 @@ public void Put_Stream_Hash()
public void Put_Stream_Pinned()
{
var data1 = new MemoryStream(new byte[] { 23, 24, 127 });
- var cid1 = ipfs.Block.PutAsync(data1, contentType: "raw", pin: true).Result;
+ var cid1 = ipfs.Block.PutAsync(data1, pin: true).Result;
var pins = ipfs.Pin.ListAsync().Result;
- Assert.IsTrue(pins.Any(pin => pin == cid1));
+ Assert.IsTrue(pins.Any(pin => pin == cid1.Id));
var data2 = new MemoryStream(new byte[] { 123, 124, 27 });
- var cid2 = ipfs.Block.PutAsync(data2, contentType: "raw", pin: false).Result;
+ var cid2 = ipfs.Block.PutAsync(data2, pin: false).Result;
pins = ipfs.Pin.ListAsync().Result;
- Assert.IsFalse(pins.Any(pin => pin == cid2));
+ Assert.IsFalse(pins.Any(pin => pin == cid2.Id));
}
[TestMethod]
diff --git a/test/CoreApi/FileSystemApiTest.cs b/test/CoreApi/FileSystemApiTest.cs
index 6df79cb..7c2e905 100644
--- a/test/CoreApi/FileSystemApiTest.cs
+++ b/test/CoreApi/FileSystemApiTest.cs
@@ -125,153 +125,19 @@ public async Task Add_Wrap()
}
[TestMethod]
- public async Task Add_SizeChunking()
+ public async Task GetTar_EmptyDirectory()
{
var ipfs = TestFixture.Ipfs;
- var options = new AddFileOptions
- {
- ChunkSize = 3
- };
- options.Pin = true;
- var node = await ipfs.FileSystem.AddTextAsync("hello world", options);
- Assert.AreEqual("QmVVZXWrYzATQdsKWM4knbuH5dgHFmrRqW3nJfDgdWrBjn", (string)node.Id);
- Assert.AreEqual(false, node.IsDirectory);
-
- var links = (await ipfs.Object.LinksAsync(node.Id)).ToArray();
- Assert.AreEqual(4, links.Length);
- Assert.AreEqual("QmevnC4UDUWzJYAQtUSQw4ekUdqDqwcKothjcobE7byeb6", (string)links[0].Id);
- Assert.AreEqual("QmTdBogNFkzUTSnEBQkWzJfQoiWbckLrTFVDHFRKFf6dcN", (string)links[1].Id);
- Assert.AreEqual("QmPdmF1n4di6UwsLgW96qtTXUsPkCLN4LycjEUdH9977d6", (string)links[2].Id);
- Assert.AreEqual("QmXh5UucsqF8XXM8UYQK9fHXsthSEfi78kewr8ttpPaLRE", (string)links[3].Id);
-
- var text = await ipfs.FileSystem.ReadAllTextAsync(node.Id);
- Assert.AreEqual("hello world", text);
- }
-
- [TestMethod]
- public async Task Add_Raw()
- {
- var ipfs = TestFixture.Ipfs;
- var options = new AddFileOptions
- {
- RawLeaves = true
- };
- var node = await ipfs.FileSystem.AddTextAsync("hello world", options);
- Assert.AreEqual("bafkreifzjut3te2nhyekklss27nh3k72ysco7y32koao5eei66wof36n5e", (string)node.Id);
- Assert.AreEqual(11, node.Size);
-
- var text = await ipfs.FileSystem.ReadAllTextAsync(node.Id);
- Assert.AreEqual("hello world", text);
- }
-
- [TestMethod]
- public async Task Add_RawAndChunked()
- {
- var ipfs = TestFixture.Ipfs;
- var options = new AddFileOptions
- {
- RawLeaves = true,
- ChunkSize = 3
- };
- var node = await ipfs.FileSystem.AddTextAsync("hello world", options);
- Assert.AreEqual("QmUuooB6zEhMmMaBvMhsMaUzar5gs5KwtVSFqG4C1Qhyhs", (string)node.Id);
- Assert.AreEqual(false, node.IsDirectory);
-
- var links = (await ipfs.Object.LinksAsync(node.Id)).ToArray();
- Assert.AreEqual(4, links.Length);
- Assert.AreEqual("bafkreigwvapses57f56cfow5xvoua4yowigpwcz5otqqzk3bpcbbjswowe", (string)links[0].Id);
- Assert.AreEqual("bafkreiew3cvfrp2ijn4qokcp5fqtoknnmr6azhzxovn6b3ruguhoubkm54", (string)links[1].Id);
- Assert.AreEqual("bafkreibsybcn72tquh2l5zpim2bba4d2kfwcbpzuspdyv2breaq5efo7tq", (string)links[2].Id);
- Assert.AreEqual("bafkreihfuch72plvbhdg46lef3n5zwhnrcjgtjywjryyv7ffieyedccchu", (string)links[3].Id);
-
- var text = await ipfs.FileSystem.ReadAllTextAsync(node.Id);
- Assert.AreEqual("hello world", text);
- }
-
- [TestMethod]
- public void AddDirectory()
- {
- var ipfs = TestFixture.Ipfs;
- var temp = MakeTemp();
- try
- {
- var dir = ipfs.FileSystem.AddDirectoryAsync(temp, false).Result;
- Assert.IsTrue(dir.IsDirectory);
-
- var files = dir.Links.ToArray();
- Assert.AreEqual(2, files.Length);
- Assert.AreEqual("alpha.txt", files[0].Name);
- Assert.AreEqual("beta.txt", files[1].Name);
-
- Assert.AreEqual("alpha", ipfs.FileSystem.ReadAllTextAsync(files[0].Id).Result);
- Assert.AreEqual("beta", ipfs.FileSystem.ReadAllTextAsync(files[1].Id).Result);
-
- Assert.AreEqual("alpha", ipfs.FileSystem.ReadAllTextAsync(dir.Id + "/alpha.txt").Result);
- Assert.AreEqual("beta", ipfs.FileSystem.ReadAllTextAsync(dir.Id + "/beta.txt").Result);
- }
- finally
- {
- DeleteTemp(temp);
- }
- }
-
- [TestMethod]
- public void AddDirectoryRecursive()
- {
- var ipfs = TestFixture.Ipfs;
- var temp = MakeTemp();
+ var temp = Path.Combine(Path.GetTempPath(), Path.GetRandomFileName());
+ Directory.CreateDirectory(temp);
try
{
- var dir = ipfs.FileSystem.AddDirectoryAsync(temp, true).Result;
- Assert.IsTrue(dir.IsDirectory);
-
- var files = dir.Links.ToArray();
- Assert.AreEqual(3, files.Length);
- Assert.AreEqual("alpha.txt", files[0].Name);
- Assert.AreEqual("beta.txt", files[1].Name);
- Assert.AreEqual("x", files[2].Name);
- Assert.AreNotEqual(0, files[0].Size);
- Assert.AreNotEqual(0, files[1].Size);
+ IFileSystemNode dir = null;
+ await foreach (var item in ipfs.FileSystem.AddAsync([], [], null, default))
+ dir = item;
- var xfiles = new FileSystemNode
- {
- Id = files[2].Id,
- }.Links.ToArray();
- Assert.AreEqual(2, xfiles.Length);
- Assert.AreEqual("x.txt", xfiles[0].Name);
- Assert.AreEqual("y", xfiles[1].Name);
-
- var yfiles = new FileSystemNode
- {
- Id = xfiles[1].Id,
- }.Links.ToArray();
- Assert.AreEqual(1, yfiles.Length);
- Assert.AreEqual("y.txt", yfiles[0].Name);
+ var dirid = dir.Id.Encode();
- var y = new FileSystemNode
- {
- Id = yfiles[0].Id,
- };
-
- Assert.AreEqual("y", ipfs.FileSystem.ReadAllTextAsync(dir.Id + "/x/y/y.txt").Result);
- }
- finally
- {
- DeleteTemp(temp);
- }
- }
-
- [TestMethod]
- public async Task GetTar_EmptyDirectory()
- {
- var ipfs = TestFixture.Ipfs;
- var temp = Path.Combine(Path.GetTempPath(), Path.GetRandomFileName());
- Directory.CreateDirectory(temp);
- try
- {
- var dir = ipfs.FileSystem.AddDirectoryAsync(temp, true).Result;
- var dirid = dir.Id.Encode();
-
using (var tar = await ipfs.FileSystem.GetAsync(dir.Id))
{
var buffer = new byte[3 * 512];
@@ -283,12 +149,12 @@ public async Task GetTar_EmptyDirectory()
offset += n;
}
Assert.AreEqual(-1, tar.ReadByte());
- }
- }
- finally
- {
- DeleteTemp(temp);
- }
+ }
+ }
+ finally
+ {
+ DeleteTemp(temp);
+ }
}
diff --git a/test/CoreApi/ObjectApiTest.cs b/test/CoreApi/ObjectApiTest.cs
deleted file mode 100644
index 122251a..0000000
--- a/test/CoreApi/ObjectApiTest.cs
+++ /dev/null
@@ -1,125 +0,0 @@
-using Microsoft.VisualStudio.TestTools.UnitTesting;
-using System.Linq;
-using System.Text;
-using System.Threading;
-using System.Threading.Tasks;
-
-namespace Ipfs.Http
-{
- [TestClass]
- public class ObjectApiTest
- {
- private IpfsClient ipfs = TestFixture.Ipfs;
-
- [TestMethod]
- public async Task New_Template_Null()
- {
- var node = await ipfs.Object.NewAsync();
- Assert.AreEqual("QmdfTbBqBPQ7VNxZEYEj14VmRuZBkqFbiwReogJgS1zR1n", (string)node.Id);
- }
-
- [TestMethod]
- public async Task New_Template_UnixfsDir()
- {
- var node = await ipfs.Object.NewAsync("unixfs-dir");
- Assert.AreEqual("QmUNLLsPACCz1vLxQVkXqqLX5R1X345qqfHbsf67hvA3Nn", (string)node.Id);
-
- node = await ipfs.Object.NewDirectoryAsync();
- Assert.AreEqual("QmUNLLsPACCz1vLxQVkXqqLX5R1X345qqfHbsf67hvA3Nn", (string)node.Id);
-
- }
-
- [TestMethod]
- public async Task Put_Get_Dag()
- {
- var adata = Encoding.UTF8.GetBytes("alpha");
- var bdata = Encoding.UTF8.GetBytes("beta");
- var alpha = new DagNode(adata);
- var beta = new DagNode(bdata, new[] { alpha.ToLink() });
- var x = await ipfs.Object.PutAsync(beta);
- var node = await ipfs.Object.GetAsync(x.Id);
- CollectionAssert.AreEqual(beta.DataBytes, node.DataBytes);
- Assert.AreEqual(beta.Links.Count(), node.Links.Count());
- Assert.AreEqual(beta.Links.First().Id, node.Links.First().Id);
- Assert.AreEqual(beta.Links.First().Name, node.Links.First().Name);
- Assert.AreEqual(beta.Links.First().Size, node.Links.First().Size);
- }
-
- [TestMethod]
- public async Task Put_Get_Data()
- {
- var adata = Encoding.UTF8.GetBytes("alpha");
- var bdata = Encoding.UTF8.GetBytes("beta");
- var alpha = new DagNode(adata);
- var beta = await ipfs.Object.PutAsync(bdata, new[] { alpha.ToLink() });
- var node = await ipfs.Object.GetAsync(beta.Id);
- CollectionAssert.AreEqual(beta.DataBytes, node.DataBytes);
- Assert.AreEqual(beta.Links.Count(), node.Links.Count());
- Assert.AreEqual(beta.Links.First().Id, node.Links.First().Id);
- Assert.AreEqual(beta.Links.First().Name, node.Links.First().Name);
- Assert.AreEqual(beta.Links.First().Size, node.Links.First().Size);
- }
-
- [TestMethod]
- public async Task Data()
- {
- var adata = Encoding.UTF8.GetBytes("alpha");
- var node = await ipfs.Object.PutAsync(adata);
- using (var stream = await ipfs.Object.DataAsync(node.Id))
- {
- var bdata = new byte[adata.Length];
- stream.Read(bdata, 0, bdata.Length);
- CollectionAssert.AreEqual(adata, bdata);
- }
- }
-
- [TestMethod]
- public async Task Links()
- {
- var adata = Encoding.UTF8.GetBytes("alpha");
- var bdata = Encoding.UTF8.GetBytes("beta");
- var alpha = new DagNode(adata);
- var beta = await ipfs.Object.PutAsync(bdata, new[] { alpha.ToLink() });
- var links = await ipfs.Object.LinksAsync(beta.Id);
- Assert.AreEqual(beta.Links.Count(), links.Count());
- Assert.AreEqual(beta.Links.First().Id, links.First().Id);
- Assert.AreEqual(beta.Links.First().Name, links.First().Name);
- Assert.AreEqual(beta.Links.First().Size, links.First().Size);
- }
-
- [TestMethod]
- public async Task Stat()
- {
- var data1 = Encoding.UTF8.GetBytes("Some data 1");
- var data2 = Encoding.UTF8.GetBytes("Some data 2");
- var node2 = new DagNode(data2);
- var node1 = await ipfs.Object.PutAsync(data1,
- new[] { node2.ToLink("some-link") });
- var info = await ipfs.Object.StatAsync(node1.Id);
- Assert.AreEqual(1, info.LinkCount);
- Assert.AreEqual(64, info.BlockSize);
- Assert.AreEqual(53, info.LinkSize);
- Assert.AreEqual(11, info.DataSize);
- Assert.AreEqual(77, info.CumulativeSize);
- }
-
- [TestMethod]
- public async Task Get_Nonexistent()
- {
- var data = Encoding.UTF8.GetBytes("Some data for net-ipfs-http-client-test that cannot be found");
- var node = new DagNode(data);
- var id = node.Id;
- var cs = new CancellationTokenSource(500);
- try
- {
- var _ = await ipfs.Object.GetAsync(id, cs.Token);
- Assert.Fail("Did not throw TaskCanceledException");
- }
- catch (TaskCanceledException)
- {
- return;
- }
- }
-
- }
-}
diff --git a/test/IpfsHttpClientTests.csproj b/test/IpfsHttpClientTests.csproj
index 707f186..dbfe841 100644
--- a/test/IpfsHttpClientTests.csproj
+++ b/test/IpfsHttpClientTests.csproj
@@ -1,7 +1,8 @@
- net6.0
+ net6.0
+ 12.0
false
full
diff --git a/test/MerkleNodeTest.cs b/test/MerkleNodeTest.cs
index 0ca4e2b..f2cac5f 100644
--- a/test/MerkleNodeTest.cs
+++ b/test/MerkleNodeTest.cs
@@ -24,14 +24,6 @@ public void Stringify()
Assert.AreEqual("/ipfs/" + IpfsInfo, node.ToString());
}
- [TestMethod]
- public void FromString()
- {
- var a = new MerkleNode(IpfsInfo);
- var b = (MerkleNode)IpfsInfo;
- Assert.AreEqual(a, b);
- }
-
[TestMethod]
public void NullHash()
{
@@ -47,7 +39,7 @@ public void FromALink()
var link = new MerkleNode(node.Links.First());
Assert.AreEqual(link.Id, node.Links.First().Id);
Assert.AreEqual(link.Name, node.Links.First().Name);
- Assert.AreEqual(link.BlockSize, node.Links.First().Size);
+ Assert.AreEqual(link.Size, node.Links.First().Size);
}
[TestMethod]
@@ -57,7 +49,7 @@ public void ToALink()
var link = node.ToLink();
Assert.AreEqual(link.Id, node.Id);
Assert.AreEqual(link.Name, node.Name);
- Assert.AreEqual(link.Size, node.BlockSize);
+ Assert.AreEqual(link.Size, node.Size);
}