Got at least one data fetching method working; turns out, we can't use a patched LogicStack to get the data
This commit is contained in:
12
Vendor/EmbedIO-3.5.2/Files/Internal/Base64Utility.cs
vendored
Normal file
12
Vendor/EmbedIO-3.5.2/Files/Internal/Base64Utility.cs
vendored
Normal file
@@ -0,0 +1,12 @@
|
||||
using System;
|
||||
|
||||
namespace EmbedIO.Files.Internal
|
||||
{
|
||||
internal static class Base64Utility
|
||||
{
|
||||
// long is 8 bytes
|
||||
// base64 of 8 bytes is 12 chars, but the last one is padding
|
||||
public static string LongToBase64(long value)
|
||||
=> Convert.ToBase64String(BitConverter.GetBytes(value)).Substring(0, 11);
|
||||
}
|
||||
}
|
||||
28
Vendor/EmbedIO-3.5.2/Files/Internal/EntityTag.cs
vendored
Normal file
28
Vendor/EmbedIO-3.5.2/Files/Internal/EntityTag.cs
vendored
Normal file
@@ -0,0 +1,28 @@
|
||||
using System;
|
||||
using System.Text;
|
||||
|
||||
namespace EmbedIO.Files.Internal
|
||||
{
|
||||
internal static class EntityTag
|
||||
{
|
||||
public static string Compute(DateTime lastModifiedUtc, long length, CompressionMethod compressionMethod)
|
||||
{
|
||||
var sb = new StringBuilder()
|
||||
.Append('"')
|
||||
.Append(Base64Utility.LongToBase64(lastModifiedUtc.Ticks))
|
||||
.Append(Base64Utility.LongToBase64(length));
|
||||
|
||||
switch (compressionMethod)
|
||||
{
|
||||
case CompressionMethod.Deflate:
|
||||
sb.Append('-').Append(CompressionMethodNames.Deflate);
|
||||
break;
|
||||
case CompressionMethod.Gzip:
|
||||
sb.Append('-').Append(CompressionMethodNames.Gzip);
|
||||
break;
|
||||
}
|
||||
|
||||
return sb.Append('"').ToString();
|
||||
}
|
||||
}
|
||||
}
|
||||
164
Vendor/EmbedIO-3.5.2/Files/Internal/FileCacheItem.cs
vendored
Normal file
164
Vendor/EmbedIO-3.5.2/Files/Internal/FileCacheItem.cs
vendored
Normal file
@@ -0,0 +1,164 @@
|
||||
using System;
|
||||
using EmbedIO.Internal;
|
||||
|
||||
namespace EmbedIO.Files.Internal
|
||||
{
|
||||
    // One cached file entry. Holds up to three representations of the same
    // content (uncompressed, gzipped, deflated) plus metadata, and keeps an
    // approximate count of its own in-memory size for cache-eviction purposes.
    internal sealed class FileCacheItem
    {
#pragma warning disable SA1401 // Field should be private - performance is a stronger concern here.
        // These fields create a sort of linked list of items
        // inside the cache's dictionary.
        // Their purpose is to keep track of items
        // in order from least to most recently used.
        internal string? PreviousKey;
        internal string? NextKey;
        internal long LastUsedAt;
#pragma warning restore SA1401

        // Size of a pointer in bytes
        private static readonly long SizeOfPointer = Environment.Is64BitProcess ? 8 : 4;

        // Size of a WeakReference<T> in bytes
        // NOTE(review): the ternary arms look swapped (16 bytes on 64-bit vs 32 on
        // 32-bit) — a 64-bit object is normally the larger one. Verify against
        // upstream EmbedIO before changing; only affects size estimation, not behavior.
        private static readonly long SizeOfWeakReference = Environment.Is64BitProcess ? 16 : 32;

        // Educated guess about the size of an Item in memory (see comments on constructor).
        // 3 * SizeOfPointer + total size of fields, rounded up to a multiple of 16.
        //
        // Computed as follows:
        //
        // * for 32-bit:
        //   - initialize count to 3 (number of "hidden" pointers that compose the object header)
        //   - for every field / auto property, in order of declaration:
        //     - increment count by 1 for reference types, 2 for long and DateTime
        //       (as of time of writing there are no fields of other types here)
        //     - increment again by 1 if this field "weighs" 1 and the next one "weighs" 2
        //       (padding for field alignment)
        //   - multiply count by 4 (size of a pointer)
        //   - if the result is not a multiple of 16, round it up to next multiple of 16
        //
        // * for 64-bit:
        //   - initialize count to 3 (number of "hidden" pointers that compose the object header)
        //   - for every field / auto property, in order of declaration, increment count by 1
        //     (at the time of writing there are no fields here that need padding on 64-bit)
        //   - multiply count by 8 (size of a pointer)
        //   - if the result is not a multiple of 16, round it up to next multiple of 16
        //
        // NOTE(review): following the recipe above, the 64-bit total should come out
        // larger than the 32-bit one (count x 8 vs count x 4), yet 96 < 128 here —
        // these arms also look swapped. Verify against upstream EmbedIO.
        private static readonly long SizeOfItem = Environment.Is64BitProcess ? 96 : 128;

        // Guards the three content fields so concurrent SetContent calls
        // swap buffers consistently.
        private readonly object _syncRoot = new object();

        // Used to update total size of section.
        // Weak reference avoids circularity.
        private readonly WeakReference<FileCache.Section> _section;

        // There are only 3 possible compression methods,
        // hence a dictionary (or two dictionaries) would be overkill.
        private byte[]? _uncompressedContent;
        private byte[]? _gzippedContent;
        private byte[]? _deflatedContent;

        // Creates an item belonging to the given cache section.
        // No content is stored yet; SizeInCache starts at the estimated
        // overhead of the item object itself plus its weak reference.
        internal FileCacheItem(FileCache.Section section, DateTime lastModifiedUtc, long length)
        {
            _section = new WeakReference<FileCache.Section>(section);

            LastModifiedUtc = lastModifiedUtc;
            Length = length;

            // There is no way to know the actual size of an object at runtime.
            // This method makes some educated guesses, based on the following
            // article (among others):
            // https://codingsight.com/precise-computation-of-clr-object-size/
            // PreviousKey and NextKey values aren't counted in
            // because they are just references to existing strings.
            SizeInCache = SizeOfItem + SizeOfWeakReference;
        }

        // Last-modified time of the cached resource, in UTC.
        public DateTime LastModifiedUtc { get; }

        // Length in bytes of the cached resource (not of this object).
        public long Length { get; }

        // This is the (approximate) in-memory size of this object.
        // It is NOT the length of the cache resource!
        public long SizeInCache { get; private set; }

        // Returns the content for the requested compression method,
        // converting from whichever representation is already cached;
        // returns null if no representation exists at all.
        public byte[]? GetContent(CompressionMethod compressionMethod)
        {
            // If there are both entity tag and content, use them.
            switch (compressionMethod)
            {
                case CompressionMethod.Deflate:
                    if (_deflatedContent != null) return _deflatedContent;
                    break;
                case CompressionMethod.Gzip:
                    if (_gzippedContent != null) return _gzippedContent;
                    break;
                default:
                    if (_uncompressedContent != null) return _uncompressedContent;
                    break;
            }

            // Try to convert existing content, if any.
            // Preference order: uncompressed, then gzipped, then deflated.
            byte[]? content;
            if (_uncompressedContent != null)
            {
                content = CompressionUtility.ConvertCompression(_uncompressedContent, CompressionMethod.None, compressionMethod);
            }
            else if (_gzippedContent != null)
            {
                content = CompressionUtility.ConvertCompression(_gzippedContent, CompressionMethod.Gzip, compressionMethod);
            }
            else if (_deflatedContent != null)
            {
                content = CompressionUtility.ConvertCompression(_deflatedContent, CompressionMethod.Deflate, compressionMethod);
            }
            else
            {
                // No content whatsoever.
                return null;
            }

            // Store the converted representation so the next request is a cache hit.
            return SetContent(compressionMethod, content);
        }

        // Stores (or replaces) the content for the given compression method,
        // updating this item's size estimate and the owning section's total.
        // Returns the stored content for caller convenience.
        public byte[]? SetContent(CompressionMethod compressionMethod, byte[]? content)
        {
            // This is the bare minimum locking we need
            // to ensure we don't mess sizes up.
            // NOTE(review): SizeInCache itself is updated outside the lock below;
            // by the comment above that appears deliberate (best-effort accounting) —
            // confirm before tightening.
            byte[]? oldContent;
            lock (_syncRoot)
            {
                switch (compressionMethod)
                {
                    case CompressionMethod.Deflate:
                        oldContent = _deflatedContent;
                        _deflatedContent = content;
                        break;
                    case CompressionMethod.Gzip:
                        oldContent = _gzippedContent;
                        _gzippedContent = content;
                        break;
                    default:
                        oldContent = _uncompressedContent;
                        _uncompressedContent = content;
                        break;
                }
            }

            // Propagate the size change to the owning section, if it is still alive.
            var sizeDelta = GetSizeOf(content) - GetSizeOf(oldContent);
            SizeInCache += sizeDelta;
            if (_section.TryGetTarget(out var section))
                section.UpdateTotalSize(sizeDelta);

            return content;
        }

        // Round up to a multiple of 16
        private static long RoundUpTo16(long n)
        {
            var remainder = n % 16;
            return remainder > 0 ? n + (16 - remainder) : n;
        }

        // The size of a byte array is 3 * SizeOfPointer + 1 (size of byte) * Length
        // NOTE(review): the rounding is applied to the header term only, not to the
        // whole sum — confirm whether that matches the intent of the comment above.
        private static long GetSizeOf(byte[]? arr) => arr == null ? 0 : RoundUpTo16(3 * SizeOfPointer) + arr.Length;
    }
|
||||
}
|
||||
73
Vendor/EmbedIO-3.5.2/Files/Internal/HtmlDirectoryLister.cs
vendored
Normal file
73
Vendor/EmbedIO-3.5.2/Files/Internal/HtmlDirectoryLister.cs
vendored
Normal file
@@ -0,0 +1,73 @@
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.Globalization;
|
||||
using System.IO;
|
||||
using System.Linq;
|
||||
using System.Net;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
using EmbedIO.Utilities;
|
||||
using Swan;
|
||||
|
||||
namespace EmbedIO.Files.Internal
|
||||
{
|
||||
internal class HtmlDirectoryLister : IDirectoryLister
|
||||
{
|
||||
private static readonly Lazy<IDirectoryLister> LazyInstance = new Lazy<IDirectoryLister>(() => new HtmlDirectoryLister());
|
||||
|
||||
private HtmlDirectoryLister()
|
||||
{
|
||||
}
|
||||
|
||||
public static IDirectoryLister Instance => LazyInstance.Value;
|
||||
|
||||
public string ContentType { get; } = MimeType.Html + "; encoding=" + WebServer.DefaultEncoding.WebName;
|
||||
|
||||
public async Task ListDirectoryAsync(
|
||||
MappedResourceInfo info,
|
||||
string absoluteUrlPath,
|
||||
IEnumerable<MappedResourceInfo> entries,
|
||||
Stream stream,
|
||||
CancellationToken cancellationToken)
|
||||
{
|
||||
const int MaxEntryLength = 50;
|
||||
const int SizeIndent = -20; // Negative for right alignment
|
||||
|
||||
if (!info.IsDirectory)
|
||||
throw SelfCheck.Failure($"{nameof(HtmlDirectoryLister)}.{nameof(ListDirectoryAsync)} invoked with a file, not a directory.");
|
||||
|
||||
var encodedPath = WebUtility.HtmlEncode(absoluteUrlPath);
|
||||
using var text = new StreamWriter(stream, WebServer.DefaultEncoding);
|
||||
text.Write("<html><head><title>Index of ");
|
||||
text.Write(encodedPath);
|
||||
text.Write("</title></head><body><h1>Index of ");
|
||||
text.Write(encodedPath);
|
||||
text.Write("</h1><hr/><pre>");
|
||||
|
||||
if (encodedPath.Length > 1)
|
||||
text.Write("<a href='../'>../</a>\n");
|
||||
|
||||
entries = entries.ToArray();
|
||||
|
||||
foreach (var directory in entries.Where(m => m.IsDirectory).OrderBy(e => e.Name))
|
||||
{
|
||||
text.Write($"<a href=\"{Uri.EscapeDataString(directory.Name)}\">{WebUtility.HtmlEncode(directory.Name)}</a>");
|
||||
text.Write(new string(' ', Math.Max(1, MaxEntryLength - directory.Name.Length + 1)));
|
||||
text.Write(HttpDate.Format(directory.LastModifiedUtc));
|
||||
text.Write('\n');
|
||||
await Task.Yield();
|
||||
}
|
||||
|
||||
foreach (var file in entries.Where(m => m.IsFile).OrderBy(e => e.Name))
|
||||
{
|
||||
text.Write($"<a href=\"{Uri.EscapeDataString(file.Name)}\">{WebUtility.HtmlEncode(file.Name)}</a>");
|
||||
text.Write(new string(' ', Math.Max(1, MaxEntryLength - file.Name.Length + 1)));
|
||||
text.Write(HttpDate.Format(file.LastModifiedUtc));
|
||||
text.Write($" {file.Length.ToString("#,###", CultureInfo.InvariantCulture),SizeIndent}\n");
|
||||
await Task.Yield();
|
||||
}
|
||||
|
||||
text.Write("</pre><hr/></body></html>");
|
||||
}
|
||||
}
|
||||
}
|
||||
8
Vendor/EmbedIO-3.5.2/Files/Internal/MappedResourceInfoExtensions.cs
vendored
Normal file
8
Vendor/EmbedIO-3.5.2/Files/Internal/MappedResourceInfoExtensions.cs
vendored
Normal file
@@ -0,0 +1,8 @@
|
||||
namespace EmbedIO.Files.Internal
|
||||
{
|
||||
internal static class MappedResourceInfoExtensions
|
||||
{
|
||||
public static string GetEntityTag(this MappedResourceInfo @this, CompressionMethod compressionMethod)
|
||||
=> EntityTag.Compute(@this.LastModifiedUtc, @this.Length, compressionMethod);
|
||||
}
|
||||
}
|
||||
Reference in New Issue
Block a user