Compare commits

...

15 Commits

Author SHA1 Message Date
metalgearsloth
e44e4ac7ed Version: 131.1.0 2023-06-25 22:00:17 +10:00
metalgearsloth
581ef074a0 Add method to get random variant tile (#4153) 2023-06-25 21:52:31 +10:00
metalgearsloth
9bb61b8a35 Add NextByte to random methods (#4151) 2023-06-25 21:52:15 +10:00
Leon Friedrich
e7497c7e4f Fix replay component state bug (#4157) 2023-06-25 21:51:15 +10:00
rene-descartes2021
7bc54d8f73 Cleanup redundant parentheses (#4155) 2023-06-25 21:51:00 +10:00
Pieter-Jan Briers
1631d93e41 Remove AggressiveOptimization attributes.
This isn't a magic "make code go fast" button, none of these usages were correct.
2023-06-25 09:33:10 +02:00
moonheart08
a32ff39516 Version: 131.0.0 2023-06-24 18:57:48 -05:00
Pieter-Jan Briers
2715581f48 Replays: final boss (#4156)
* Add ISawmill.Verbose log helpers

* Verbose logs for server-side networking handshake.

* Replays: final boss

It does zippies now. It fucking works. Amazingly, in fact.

* Add ZipArchive to sandbox
2023-06-24 18:56:37 -05:00
metalgearsloth
061c4352c6 Add SpawnEntities to entitymanager (#4152) 2023-06-23 10:55:28 +10:00
Leon Friedrich
8c5a34f538 Fix bad gamestate asserts (#4149) 2023-06-23 09:48:06 +10:00
Leon Friedrich
55d99155e0 Better deserialization error messages (#4150) 2023-06-21 09:43:49 +10:00
ElectroJr
4c994eb599 Version: 130.0.0 2023-06-18 17:03:02 -04:00
ElectroJr
25fa6b2b2e Change default engine version cvar 2023-06-18 17:00:39 -04:00
ElectroJr
530321bcb6 Version: 0.129.0.1 2023-06-18 15:39:47 -04:00
ElectroJr
da860d4f56 Update Version.py to remove leading zero. 2023-06-18 13:43:32 -04:00
43 changed files with 1140 additions and 472 deletions

View File

@@ -1,4 +1,4 @@
<Project>
<!-- This file automatically reset by Tools/version.py -->
<!-- This file automatically reset by Tools/version.py -->

View File

@@ -54,6 +54,47 @@ END TEMPLATE-->
*None yet*
## 131.1.0
### New features
* Add NextByte method to random.
* Add method to get a random tile variant.
### Bugfixes
* Fix replay component state bug.
### Internal
* Remove some AggressiveOptimization attributes.
## 131.0.0
### Breaking changes
* `IWritableDirProvider` async functions have been removed.
* Replay recording & load API has been reworked to operate on zip files instead.
* Constants on `IReplayRecordingManager` have been moved to a new `ReplayConstants` class, renamed and values changed.
### New features
* Added `ISawmill.Verbose()` log functions.
* Replays are now written as `.zip` files. These will be [content bundles](https://docs.spacestation14.io/en/launcher/content-bundles) directly executable by the launcher if the server has the necessary build information.
* Client replays now use local time rather than UTC as default file name.
## 130.0.0
### Breaking changes
* Engine versions will no longer start with a leading 0.
## 0.129.0.1
## 129.0.0
### Breaking changes

View File

@@ -1,3 +1,4 @@
using System.IO.Compression;
using System.Linq;
using JetBrains.Annotations;
using Robust.Client.Replays.Loading;
@@ -42,15 +43,17 @@ public sealed class ReplayLoadCommand : BaseReplayCommand
return;
}
var dir = new ResPath(_cfg.GetCVar(CVars.ReplayDirectory)) / args[0];
var file = dir / IReplayRecordingManager.MetaFile;
var file = new ResPath(_cfg.GetCVar(CVars.ReplayDirectory)) / args[0];
if (!_resMan.UserData.Exists(file))
{
shell.WriteError(Loc.GetString("cmd-error-file-not-found", ("file", file)));
return;
}
_loadMan.LoadAndStartReplay(_resMan.UserData, dir);
var stream = _resMan.UserData.OpenRead(file);
var provider = new ReplayFileReaderZip(new ZipArchive(stream), ReplayConstants.ReplayZipFolder);
_loadMan.LoadAndStartReplay(provider);
}
public override CompletionResult GetCompletion(IConsoleShell shell, string[] args)
@@ -61,7 +64,6 @@ public sealed class ReplayLoadCommand : BaseReplayCommand
var dir = new ResPath(_cfg.GetCVar(CVars.ReplayDirectory)) / args[0];
dir = dir.ToRootedPath();
var opts = CompletionHelper.UserFilePath(dir.CanonPath, _resMan.UserData);
opts = opts.Where(x => _resMan.UserData.IsDir(new ResPath(x.Value)));
return CompletionResult.FromHintOptions(opts, Loc.GetString("cmd-replay-load-hint"));
}

View File

@@ -0,0 +1,140 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.IO.Compression;
using Robust.Shared.ContentPack;
using Robust.Shared.Utility;
namespace Robust.Client.Replays.Loading;
/// <summary>
/// Simple interface that the replay system loads files from.
/// </summary>
/// <remarks>
/// Instances are disposed by the replay load manager once loading has finished,
/// so a reader should be handed over and not reused afterwards.
/// </remarks>
public interface IReplayFileReader : IDisposable
{
    /// <summary>
    /// Check whether a file exists in the replay data.
    /// </summary>
    /// <param name="path">The path to check. Doesn't need to be rooted.</param>
    /// <returns>True if the file exists.</returns>
    bool Exists(ResPath path);

    /// <summary>
    /// Open a file in the replay data.
    /// </summary>
    /// <param name="path">The path to the file. Doesn't need to be rooted.</param>
    /// <returns>A stream containing the file contents.</returns>
    /// <exception cref="FileNotFoundException">Thrown if the file does not exist.</exception>
    Stream Open(ResPath path);

    /// <summary>
    /// Returns all files in the replay data.
    /// </summary>
    /// <remarks>
    /// File paths are rooted.
    /// </remarks>
    IEnumerable<ResPath> AllFiles { get; }
}
/// <summary>
/// Replay file reader that loads files from the VFS (<see cref="IResourceManager"/>).
/// </summary>
public sealed class ReplayFileReaderResources : IReplayFileReader
{
    private readonly IResourceManager _resourceManager;
    private readonly ResPath _prefix;

    /// <param name="resourceManager">The resource manager.</param>
    /// <param name="prefix">The directory in the VFS that contains the replay files. Must be rooted.</param>
    public ReplayFileReaderResources(IResourceManager resourceManager, ResPath prefix)
    {
        _resourceManager = resourceManager;
        _prefix = prefix;
    }

    public bool Exists(ResPath path)
    {
        return _resourceManager.ContentFileExists(GetPath(path));
    }

    public Stream Open(ResPath path)
    {
        return _resourceManager.ContentFileRead(GetPath(path));
    }

    public IEnumerable<ResPath> AllFiles
    {
        get
        {
            foreach (var path in _resourceManager.ContentFindRelativeFiles(_prefix))
            {
                // The interface contract states that AllFiles returns rooted paths
                // (and ReplayFileReaderZip does exactly that). ContentFindRelativeFiles
                // yields relative paths, so root them here instead of the previous
                // no-op ToRelativePath() call.
                yield return path.ToRootedPath();
            }
        }
    }

    // Map an interface-level (possibly rooted) path to its location under _prefix.
    private ResPath GetPath(ResPath path) => _prefix / path.ToRelativePath();

    public void Dispose()
    {
        // Don't need to do anything: the underlying resource manager is owned elsewhere.
    }
}
/// <summary>
/// Replay file reader that loads files from a zip file.
/// </summary>
/// <remarks>
/// The zip archive is disposed when this instance is disposed.
/// </remarks>
public sealed class ReplayFileReaderZip : IReplayFileReader
{
    private readonly ZipArchive _archive;
    private readonly ResPath _prefix;

    /// <param name="archive">The archive to read files from.</param>
    /// <param name="prefix">The directory in the zip that contains the replay files. Must NOT be rooted.</param>
    public ReplayFileReaderZip(ZipArchive archive, ResPath prefix)
    {
        _archive = archive;
        _prefix = prefix;
    }

    public bool Exists(ResPath path) => GetEntry(path) is not null;

    public Stream Open(ResPath path)
    {
        if (GetEntry(path) is not { } entry)
            throw new FileNotFoundException();

        return entry.Open();
    }

    public IEnumerable<ResPath> AllFiles
    {
        get
        {
            foreach (var entry in _archive.Entries)
            {
                var fullName = entry.FullName;

                // Directory entries in a zip end with a separator; skip them.
                if (fullName.EndsWith("/"))
                    continue;

                // Only surface entries under the replay prefix, rooted per the
                // interface contract.
                if (new ResPath(fullName).TryRelativeTo(_prefix, out var relative))
                    yield return relative.Value.ToRootedPath();
            }
        }
    }

    // Resolve an interface-level path to its zip entry, or null if absent.
    private ZipArchiveEntry? GetEntry(ResPath path)
    {
        var entryName = (_prefix / path.ToRelativePath()).ToString();
        return _archive.GetEntry(entryName);
    }

    public void Dispose() => _archive.Dispose();
}

View File

@@ -17,9 +17,8 @@ public interface IReplayLoadManager
/// <summary>
/// Load metadata information from a replay's yaml file.
/// </summary>
/// <param name="dir">A directory containing the replay files.</param>
/// <param name="path">The path to the replay's subdirectory.</param>
public MappingDataNode? LoadYamlMetadata(IWritableDirProvider dir, ResPath path);
/// <param name="fileReader">A reader containing the replay files. Disposed when loading is done.</param>
public MappingDataNode? LoadYamlMetadata(IReplayFileReader fileReader);
/// <summary>
/// Async task that loads up a replay for playback. Note that this will have some side effects, such as loading
@@ -29,11 +28,10 @@ public interface IReplayLoadManager
/// This task is intended to be used with a <see cref="Job{T}"/> so that the loading can happen over several frame
/// updates.
/// </remarks>
/// <param name="dir">A directory containing the replay data that should be loaded.</param>
/// <param name="path">The path to the replay's subdirectory.</param>
/// <param name="fileReader">A reader containing the replay files. Disposed when loading is done.</param>
/// <param name="callback">A callback delegate that is invoked to provide information about the current loading
/// progress. This callback can be used to invoke <see cref="Job{T}.SuspendIfOutOfTime"/>. </param>
Task<ReplayData> LoadReplayAsync(IWritableDirProvider dir, ResPath path, LoadReplayCallback callback);
Task<ReplayData> LoadReplayAsync(IReplayFileReader fileReader, LoadReplayCallback callback);
/// <summary>
/// Async task that loads the initial state of a replay, including spawning and initializing all entities. Note that
@@ -55,17 +53,16 @@ public interface IReplayLoadManager
/// This task is intended to be used with a <see cref="Job{T}"/> so that the loading can happen over several frame
/// updates.
/// </remarks>
/// <param name="dir">A directory containing the replay files.</param>
/// <param name="path">The path to the replay's subdirectory.</param>
/// <param name="fileReader">A reader containing the replay files. Disposed when loading is done.</param>
/// <param name="callback">A callback delegate that is invoked to provide information about the current loading
/// progress. This callback can be used to invoke <see cref="Job{T}.SuspendIfOutOfTime"/>. </param>
Task LoadAndStartReplayAsync(IWritableDirProvider dir, ResPath path, LoadReplayCallback? callback = null);
Task LoadAndStartReplayAsync(IReplayFileReader fileReader, LoadReplayCallback? callback = null);
/// <summary>
/// This is a variant of <see cref="LoadAndStartReplayAsync"/> that will first invoke <see cref="LoadOverride"/>
/// before defaulting to simply running <see cref="LoadAndStartReplayAsync"/> synchronously.
/// </summary>
void LoadAndStartReplay(IWritableDirProvider resManUserData, ResPath dir);
void LoadAndStartReplay(IReplayFileReader fileReader);
/// <summary>
/// Event that can be used to override the default replay loading behaviour.
@@ -74,7 +71,7 @@ public interface IReplayLoadManager
/// E.g., this could be used to make the <see cref="ReplayLoadCommand"/> switch to some loading screen with an async
/// load job, rather than just hanging the client.
/// </remarks>
event Action<IWritableDirProvider, ResPath>? LoadOverride;
event Action<IReplayFileReader>? LoadOverride;
}
public delegate Task LoadReplayCallback(float current, float max, LoadingState state, bool forceSuspend);

View File

@@ -12,25 +12,22 @@ namespace Robust.Client.Replays.Loading;
[Virtual]
public class LoadReplayJob : Job<bool>
{
private readonly IWritableDirProvider _dir;
private readonly ResPath _path;
private readonly IReplayFileReader _fileReader;
private readonly IReplayLoadManager _loadMan;
public LoadReplayJob(
float maxTime,
IWritableDirProvider dir,
ResPath path,
IReplayFileReader fileReader,
IReplayLoadManager loadMan)
: base(maxTime)
{
_dir = dir;
_path = path;
_fileReader = fileReader;
_loadMan = loadMan;
}
protected override async Task<bool> Process()
{
await _loadMan.LoadAndStartReplayAsync(_dir, _path, Yield);
await _loadMan.LoadAndStartReplayAsync(_fileReader, Yield);
return true;
}

View File

@@ -331,7 +331,7 @@ public sealed partial class ReplayLoadManager
{
var existing = combined[index];
if (!newCompStates.TryGetValue(existing.NetID, out var newCompState))
if (!newCompStates.Remove(existing.NetID, out var newCompState))
continue;
if (newCompState.State is not IComponentDeltaState delta || delta.FullState)
@@ -344,6 +344,14 @@ public sealed partial class ReplayLoadManager
combined[index] = new ComponentChange(existing.NetID, delta.CreateNewFullState(existing.State), newCompState.LastModifiedTick);
}
foreach (var compChange in newCompStates.Values)
{
// I'm not 100% sure about this, but I think delta states should always be full states here?
DebugTools.Assert(compChange.State is not IComponentDeltaState delta || delta.FullState);
combined.Add(compChange);
}
DebugTools.Assert(newState.NetComponents == null || newState.NetComponents.Count == combined.Count);
return new EntityState(newState.Uid, combined, newState.EntityLastModified, newState.NetComponents ?? oldNetComps);
}

View File

@@ -1,7 +1,6 @@
using System;
using System.Collections.Generic;
using System.Diagnostics.CodeAnalysis;
using Robust.Shared.ContentPack;
using Robust.Shared.GameStates;
using Robust.Shared.Serialization;
using Robust.Shared.Serialization.Markdown;
@@ -13,15 +12,17 @@ using System.IO;
using System.Linq;
using System.Threading.Tasks;
using Robust.Shared.Replays;
using static Robust.Shared.Replays.IReplayRecordingManager;
using static Robust.Shared.Replays.ReplayConstants;
namespace Robust.Client.Replays.Loading;
public sealed partial class ReplayLoadManager
{
[SuppressMessage("ReSharper", "UseAwaitUsing")]
public async Task<ReplayData> LoadReplayAsync(IWritableDirProvider dir, ResPath path, LoadReplayCallback callback)
public async Task<ReplayData> LoadReplayAsync(IReplayFileReader fileReader, LoadReplayCallback callback)
{
using var _ = fileReader;
if (_client.RunLevel == ClientRunLevel.Initialize)
_client.StartSinglePlayer();
else if (_client.RunLevel != ClientRunLevel.SinglePlayerGame)
@@ -32,31 +33,21 @@ public sealed partial class ReplayLoadManager
List<ReplayMessage> messages = new();
var compressionContext = new ZStdCompressionContext();
var metaData = LoadMetadata(dir, path);
var metaData = LoadMetadata(fileReader);
// Strip tailing "/"
// why is there no method for this.
if (path.CanonPath.EndsWith("/"))
path = new(path.CanonPath.Substring(0, path.CanonPath.Length - 1));
var total = dir.Find($"{path.ToRelativePath()}/*.{Ext}").files.Count();
// Exclude string & init event files from the total.
total--;
if (dir.Exists(path / InitFile))
total--;
var totalData = fileReader.AllFiles.Count(x => x.Filename.StartsWith(DataFilePrefix));
var i = 0;
var intBuf = new byte[4];
var name = path / $"{i++}.{Ext}";
while (dir.Exists(name))
var name = new ResPath($"{DataFilePrefix}{i++}.{Ext}");
while (fileReader.Exists(name))
{
await callback(i+1, total, LoadingState.ReadingFiles, false);
await callback(i+1, totalData, LoadingState.ReadingFiles, false);
using var fileStream = dir.OpenRead(name);
using var fileStream = fileReader.Open(name);
using var decompressStream = new ZStdDecompressStream(fileStream, false);
fileStream.Read(intBuf);
fileStream.ReadExactly(intBuf);
var uncompressedSize = BitConverter.ToInt32(intBuf);
var decompressedStream = new MemoryStream(uncompressedSize);
@@ -72,12 +63,16 @@ public sealed partial class ReplayLoadManager
messages.Add(msg);
}
name = path / $"{i++}.{Ext}";
name = new ResPath($"{DataFilePrefix}{i++}.{Ext}");
}
DebugTools.Assert(i - 1 == total);
await callback(total, total, LoadingState.ReadingFiles, false);
var initData = LoadInitFile(dir, path, compressionContext);
// Could happen if there's gaps in the numbers of the data.
if (i - 1 != totalData)
throw new Exception("Could not read expected amount of data files from replay");
await callback(totalData, totalData, LoadingState.ReadingFiles, false);
var initData = LoadInitFile(fileReader, compressionContext);
compressionContext.Dispose();
var (checkpoints, serverTime) = await GenerateCheckpointsAsync(
@@ -101,54 +96,67 @@ public sealed partial class ReplayLoadManager
}
private ReplayMessage? LoadInitFile(
IWritableDirProvider dir,
ResPath path,
IReplayFileReader fileReader,
ZStdCompressionContext compressionContext)
{
if (!dir.Exists(path / InitFile))
if (!fileReader.Exists(FileInit))
return null;
// TODO replays compress init messages, then decompress them here.
using var fileStream = dir.OpenRead(path / InitFile);
using var fileStream = fileReader.Open(FileInit);
_serializer.DeserializeDirect(fileStream, out ReplayMessage initData);
return initData;
}
public MappingDataNode? LoadYamlMetadata(IWritableDirProvider directory, ResPath resPath)
public MappingDataNode? LoadYamlMetadata(IReplayFileReader fileReader)
{
if (!directory.Exists(resPath / MetaFile))
return LoadYamlFile(fileReader, FileMeta);
}
public MappingDataNode? LoadYamlFinalMetadata(IReplayFileReader fileReader)
{
return LoadYamlFile(fileReader, FileMetaFinal);
}
private static MappingDataNode? LoadYamlFile(IReplayFileReader fileReader, ResPath path)
{
if (!fileReader.Exists(path))
return null;
using var file = directory.OpenRead(resPath / MetaFile);
using var file = fileReader.Open(path);
var parsed = DataNodeParser.ParseYamlStream(new StreamReader(file));
return parsed.FirstOrDefault()?.Root as MappingDataNode;
}
private (MappingDataNode YamlData, HashSet<string> CVars, TimeSpan Duration, TimeSpan StartTime, bool ClientSide)
LoadMetadata(IWritableDirProvider directory, ResPath path)
LoadMetadata(IReplayFileReader fileReader)
{
_sawmill.Info($"Reading replay metadata");
var data = LoadYamlMetadata(directory, path);
var data = LoadYamlMetadata(fileReader);
if (data == null)
throw new Exception("Failed to parse yaml metadata");
throw new Exception("Failed to load yaml metadata");
var typeHash = Convert.FromHexString(((ValueDataNode) data[Hash]).Value);
var stringHash = Convert.FromHexString(((ValueDataNode) data[Strings]).Value);
var startTick = ((ValueDataNode) data[Tick]).Value;
var timeBaseTick = ((ValueDataNode) data[BaseTick]).Value;
var timeBaseTimespan = ((ValueDataNode) data[BaseTime]).Value;
var clientSide = bool.Parse(((ValueDataNode) data[IsClient]).Value);
var duration = TimeSpan.Parse(((ValueDataNode) data[Duration]).Value);
var finalData = LoadYamlFinalMetadata(fileReader);
if (finalData == null)
throw new Exception("Failed to load final yaml metadata");
var typeHash = Convert.FromHexString(((ValueDataNode) data[MetaKeyTypeHash]).Value);
var stringHash = Convert.FromHexString(((ValueDataNode) data[MetaKeyStringHash]).Value);
var startTick = ((ValueDataNode) data[MetaKeyStartTick]).Value;
var timeBaseTick = ((ValueDataNode) data[MetaKeyBaseTick]).Value;
var timeBaseTimespan = ((ValueDataNode) data[MetaKeyBaseTime]).Value;
var clientSide = bool.Parse(((ValueDataNode) data[MetaKeyIsClientRecording]).Value);
var duration = TimeSpan.Parse(((ValueDataNode) finalData[MetaFinalKeyDuration]).Value);
if (!typeHash.SequenceEqual(_serializer.GetSerializableTypesHash()))
throw new Exception($"{nameof(IRobustSerializer)} hashes do not match. Loading replays using a bad replay-client version?");
using var stringFile = directory.OpenRead(path / StringsFile);
using var stringFile = fileReader.Open(FileStrings);
var stringData = new byte[stringFile.Length];
stringFile.Read(stringData);
stringFile.ReadExactly(stringData);
_serializer.SetStringSerializerPackage(stringHash, stringData);
using var cvarsFile = directory.OpenRead(path / CvarFile);
using var cvarsFile = fileReader.Open(FileCvars);
// Note, this does not invoke the received-initial-cvars event. But at least currently, that doesn't matter
var cvars = _confMan.LoadFromTomlStream(cvarsFile);

View File

@@ -13,23 +13,22 @@ namespace Robust.Client.Replays.Loading;
public sealed partial class ReplayLoadManager
{
public event Action<IWritableDirProvider, ResPath>? LoadOverride;
public event Action<IReplayFileReader>? LoadOverride;
public async void LoadAndStartReplay(IWritableDirProvider dir, ResPath path)
public async void LoadAndStartReplay(IReplayFileReader fileReader)
{
if (LoadOverride != null)
LoadOverride.Invoke(dir, path);
LoadOverride.Invoke(fileReader);
else
await LoadAndStartReplayAsync(dir, path);
await LoadAndStartReplayAsync(fileReader);
}
public async Task LoadAndStartReplayAsync(
IWritableDirProvider dir,
ResPath path,
IReplayFileReader fileReader,
LoadReplayCallback? callback = null)
{
callback ??= (_, _, _, _) => Task.CompletedTask;
var data = await LoadReplayAsync(dir, path, callback);
var data = await LoadReplayAsync(fileReader, callback);
await StartReplayAsync(data, callback);
}

View File

@@ -42,7 +42,7 @@ internal sealed class ReplayRecordingManager : SharedReplayRecordingManager
// Add information about the user doing the recording. This is used to set the default replay observer position
// when playing back the replay.
var guid = _player.LocalPlayer.UserId.UserId.ToString();
metadata[IReplayRecordingManager.Recorder] = new ValueDataNode(guid);
metadata[ReplayConstants.MetaKeyRecordedBy] = new ValueDataNode(guid);
}
private void OnRunLevelChanged(object? sender, RunLevelChangedEventArgs e)
@@ -63,6 +63,12 @@ internal sealed class ReplayRecordingManager : SharedReplayRecordingManager
public override void RecordClientMessage(object obj)
=> RecordReplayMessage(obj);
protected override string DefaultReplayFileName()
{
// Local time
return DateTime.Now.ToString(DefaultReplayNameFormat);
}
public override void RecordServerMessage(object obj)
{
// Do nothing.

View File

@@ -851,7 +851,6 @@ internal sealed partial class PvsSystem : EntitySystem
return leftView.Count > 0 ? leftView : null;
}
[MethodImpl(MethodImplOptions.AggressiveOptimization)]
private void RecursivelyAddTreeNode(in EntityUid nodeIndex,
RobustTree<EntityUid> tree,
Dictionary<EntityUid, PvsEntityVisibility>? lastAcked,
@@ -1057,8 +1056,8 @@ internal sealed partial class PvsSystem : EntitySystem
continue;
DebugTools.Assert(md.EntityLifeStage >= EntityLifeStage.Initialized);
DebugTools.Assert(md.EntityLastModifiedTick >= md.CreationTick);
DebugTools.Assert(md.EntityLastModifiedTick > fromTick);
DebugTools.Assert(md.EntityLastModifiedTick >= md.CreationTick || md.EntityLastModifiedTick == GameTick.Zero);
DebugTools.Assert(md.EntityLastModifiedTick > fromTick || md.EntityLastModifiedTick == GameTick.Zero);
stateEntities.Add(GetEntityState(player, uid, fromTick, md));
}
@@ -1069,8 +1068,8 @@ internal sealed partial class PvsSystem : EntitySystem
continue;
DebugTools.Assert(md.EntityLifeStage >= EntityLifeStage.Initialized);
DebugTools.Assert(md.EntityLastModifiedTick >= md.CreationTick);
DebugTools.Assert(md.EntityLastModifiedTick > fromTick);
DebugTools.Assert(md.EntityLastModifiedTick >= md.CreationTick || md.EntityLastModifiedTick == GameTick.Zero);
DebugTools.Assert(md.EntityLastModifiedTick > fromTick || md.EntityLastModifiedTick == GameTick.Zero);
stateEntities.Add(GetEntityState(player, uid, fromTick, md));
}
}

View File

@@ -1,3 +1,4 @@
using System;
using Robust.Server.GameStates;
using Robust.Server.Player;
using Robust.Shared;
@@ -24,6 +25,12 @@ internal sealed class ReplayRecordingManager : SharedReplayRecordingManager, ISe
NetConf.OnValueChanged(CVars.ReplayServerRecordingEnabled, SetReplayEnabled, true);
}
protected override string DefaultReplayFileName()
{
// UTC
return DateTime.UtcNow.ToString(DefaultReplayNameFormat);
}
public override void RecordServerMessage(object obj)
=> RecordReplayMessage(obj);

View File

@@ -3,6 +3,7 @@ using System.Threading.Tasks;
using System.Net;
using System.Text.Json.Nodes;
using Robust.Shared;
using Robust.Shared.Utility;
namespace Robust.Server.ServerStatus
{
@@ -108,50 +109,20 @@ namespace Robust.Server.ServerStatus
private JsonObject GetExternalBuildInfo()
{
var zipHash = _cfg.GetCVar(CVars.BuildHash);
var manifestHash = _cfg.GetCVar(CVars.BuildManifestHash);
var forkId = _cfg.GetCVar(CVars.BuildForkId);
var forkVersion = _cfg.GetCVar(CVars.BuildVersion);
var manifestDownloadUrl = Interpolate(_cfg.GetCVar(CVars.BuildManifestDownloadUrl));
var manifestUrl = Interpolate(_cfg.GetCVar(CVars.BuildManifestUrl));
var downloadUrl = Interpolate(_cfg.GetCVar(CVars.BuildDownloadUrl));
if (zipHash == "")
zipHash = null;
if (manifestHash == "")
manifestHash = null;
if (manifestDownloadUrl == "")
manifestDownloadUrl = null;
if (manifestUrl == "")
manifestUrl = null;
var buildInfo = GameBuildInformation.GetBuildInfoFromConfig(_cfg);
return new JsonObject
{
["engine_version"] = _cfg.GetCVar(CVars.BuildEngineVersion),
["fork_id"] = forkId,
["version"] = forkVersion,
["download_url"] = downloadUrl,
["hash"] = zipHash,
["engine_version"] = buildInfo.EngineVersion,
["fork_id"] = buildInfo.ForkId,
["version"] = buildInfo.Version,
["download_url"] = buildInfo.ZipDownload,
["hash"] = buildInfo.ZipHash,
["acz"] = false,
["manifest_download_url"] = manifestDownloadUrl,
["manifest_url"] = manifestUrl,
["manifest_hash"] = manifestHash
["manifest_download_url"] = buildInfo.ManifestDownloadUrl,
["manifest_url"] = buildInfo.ManifestUrl,
["manifest_hash"] = buildInfo.ManifestHash
};
string? Interpolate(string? value)
{
// Can't tell if splitting the ?. like this is more cursed than
// failing to align due to putting the full ?. on the next line
return value?
.Replace("{FORK_VERSION}", forkVersion)
.Replace("{FORK_ID}", forkId)
.Replace("{MANIFEST_HASH}", manifestHash)
.Replace("{ZIP_HASH}", zipHash);
}
}
private async Task<JsonObject?> PrepareACZBuildInfo()

View File

@@ -101,7 +101,7 @@ namespace Robust.Shared.Maths
/// <summary>
/// Adds scalar b to a and stores the result in a.
/// </summary>
[MethodImpl(MethodImplOptions.AggressiveInlining | MethodImplOptions.AggressiveOptimization)]
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public static void Add(Span<float> a, float b)
{
Add(a, b, a);
@@ -110,7 +110,7 @@ namespace Robust.Shared.Maths
/// <summary>
/// Adds scalar b to a and stores the result in s.
/// </summary>
[MethodImpl(MethodImplOptions.AggressiveInlining | MethodImplOptions.AggressiveOptimization)]
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public static void Add(ReadOnlySpan<float> a, float b, Span<float> s)
{
if (a.Length != s.Length)
@@ -125,7 +125,7 @@ namespace Robust.Shared.Maths
Add128(a, b, s);
}
[MethodImpl(MethodImplOptions.AggressiveInlining | MethodImplOptions.AggressiveOptimization)]
[MethodImpl(MethodImplOptions.AggressiveInlining)]
internal static void AddScalar(ReadOnlySpan<float> a, float b, Span<float> s, int start, int end)
{
for (var i = start; i < end; i++)
@@ -134,7 +134,7 @@ namespace Robust.Shared.Maths
}
}
[MethodImpl(MethodImplOptions.AggressiveInlining | MethodImplOptions.AggressiveOptimization)]
[MethodImpl(MethodImplOptions.AggressiveInlining)]
internal static void Add128(ReadOnlySpan<float> a, float b, Span<float> s)
{
var remainder = a.Length & (Vector128<float>.Count - 1);
@@ -159,7 +159,7 @@ namespace Robust.Shared.Maths
}
}
[MethodImpl(MethodImplOptions.AggressiveInlining | MethodImplOptions.AggressiveOptimization)]
[MethodImpl(MethodImplOptions.AggressiveInlining)]
internal static void Add256(ReadOnlySpan<float> a, float b, Span<float> s)
{
var remainder = a.Length & (Vector256<float>.Count - 1);
@@ -191,7 +191,7 @@ namespace Robust.Shared.Maths
/// <summary>
/// Adds all elements of a and returns the value.
/// </summary>
[MethodImpl(MethodImplOptions.AggressiveInlining | MethodImplOptions.AggressiveOptimization)]
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public static float HorizontalAdd(ReadOnlySpan<float> a)
{
if (Vector256Enabled && LengthValid256Single(a.Length))
@@ -202,7 +202,7 @@ namespace Robust.Shared.Maths
return HorizontalAdd128(a);
}
[MethodImpl(MethodImplOptions.AggressiveInlining | MethodImplOptions.AggressiveOptimization)]
[MethodImpl(MethodImplOptions.AggressiveInlining)]
internal static float HorizontalAddScalar(ReadOnlySpan<float> a, int start, int end)
{
var sum = 0f;
@@ -215,7 +215,7 @@ namespace Robust.Shared.Maths
return sum;
}
[MethodImpl(MethodImplOptions.AggressiveInlining | MethodImplOptions.AggressiveOptimization)]
[MethodImpl(MethodImplOptions.AggressiveInlining)]
internal static float HorizontalAdd128(ReadOnlySpan<float> a)
{
var remainder = a.Length & 3;
@@ -242,7 +242,7 @@ namespace Robust.Shared.Maths
return sum;
}
[MethodImpl(MethodImplOptions.AggressiveInlining | MethodImplOptions.AggressiveOptimization)]
[MethodImpl(MethodImplOptions.AggressiveInlining)]
internal static float HorizontalAdd256(ReadOnlySpan<float> a)
{
var remainder = a.Length & 7;

View File

@@ -479,52 +479,51 @@ namespace Robust.Shared
/// </summary>
public static readonly CVarDef<string> BuildEngineVersion =
CVarDef.Create("build.engine_version",
typeof(CVars).Assembly.GetName().Version?.ToString(4) ?? String.Empty,
CVar.SERVER | CVar.REPLICATED);
typeof(CVars).Assembly.GetName().Version?.ToString(3) ?? String.Empty);
/// <summary>
/// Fork ID, as a hint to the launcher to manage local files.
/// This can be anything, it does not need a strict format.
/// </summary>
public static readonly CVarDef<string> BuildForkId =
CVarDef.Create("build.fork_id", "", CVar.SERVER | CVar.REPLICATED);
CVarDef.Create("build.fork_id", "");
/// <summary>
/// Version string, as a hint to the launcher to manage local files.
/// This can be anything, it does not need a strict format.
/// </summary>
public static readonly CVarDef<string> BuildVersion =
CVarDef.Create("build.version", "", CVar.SERVER | CVar.REPLICATED);
CVarDef.Create("build.version", "");
/// <summary>
/// Content pack the launcher should download to connect to this server.
/// </summary>
public static readonly CVarDef<string> BuildDownloadUrl =
CVarDef.Create("build.download_url", string.Empty, CVar.SERVERONLY);
CVarDef.Create("build.download_url", string.Empty);
/// <summary>
/// URL of the content manifest the launcher should download to connect to this server.
/// </summary>
public static readonly CVarDef<string> BuildManifestUrl =
CVarDef.Create("build.manifest_url", string.Empty, CVar.SERVERONLY);
CVarDef.Create("build.manifest_url", string.Empty);
/// <summary>
/// URL at which the launcher can download the manifest game files.
/// </summary>
public static readonly CVarDef<string> BuildManifestDownloadUrl =
CVarDef.Create("build.manifest_download_url", string.Empty, CVar.SERVERONLY);
CVarDef.Create("build.manifest_download_url", string.Empty);
/// <summary>
/// SHA-256 hash of the content pack hosted at <c>build.download_url</c>
/// </summary>
public static readonly CVarDef<string> BuildHash =
CVarDef.Create("build.hash", "", CVar.SERVERONLY);
CVarDef.Create("build.hash", "");
/// <summary>
/// SHA-256 hash of the manifest hosted at <c>build.manifest_url</c>
/// </summary>
public static readonly CVarDef<string> BuildManifestHash =
CVarDef.Create("build.manifest_hash", "", CVar.SERVERONLY);
CVarDef.Create("build.manifest_hash", "");
/*
* WATCHDOG
@@ -1440,6 +1439,11 @@ namespace Robust.Shared
public static readonly CVarDef<int> ReplayTickBatchSize = CVarDef.Create("replay.replay_tick_batchSize",
1024, CVar.ARCHIVE);
/// <summary>
/// The max amount of pending write commands while recording replays.
/// </summary>
public static readonly CVarDef<int> ReplayWriteChannelSize = CVarDef.Create("replay.write_channel_size", 5);
/// <summary>
/// Whether or not server-side replay recording is enabled.
/// </summary>
@@ -1483,6 +1487,16 @@ namespace Robust.Shared
/// </summary>
public static readonly CVarDef<bool> ReplayDynamicalScrubbing = CVarDef.Create("replay.dynamical_scrubbing", true);
/// <summary>
/// When recording replays,
/// should we attempt to make a valid content bundle that can be directly executed by the launcher?
/// </summary>
/// <remarks>
/// This requires the server's build information to be sufficiently filled out.
/// </remarks>
public static readonly CVarDef<bool> ReplayMakeContentBundle =
CVarDef.Create("replay.make_content_bundle", true);
/*
* CFG
*/

View File

@@ -1,8 +1,5 @@
using System;
using System.Collections.Generic;
using System.Collections.Generic;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using JetBrains.Annotations;
using Robust.Shared.Utility;
@@ -107,25 +104,5 @@ namespace Robust.Shared.ContentPack
/// that opens up the screenshot directory using the operating system's file explorer.
/// </summary>
void OpenOsWindow(ResPath path);
/// <summary>
/// Asynchronously opens and writes the sequence of bytes to a file. If the file exists, its existing contents will
/// be replaced.
/// </summary>
Task WriteAllBytesAsync(ResPath path, byte[] bytes, CancellationToken cancellationToken = default);
/// <summary>
/// Asynchronously opens and writes the sequence of bytes to a file. If the file exists, its existing contents will
/// be replaced.
/// </summary>
Task WriteBytesAsync(ResPath path, byte[] bytes, int offset, int length,
CancellationToken cancellationToken = default);
/// <summary>
/// Asynchronously opens and writes the sequence of bytes to a file. If the file exists, its existing contents will
/// be replaced.
/// </summary>
Task WriteBytesAsync(ResPath patch, ReadOnlyMemory<byte> bytes,
CancellationToken cancellationToken = default);
}
}

View File

@@ -359,7 +359,12 @@ Types:
- "string ToUpper(string)"
System.IO.Compression:
CompressionMode: { } # Enum
CompressionLevel: { } # Enum
DeflateStream: { All: True }
ZipArchive: { All : True }
ZipArchiveEntry: { All : True }
ZipArchiveMode: { } # Enum
System.IO:
BinaryReader: { All: True }
FileAccess: { } # Enum

View File

@@ -2,8 +2,6 @@ using System;
using System.Collections.Generic;
using System.Diagnostics.CodeAnalysis;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using Robust.Shared.Utility;
namespace Robust.Shared.ContentPack
@@ -215,24 +213,6 @@ namespace Robust.Shared.ContentPack
// Not valid for virtual directories. As this has no effect on the rest of the game no exception is thrown.
}
public async Task WriteAllBytesAsync(ResPath path, byte[] bytes, CancellationToken cancellationToken = default)
{
var file = Open(path, FileMode.Create, FileAccess.Write, FileShare.None);
await file.WriteAsync(bytes, cancellationToken);
}
public async Task WriteBytesAsync(ResPath path, byte[] bytes, int offset, int length, CancellationToken cancellationToken = default)
{
var slice = new ReadOnlyMemory<byte>(bytes, offset, length);
await WriteBytesAsync(path, slice, cancellationToken);
}
public async Task WriteBytesAsync(ResPath path, ReadOnlyMemory<byte> bytes, CancellationToken cancellationToken = default)
{
var file = Open(path, FileMode.Create, FileAccess.Write, FileShare.None);
await file.WriteAsync(bytes, cancellationToken);
}
private bool TryGetNodeAt(ResPath path, [NotNullWhen(true)] out INode? node)
{
if (!path.IsRooted)

View File

@@ -2,8 +2,6 @@
using System.Collections.Generic;
using System.Diagnostics;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using Robust.Shared.Utility;
namespace Robust.Shared.ContentPack
@@ -170,24 +168,5 @@ namespace Robust.Shared.ContentPack
return Path.GetFullPath(Path.Combine(root, relPath));
}
public async Task WriteAllBytesAsync(ResPath path, byte[] bytes, CancellationToken cancellationToken = default)
{
var fullPath = GetFullPath(path);
await File.WriteAllBytesAsync(fullPath, bytes, cancellationToken);
}
public async Task WriteBytesAsync(ResPath path, byte[] bytes, int offset, int length, CancellationToken cancellationToken = default)
{
var slice = new ReadOnlyMemory<byte>(bytes, offset, length);
await WriteBytesAsync(path, slice, cancellationToken);
}
public async Task WriteBytesAsync(ResPath patch, ReadOnlyMemory<byte> bytes, CancellationToken cancellationToken = default)
{
var fullPath = GetFullPath(patch);
await using var fs = new FileStream(fullPath, FileMode.Create, FileAccess.Write, FileShare.None, 4096, true);
await fs.WriteAsync(bytes, cancellationToken);
}
}
}

View File

@@ -70,7 +70,7 @@ namespace Robust.Shared.GameObjects
// Every entity starts at tick 1, because they are conceptually created in the time between 0->1
[ViewVariables]
public GameTick EntityLastModifiedTick { get; internal set; } = new(1);
public GameTick EntityLastModifiedTick { get; internal set; } = GameTick.Zero;
/// <summary>
/// This is the tick at which the client last applied state data received from the server.

View File

@@ -1,5 +1,4 @@
using Prometheus;
using Robust.Shared.IoC;
using Robust.Shared.Log;
using Robust.Shared.Map;
using Robust.Shared.Profiling;
@@ -11,6 +10,7 @@ using System;
using System.Collections.Generic;
using System.Diagnostics.CodeAnalysis;
using System.Linq;
using System.Runtime.CompilerServices;
using Robust.Shared.Physics;
using Robust.Shared.Serialization.Markdown.Mapping;
@@ -24,13 +24,13 @@ namespace Robust.Shared.GameObjects
{
#region Dependencies
[Dependency] protected readonly IPrototypeManager PrototypeManager = default!;
[Dependency] protected readonly ILogManager LogManager = default!;
[Dependency] private readonly IEntitySystemManager _entitySystemManager = default!;
[Dependency] private readonly IMapManager _mapManager = default!;
[Dependency] private readonly IGameTiming _gameTiming = default!;
[Dependency] private readonly ISerializationManager _serManager = default!;
[Dependency] private readonly ProfManager _prof = default!;
[IoC.Dependency] protected readonly IPrototypeManager PrototypeManager = default!;
[IoC.Dependency] protected readonly ILogManager LogManager = default!;
[IoC.Dependency] private readonly IEntitySystemManager _entitySystemManager = default!;
[IoC.Dependency] private readonly IMapManager _mapManager = default!;
[IoC.Dependency] private readonly IGameTiming _gameTiming = default!;
[IoC.Dependency] private readonly ISerializationManager _serManager = default!;
[IoC.Dependency] private readonly ProfManager _prof = default!;
// I feel like PJB might shed me for putting a system dependency here, but its required for setting entity
// positions on spawn....
@@ -347,13 +347,65 @@ namespace Robust.Shared.GameObjects
return newEntity;
}
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public EntityUid[] SpawnEntities(EntityCoordinates coordinates, params string?[] protoNames)
{
var ents = new EntityUid[protoNames.Length];
for (var i = 0; i < protoNames.Length; i++)
{
ents[i] = SpawnEntity(protoNames[i], coordinates);
}
return ents;
}
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public EntityUid[] SpawnEntities(MapCoordinates coordinates, params string?[] protoNames)
{
var ents = new EntityUid[protoNames.Length];
for (var i = 0; i < protoNames.Length; i++)
{
ents[i] = SpawnEntity(protoNames[i], coordinates);
}
return ents;
}
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public EntityUid[] SpawnEntities(EntityCoordinates coordinates, List<string?> protoNames)
{
var ents = new EntityUid[protoNames.Count];
for (var i = 0; i < protoNames.Count; i++)
{
ents[i] = SpawnEntity(protoNames[i], coordinates);
}
return ents;
}
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public EntityUid[] SpawnEntities(MapCoordinates coordinates, List<string?> protoNames)
{
var ents = new EntityUid[protoNames.Count];
for (var i = 0; i < protoNames.Count; i++)
{
ents[i] = SpawnEntity(protoNames[i], coordinates);
}
return ents;
}
/// <inheritdoc />
public virtual EntityUid SpawnEntity(string? protoName, EntityCoordinates coordinates, ComponentRegistry? overrides = null)
{
if (!coordinates.IsValid(this))
throw new InvalidOperationException($"Tried to spawn entity {protoName} on invalid coordinates {coordinates}.");
var entity = CreateEntityUninitialized(protoName, coordinates);
var entity = CreateEntityUninitialized(protoName, coordinates, overrides);
InitializeAndStartEntity(entity, coordinates.GetMapId(this));
return entity;
}

View File

@@ -15,7 +15,7 @@ namespace Robust.Shared.GameObjects
/// <param name="logMissing">Whether to log missing components.</param>
/// <typeparam name="TComp">The component type to resolve.</typeparam>
/// <returns>True if the component is not null or was resolved correctly, false if the component couldn't be resolved.</returns>
[MethodImpl(MethodImplOptions.AggressiveInlining | MethodImplOptions.AggressiveOptimization)]
[MethodImpl(MethodImplOptions.AggressiveInlining)]
protected bool Resolve<TComp>(EntityUid uid, [NotNullWhen(true)] ref TComp? component, bool logMissing = true)
where TComp : IComponent
{
@@ -42,7 +42,7 @@ namespace Robust.Shared.GameObjects
/// <typeparam name="TComp1">The component type to resolve.</typeparam>
/// <typeparam name="TComp2">The component type to resolve.</typeparam>
/// <returns>True if the components are not null or were resolved correctly, false if any of the component couldn't be resolved.</returns>
[MethodImpl(MethodImplOptions.AggressiveInlining | MethodImplOptions.AggressiveOptimization)]
[MethodImpl(MethodImplOptions.AggressiveInlining)]
protected bool Resolve<TComp1, TComp2>(EntityUid uid, [NotNullWhen(true)] ref TComp1? comp1, [NotNullWhen(true)] ref TComp2? comp2, bool logMissing = true)
where TComp1 : IComponent
where TComp2 : IComponent
@@ -62,7 +62,7 @@ namespace Robust.Shared.GameObjects
/// <typeparam name="TComp2">The component type to resolve.</typeparam>
/// <typeparam name="TComp3">The component type to resolve.</typeparam>
/// <returns>True if the components are not null or were resolved correctly, false if any of the component couldn't be resolved.</returns>
[MethodImpl(MethodImplOptions.AggressiveInlining | MethodImplOptions.AggressiveOptimization)]
[MethodImpl(MethodImplOptions.AggressiveInlining)]
protected bool Resolve<TComp1, TComp2, TComp3>(EntityUid uid, [NotNullWhen(true)] ref TComp1? comp1, [NotNullWhen(true)] ref TComp2? comp2, [NotNullWhen(true)] ref TComp3? comp3, bool logMissing = true)
where TComp1 : IComponent
where TComp2 : IComponent
@@ -85,7 +85,7 @@ namespace Robust.Shared.GameObjects
/// <typeparam name="TComp3">The component type to resolve.</typeparam>
/// <typeparam name="TComp4">The component type to resolve.</typeparam>
/// <returns>True if the components are not null or were resolved correctly, false if any of the component couldn't be resolved.</returns>
[MethodImpl(MethodImplOptions.AggressiveInlining | MethodImplOptions.AggressiveOptimization)]
[MethodImpl(MethodImplOptions.AggressiveInlining)]
protected bool Resolve<TComp1, TComp2, TComp3, TComp4>(EntityUid uid, [NotNullWhen(true)] ref TComp1? comp1, [NotNullWhen(true)] ref TComp2? comp2, [NotNullWhen(true)] ref TComp3? comp3, [NotNullWhen(true)] ref TComp4? comp4, bool logMissing = true)
where TComp1 : IComponent
where TComp2 : IComponent

View File

@@ -72,14 +72,14 @@ namespace Robust.Shared.GameObjects
return system;
}
[MethodImpl(MethodImplOptions.AggressiveOptimization | MethodImplOptions.AggressiveInlining)]
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public void Resolve<T>([NotNull] ref T? instance)
where T : IEntitySystem
{
_systemDependencyCollection.Resolve(ref instance);
}
[MethodImpl(MethodImplOptions.AggressiveOptimization | MethodImplOptions.AggressiveInlining)]
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public void Resolve<T1, T2>([NotNull] ref T1? instance1, [NotNull] ref T2? instance2)
where T1 : IEntitySystem
where T2 : IEntitySystem
@@ -87,7 +87,7 @@ namespace Robust.Shared.GameObjects
_systemDependencyCollection.Resolve(ref instance1, ref instance2);
}
[MethodImpl(MethodImplOptions.AggressiveOptimization | MethodImplOptions.AggressiveInlining)]
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public void Resolve<T1, T2, T3>([NotNull] ref T1? instance1, [NotNull] ref T2? instance2, [NotNull] ref T3? instance3)
where T1 : IEntitySystem
where T2 : IEntitySystem
@@ -96,7 +96,7 @@ namespace Robust.Shared.GameObjects
_systemDependencyCollection.Resolve(ref instance1, ref instance2, ref instance3);
}
[MethodImpl(MethodImplOptions.AggressiveOptimization | MethodImplOptions.AggressiveInlining)]
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public void Resolve<T1, T2, T3, T4>([NotNull] ref T1? instance1, [NotNull] ref T2? instance2, [NotNull] ref T3? instance3, [NotNull] ref T4? instance4)
where T1 : IEntitySystem
where T2 : IEntitySystem

View File

@@ -71,6 +71,14 @@ namespace Robust.Shared.GameObjects
void StartEntity(EntityUid entity);
EntityUid[] SpawnEntities(EntityCoordinates coordinates, params string?[] protoNames);
EntityUid[] SpawnEntities(MapCoordinates coordinates, params string?[] protoNames);
EntityUid[] SpawnEntities(EntityCoordinates coordinates, List<string?> protoNames);
EntityUid[] SpawnEntities(MapCoordinates coordinates, List<string?> protoNames);
/// <summary>
/// Spawns an initialized entity at the default location, using the given prototype.
/// </summary>

View File

@@ -332,21 +332,21 @@ namespace Robust.Shared.IoC
return (T)ResolveType(typeof(T));
}
[MethodImpl(MethodImplOptions.AggressiveOptimization | MethodImplOptions.AggressiveInlining)]
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public void Resolve<T>([NotNull] ref T? instance)
{
// Resolve<T>() will either throw or return a concrete instance, therefore we suppress the nullable warning.
instance ??= Resolve<T>()!;
}
[MethodImpl(MethodImplOptions.AggressiveOptimization | MethodImplOptions.AggressiveInlining)]
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public void Resolve<T1, T2>([NotNull] ref T1? instance1, [NotNull] ref T2? instance2)
{
Resolve(ref instance1);
Resolve(ref instance2);
}
[MethodImpl(MethodImplOptions.AggressiveOptimization | MethodImplOptions.AggressiveInlining)]
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public void Resolve<T1, T2, T3>([NotNull] ref T1? instance1, [NotNull] ref T2? instance2,
[NotNull] ref T3? instance3)
{
@@ -354,7 +354,7 @@ namespace Robust.Shared.IoC
Resolve(ref instance3);
}
[MethodImpl(MethodImplOptions.AggressiveOptimization | MethodImplOptions.AggressiveInlining)]
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public void Resolve<T1, T2, T3, T4>([NotNull] ref T1? instance1, [NotNull] ref T2? instance2,
[NotNull] ref T3? instance3, [NotNull] ref T4? instance4)
{

View File

@@ -201,7 +201,7 @@ namespace Robust.Shared.IoC
}
/// <inheritdoc cref="Resolve{T}()"/>
[MethodImpl(MethodImplOptions.AggressiveOptimization | MethodImplOptions.AggressiveInlining)]
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public static void Resolve<T>([NotNull] ref T? instance)
{
// Do not call into IDependencyCollection immediately for this,
@@ -213,7 +213,7 @@ namespace Robust.Shared.IoC
/// <summary>
/// Resolve two dependencies manually.
/// </summary>
[MethodImpl(MethodImplOptions.AggressiveOptimization | MethodImplOptions.AggressiveInlining)]
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public static void Resolve<T1, T2>([NotNull] ref T1? instance1, [NotNull] ref T2? instance2)
{
DebugTools.Assert(_container.IsValueCreated, NoContextAssert);
@@ -225,7 +225,7 @@ namespace Robust.Shared.IoC
/// <summary>
/// Resolve three dependencies manually.
/// </summary>
[MethodImpl(MethodImplOptions.AggressiveOptimization | MethodImplOptions.AggressiveInlining)]
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public static void Resolve<T1, T2, T3>([NotNull] ref T1? instance1, [NotNull] ref T2? instance2, [NotNull] ref T3? instance3)
{
DebugTools.Assert(_container.IsValueCreated, NoContextAssert);
@@ -237,7 +237,7 @@ namespace Robust.Shared.IoC
/// <summary>
/// Resolve four dependencies manually.
/// </summary>
[MethodImpl(MethodImplOptions.AggressiveOptimization | MethodImplOptions.AggressiveInlining)]
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public static void Resolve<T1, T2, T3, T4>([NotNull] ref T1? instance1, [NotNull] ref T2? instance2, [NotNull] ref T3? instance3, [NotNull] ref T4? instance4)
{
DebugTools.Assert(_container.IsValueCreated, NoContextAssert);

View File

@@ -43,6 +43,18 @@ namespace Robust.Shared.Log
/// </summary>
void Log(LogLevel level, string message);
/// <summary>
/// Log a message as <see cref="LogLevel.Verbose"/>, taking in a format string and format list using the regular <see cref="Format" /> syntax.
/// </summary>
/// <seealso cref="Serilog.Log" />
void Verbose(string message, params object?[] args) => Log(LogLevel.Verbose, message, args);
/// <summary>
/// Log a message as <see cref="LogLevel.Verbose"/>.
/// </summary>
/// <seealso cref="Serilog.Log" />
void Verbose(string message) => Log(LogLevel.Verbose, message);
/// <summary>
/// Log a message as debug, taking in a format string and format list using the regular <see cref="Format" /> syntax.
/// </summary>

View File

@@ -12,7 +12,7 @@ namespace Robust.Shared.Map.Components
public sealed class MapComponent : Component
{
[ViewVariables(VVAccess.ReadWrite)]
[DataField(("lightingEnabled"))]
[DataField("lightingEnabled")]
public bool LightingEnabled { get; set; } = true;
[ViewVariables(VVAccess.ReadOnly)]

View File

@@ -1,4 +1,5 @@
using System.Collections.Generic;
using Robust.Shared.Random;
namespace Robust.Shared.Map
{
@@ -7,6 +8,14 @@ namespace Robust.Shared.Map
/// </summary>
public interface ITileDefinitionManager : IEnumerable<ITileDefinition>
{
Tile GetVariantTile(string name, IRobustRandom random);
Tile GetVariantTile(string name, System.Random random);
Tile GetVariantTile(ITileDefinition tileDef, IRobustRandom random);
Tile GetVariantTile(ITileDefinition tileDef, System.Random random);
/// <summary>
/// Indexer to retrieve a tile definition by name.
/// Note: In the presence of tile aliases, this[A].ID does not necessarily equal A.

View File

@@ -2,6 +2,7 @@
using System.Collections.Generic;
using Robust.Shared.IoC;
using Robust.Shared.Prototypes;
using Robust.Shared.Random;
namespace Robust.Shared.Map
{
@@ -12,7 +13,6 @@ namespace Robust.Shared.Map
protected readonly List<ITileDefinition> TileDefs;
private readonly Dictionary<string, ITileDefinition> _tileNames;
private readonly Dictionary<string, List<string>> _awaitingAliases;
private readonly Dictionary<ITileDefinition, ushort> _tileIds;
/// <summary>
/// Default Constructor.
@@ -21,7 +21,6 @@ namespace Robust.Shared.Map
{
TileDefs = new List<ITileDefinition>();
_tileNames = new Dictionary<string, ITileDefinition>();
_tileIds = new Dictionary<ITileDefinition, ushort>();
_awaitingAliases = new();
}
@@ -45,7 +44,6 @@ namespace Robust.Shared.Map
tileDef.AssignTileId(id);
TileDefs.Add(tileDef);
_tileNames[name] = tileDef;
_tileIds[tileDef] = id;
AliasingHandleDeferred(name);
}
@@ -87,6 +85,28 @@ namespace Robust.Shared.Map
}
}
public Tile GetVariantTile(string name, IRobustRandom random)
{
var tileDef = this[name];
return GetVariantTile(tileDef, random);
}
public Tile GetVariantTile(string name, System.Random random)
{
var tileDef = this[name];
return GetVariantTile(tileDef, random);
}
public Tile GetVariantTile(ITileDefinition tileDef, IRobustRandom random)
{
return new Tile(tileDef.TileId, variant: random.NextByte(tileDef.Variants));
}
public Tile GetVariantTile(ITileDefinition tileDef, System.Random random)
{
return new Tile(tileDef.TileId, variant: random.NextByte(tileDef.Variants));
}
public ITileDefinition this[string name] => _tileNames[name];
public ITileDefinition this[int id] => TileDefs[id];

View File

@@ -37,6 +37,9 @@ namespace Robust.Shared.Network
{
try
{
_logger.Verbose($"{connection.RemoteEndPoint}: Starting handshake with peer ");
_logger.Verbose($"{connection.RemoteEndPoint}: Awaiting MsgLoginStart");
var incPacket = await AwaitData(connection);
var msgLogin = new MsgLoginStart();
@@ -48,6 +51,13 @@ namespace Robust.Shared.Network
var needPk = msgLogin.NeedPubKey;
var authServer = _config.GetCVar(CVars.AuthServer);
_logger.Verbose(
$"{connection.RemoteEndPoint}: Received MsgLoginStart. " +
$"canAuth: {canAuth}, needPk: {needPk}, username: {msgLogin.UserName}, encrypt: {msgLogin.Encrypt}");
_logger.Verbose(
$"{connection.RemoteEndPoint}: Connection is specialized local? {isLocal} ");
if (Auth == AuthMode.Required && !isLocal)
{
if (!canAuth)
@@ -64,6 +74,9 @@ namespace Robust.Shared.Network
if (canAuth && Auth != AuthMode.Disabled)
{
_logger.Verbose(
$"{connection.RemoteEndPoint}: Initiating authentication");
var verifyToken = new byte[4];
RandomNumberGenerator.Fill(verifyToken);
var msgEncReq = new MsgEncryptionRequest
@@ -78,11 +91,17 @@ namespace Robust.Shared.Network
msgEncReq.WriteToBuffer(outMsgEncReq, _serializer);
peer.Peer.SendMessage(outMsgEncReq, connection, NetDeliveryMethod.ReliableOrdered);
_logger.Verbose(
$"{connection.RemoteEndPoint}: Awaiting MsgEncryptionResponse");
incPacket = await AwaitData(connection);
var msgEncResponse = new MsgEncryptionResponse();
msgEncResponse.ReadFromBuffer(incPacket, _serializer);
_logger.Verbose(
$"{connection.RemoteEndPoint}: Received MsgEncryptionResponse");
var encResp = new byte[verifyToken.Length + SharedKeyLength];
var ret = CryptoBox.SealOpen(
encResp,
@@ -112,6 +131,9 @@ namespace Robust.Shared.Network
if (msgLogin.Encrypt)
encryption = new NetEncryption(sharedSecret, isServer: true);
_logger.Verbose(
$"{connection.RemoteEndPoint}: Checking with session server for auth hash...");
var authHashBytes = MakeAuthHash(sharedSecret, CryptoPublicKey!);
var authHash = Base64Helpers.ConvertToBase64Url(authHashBytes);
@@ -124,6 +146,12 @@ namespace Robust.Shared.Network
return;
}
_logger.Verbose(
$"{connection.RemoteEndPoint}: Auth hash passed. " +
$"User ID: {joinedRespJson.UserData!.UserId}, " +
$"Username: {joinedRespJson.UserData!.UserName}," +
$"Patron: {joinedRespJson.UserData.PatronTier}");
var userId = new NetUserId(joinedRespJson.UserData!.UserId);
userData = new NetUserData(userId, joinedRespJson.UserData.UserName)
{
@@ -135,6 +163,9 @@ namespace Robust.Shared.Network
}
else
{
_logger.Verbose(
$"{connection.RemoteEndPoint}: Not doing authentication");
var reqUserName = msgLogin.UserName;
if (!UsernameHelpers.IsNameValid(reqUserName, out var reason))
@@ -157,15 +188,27 @@ namespace Robust.Shared.Network
name = $"{origName}_{++iterations}";
}
_logger.Verbose(
$"{connection.RemoteEndPoint}: Assigned name: {name}");
NetUserId userId;
(userId, type) = await AssignUserIdAsync(name);
_logger.Verbose(
$"{connection.RemoteEndPoint}: Assigned user ID: {userId}");
userData = new NetUserData(userId, name)
{
HWId = msgLogin.HWId
};
}
_logger.Verbose(
$"{connection.RemoteEndPoint}: Login type: {type}");
_logger.Verbose(
$"{connection.RemoteEndPoint}: Raising Connecting event");
var endPoint = connection.RemoteEndPoint;
var connect = await OnConnecting(endPoint, userData, type);
if (connect.IsDenied)
@@ -174,9 +217,15 @@ namespace Robust.Shared.Network
return;
}
_logger.Verbose(
$"{connection.RemoteEndPoint}: Connecting event passed, client is IN");
// Well they're in. Kick a connected client with the same GUID if we have to.
if (_assignedUserIds.TryGetValue(userData.UserId, out var existing))
{
_logger.Verbose(
$"{connection.RemoteEndPoint}: User was already connected in another connection, disconnecting");
if (_awaitingDisconnectToConnect.Contains(userData.UserId))
{
connection.Disconnect("Stop trying to connect multiple times at once.");
@@ -188,7 +237,14 @@ namespace Robust.Shared.Network
{
existing.Disconnect("Another connection has been made with your account.");
// Have to wait until they're properly off the server to avoid any collisions.
_logger.Verbose(
$"{connection.RemoteEndPoint}: Awaiting for clean disconnect of previous client");
await AwaitDisconnectAsync(existing);
_logger.Verbose(
$"{connection.RemoteEndPoint}: Previous client disconnected");
}
finally
{
@@ -205,6 +261,8 @@ namespace Robust.Shared.Network
return;
}
_logger.Verbose($"{connection.RemoteEndPoint}: Sending MsgLoginSuccess");
var msg = peer.Peer.CreateMessage();
var msgResp = new MsgLoginSuccess
{

View File

@@ -737,6 +737,8 @@ namespace Robust.Shared.Network
NetEncryption? encryption,
LoginType loginType)
{
_logger.Verbose($"{sender.RemoteEndPoint}: Initial handshake complete!");
var channel = new NetChannel(this, sender, userData, loginType);
_assignedUserIds.Add(userData.UserId, sender);
_assignedUsernames.Add(userData.UserName, sender);
@@ -891,12 +893,12 @@ namespace Robust.Shared.Network
}
catch (InvalidCastException ice)
{
_logger.Error($"{msg.SenderConnection.RemoteEndPoint}: Wrong deserialization of {type.Name} packet: {ice.Message}");
_logger.Error($"{msg.SenderConnection.RemoteEndPoint}: Wrong deserialization of {type.Name} packet:\n{ice}");
return true;
}
catch (Exception e) // yes, we want to catch ALL exeptions for security
{
_logger.Warning($"{msg.SenderConnection.RemoteEndPoint}: Failed to deserialize {type.Name} packet: {e.Message}");
_logger.Error($"{msg.SenderConnection.RemoteEndPoint}: Failed to deserialize {type.Name} packet:\n{e}");
return true;
}

View File

@@ -104,7 +104,7 @@ namespace Robust.Shared.Physics
public int MaxBalance
{
[MethodImpl(MethodImplOptions.AggressiveOptimization | MethodImplOptions.NoInlining)]
[MethodImpl(MethodImplOptions.NoInlining)]
get
{
var maxBal = 0;
@@ -130,7 +130,7 @@ namespace Robust.Shared.Physics
public float AreaRatio
{
[MethodImpl(MethodImplOptions.AggressiveOptimization | MethodImplOptions.NoInlining)]
[MethodImpl(MethodImplOptions.NoInlining)]
get
{
if (_root == Proxy.Free)
@@ -387,7 +387,7 @@ namespace Robust.Shared.Physics
return _nodes[proxy].Aabb;
}
[MethodImpl(MethodImplOptions.AggressiveOptimization | MethodImplOptions.NoInlining)]
[MethodImpl(MethodImplOptions.NoInlining)]
private void RemoveLeaf(Proxy leaf)
{
if (leaf == _root)
@@ -562,7 +562,7 @@ namespace Robust.Shared.Physics
return cost;
}
[MethodImpl(MethodImplOptions.AggressiveOptimization | MethodImplOptions.NoInlining)]
[MethodImpl(MethodImplOptions.NoInlining)]
private void Balance(Proxy index)
{
while (index != Proxy.Free)
@@ -755,7 +755,7 @@ namespace Robust.Shared.Physics
/// <summary>
/// Compute the height of a sub-tree.
/// </summary>
[MethodImpl(MethodImplOptions.AggressiveOptimization | MethodImplOptions.NoInlining)]
[MethodImpl(MethodImplOptions.NoInlining)]
private int ComputeHeight(Proxy proxy)
{
ref var node = ref _nodes[proxy];
@@ -770,7 +770,7 @@ namespace Robust.Shared.Physics
) + 1;
}
[MethodImpl(MethodImplOptions.AggressiveOptimization | MethodImplOptions.NoInlining)]
[MethodImpl(MethodImplOptions.NoInlining)]
public void RebuildBottomUp(int free = 0)
{
var proxies = new Proxy[NodeCount + free];
@@ -909,7 +909,6 @@ namespace Robust.Shared.Physics
public delegate void FastQueryCallback(ref T userData);
[MethodImpl(MethodImplOptions.AggressiveOptimization)]
public void FastQuery(ref Box2 aabb, FastQueryCallback callback)
{
var stack = new GrowableStack<Proxy>(stackalloc Proxy[256]);

View File

@@ -175,7 +175,7 @@ namespace Robust.Shared.Physics
bool ICollection<T>.Remove(T item)
=> Remove(item);
[MethodImpl(MethodImplOptions.AggressiveOptimization | MethodImplOptions.NoInlining)]
[MethodImpl(MethodImplOptions.NoInlining)]
public bool Update(in T item, Box2? newBox = null)
{
if (!TryGetProxy(item, out var proxy))

View File

@@ -57,20 +57,11 @@ public interface IBroadPhase<T> : ICollection<T> where T : notnull {
int Capacity { get; }
int Height {
[MethodImpl(MethodImplOptions.AggressiveInlining)]
get;
}
int Height { get; }
int MaxBalance {
[MethodImpl(MethodImplOptions.AggressiveOptimization | MethodImplOptions.NoInlining)]
get;
}
int MaxBalance { get; }
float AreaRatio {
[MethodImpl(MethodImplOptions.AggressiveOptimization | MethodImplOptions.NoInlining)]
get;
}
float AreaRatio { get; }
bool Add(in T item, Box2? newAABB = null);

View File

@@ -1,5 +1,6 @@
using System;
using System.Collections.Generic;
using System.Runtime.CompilerServices;
using Robust.Shared.Collections;
using Robust.Shared.Maths;
@@ -78,6 +79,24 @@ public interface IRobustRandom
(list[k], list[n]) = (list[n], list[k]);
}
}
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public byte NextByte(byte maxValue)
{
return NextByte(0, maxValue);
}
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public byte NextByte()
{
return NextByte(byte.MaxValue);
}
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public byte NextByte(byte minValue, byte maxValue)
{
return (byte) Next(minValue, maxValue);
}
}
public static class RandomHelpers
@@ -97,4 +116,22 @@ public static class RandomHelpers
{
return random.NextDouble() < chance;
}
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public static byte NextByte(this System.Random random, byte maxValue)
{
return NextByte(random, 0, maxValue);
}
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public static byte NextByte(this System.Random random)
{
return NextByte(random, byte.MaxValue);
}
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public static byte NextByte(this System.Random random, byte minValue, byte maxValue)
{
return (byte) random.Next(minValue, maxValue);
}
}

View File

@@ -111,54 +111,11 @@ public interface IReplayRecordingManager
/// </summary>
(float Minutes, int Ticks, float Size, float UncompressedSize) GetReplayStats();
/// <summary>
/// Check the status of all async write tasks and return true if one of the tasks is still writing something.
/// </summary>
bool IsWriting();
/// <summary>
/// Returns a task that will wait for all the current writing tasks to finish.
/// </summary>
/// <exception cref="Exception">
/// Throws an exception if <see cref="IsRecording"/> is true (i.e., new write tasks as still being created).
/// <exception cref="InvalidOperationException">
/// Thrown if we are currently recording (<see cref="IsRecording"/> true).
/// </exception>
Task WaitWriteTasks();
// Define misc constants for writing and reading replays files.
# region Constants
/// <summary>
/// File extension for data files that have to be deserialized and decompressed.
/// </summary>
public const string Ext = "dat";
// filenames
public static readonly ResPath MetaFile = new($"replay.yml");
public static readonly ResPath CvarFile = new($"cvars.toml");
public static readonly ResPath StringsFile = new($"strings.{Ext}");
public static readonly ResPath InitFile = new($"init.{Ext}");
// Yaml keys
public const string Hash = "typeHash";
public const string CompHash = "componentHash";
public const string Strings = "stringHash";
public const string Time = "time";
public const string Name = "name";
public const string Tick = "serverStartTime";
public const string ServerTime = "startTick";
public const string BaseTick = "timeBaseTick";
public const string BaseTime = "timeBaseTimespan";
public const string Duration = "duration";
public const string Engine = "engineVersion";
public const string Fork = "buildForkId";
public const string ForkVersion = "buildVersion";
public const string FileCount = "fileCount";
public const string Compressed = "size";
public const string Uncompressed = "uncompressedSize";
public const string EndTick = "endTick";
public const string EndTime = "serverEndTime";
public const string IsClient = "clientRecording";
public const string Recorder = "recordedBy";
#endregion
}

View File

@@ -0,0 +1,158 @@
using Robust.Shared.GameObjects;
using Robust.Shared.Serialization;
using Robust.Shared.Timing;
using Robust.Shared.Utility;
namespace Robust.Shared.Replays;
/// <summary>
/// Contains various constants related to the replay recording subsystem.
/// </summary>
public static class ReplayConstants
{
/// <summary>
/// File extension for data files that have to be deserialized and decompressed.
/// </summary>
public const string Ext = "dat";
/// <summary>
/// Prefix used by serialized data messages.
/// </summary>
public const string DataFilePrefix = "data_";
// file names
/// <summary>
/// File that contains primary replay metadata.
/// </summary>
public static readonly ResPath FileMeta = new($"replay.yml");
/// <summary>
/// File that contains final replay metadata written at the end of a successful recording.
/// </summary>
public static readonly ResPath FileMetaFinal = new($"replay_final.yml");
/// <summary>
/// File that contains CVars at the start of a recording.
/// </summary>
public static readonly ResPath FileCvars = new($"cvars.toml");
/// <summary>
/// File that contains the serialization string map (<see cref="IRobustMappedStringSerializer"/>).
/// </summary>
public static readonly ResPath FileStrings = new($"strings.{Ext}");
/// <summary>
/// File that contains extra initialization objects provided by content.
/// </summary>
public static readonly ResPath FileInit = new($"init.{Ext}");
/// <summary>
/// Folder inside replay zip files that replay data is contained in.
/// </summary>
public static readonly ResPath ReplayZipFolder = new("_replay");
// Keys for the YAML data in replay.yml
/// <summary>
/// Type hash from <see cref="IRobustSerializer"/>.
/// </summary>
public const string MetaKeyTypeHash = "typeHash";
/// <summary>
/// Component hash from <see cref="IComponentFactory"/>.
/// </summary>
public const string MetaKeyComponentHash = "componentHash";
/// <summary>
/// String hash from <see cref="IRobustMappedStringSerializer"/>.
/// </summary>
public const string MetaKeyStringHash = "stringHash";
/// <summary>
/// Time the recording was started, in UTC.
/// </summary>
public const string MetaKeyTime = "time";
/// <summary>
/// The name of the recording.
/// </summary>
public const string MetaKeyName = "name";
/// <summary>
/// The tick the recording was started at.
/// </summary>
public const string MetaKeyStartTick = "startTick";
/// <summary>
/// The server time the recording was started at.
/// </summary>
public const string MetaKeyStartTime = "startTime";
/// <summary>
/// The base tick from <see cref="IGameTiming"/> when the recording was started.
/// </summary>
public const string MetaKeyBaseTick = "timeBaseTick";
/// <summary>
/// The base time from <see cref="IGameTiming"/> when the recording was started.
/// </summary>
public const string MetaKeyBaseTime = "timeBaseTime";
/// <summary>
/// The engine version that was recorded on.
/// </summary>
public const string MetaKeyEngineVersion = "engineVersion";
/// <summary>
/// The build fork ID that was recorded on.
/// </summary>
public const string MetaKeyForkId = "buildForkId";
/// <summary>
/// The build fork version that was recorded on.
/// </summary>
public const string MetaKeyForkVersion = "buildForkVersion";
/// <summary>
/// Is this a client-side recording?
/// </summary>
public const string MetaKeyIsClientRecording = "isClientRecording";
/// <summary>
/// If this is a client recording, what is the User ID player.
/// </summary>
public const string MetaKeyRecordedBy = "recordedBy";
// Keys for the YAML data in replay_final.yml
/// <summary>
/// How many individual data files have been recorded in total.
/// </summary>
public const string MetaFinalKeyFileCount = "fileCount";
/// <summary>
/// Length of the recording.
/// </summary>
public const string MetaFinalKeyDuration = "duration";
/// <summary>
/// Compressed total size of the replay data files.
/// </summary>
public const string MetaFinalKeyCompressedSize = "size";
/// <summary>
/// Uncompressed total size of the replay data files.
/// </summary>
public const string MetaFinalKeyUncompressedSize = "uncompressedSize";
/// <summary>
/// Tick the recording ends at.
/// </summary>
public const string MetaFinalKeyEndTick = "endTick";
/// <summary>
/// Time the recording ends at.
/// </summary>
public const string MetaFinalKeyEndTime = "endTime";
}

View File

@@ -112,7 +112,7 @@ public sealed class ReplayData
ClientSideRecording = clientSideRecording;
YamlData = yamlData;
if (YamlData.TryGet(new ValueDataNode(IReplayRecordingManager.Recorder), out ValueDataNode? node)
if (YamlData.TryGet(new ValueDataNode(ReplayConstants.MetaKeyRecordedBy), out ValueDataNode? node)
&& Guid.TryParse(node.Value, out var guid))
{
Recorder = new NetUserId(guid);

View File

@@ -2,10 +2,12 @@ using System;
using System.Buffers;
using System.Collections.Generic;
using System.IO;
using Robust.Shared.ContentPack;
using System.IO.Compression;
using System.Threading.Channels;
using Robust.Shared.Serialization;
using Robust.Shared.Utility;
using System.Threading.Tasks;
using Robust.Shared.Log;
using YamlDotNet.Core;
using YamlDotNet.RepresentationModel;
@@ -14,95 +16,132 @@ namespace Robust.Shared.Replays;
// This partial class has various methods for async file writing (in case the path is on a networked drive or something like that)
internal abstract partial class SharedReplayRecordingManager
{
private List<Task> _writeTasks = new();
// To avoid stuttering the main thread, we do IO (and writing to the zip in general) in the thread pool.
private void WriteYaml(YamlDocument data, IWritableDirProvider dir, ResPath path)
// While recording a replay, the Task for the write queue is stored in the RecordingState.
// However when the replay recording gets finished, we immediately clear _recState before the write queue is finished.
// As such, we need to track the task here.
// In practice, this list will most likely never contain more than a single element,
// and even then not for much longer than a couple hundred ms at most.
private readonly List<Task> _finalizingWriteTasks = new();
private static void WriteYaml(RecordingState state, ResPath path, YamlDocument data)
{
var memStream = new MemoryStream();
using var writer = new StreamWriter(memStream);
var yamlStream = new YamlStream { data };
yamlStream.Save(new YamlMappingFix(new Emitter(writer)), false);
writer.Flush();
var task = Task.Run(() => dir.WriteAllBytesAsync(path, memStream.ToArray()));
_writeTasks.Add(task);
WriteBytes(state, path, memStream.AsMemory());
}
private void WriteSerializer<T>(T obj, IWritableDirProvider dir, ResPath path)
private void WriteSerializer<T>(RecordingState state, ResPath path, T obj)
{
var memStream = new MemoryStream();
_serializer.SerializeDirect(memStream, obj);
var task = Task.Run(() => dir.WriteAllBytesAsync(path, memStream.ToArray()));
_writeTasks.Add(task);
WriteBytes(state, path, memStream.AsMemory());
}
private void WriteBytes(byte[] bytes, IWritableDirProvider dir, ResPath path)
private static void WritePooledBytes(
RecordingState state,
ResPath path,
byte[] bytes,
int length,
CompressionLevel compression)
{
var task = Task.Run(() => dir.WriteAllBytesAsync(path, bytes));
_writeTasks.Add(task);
}
DebugTools.Assert(path.IsRelative, "Zip path should be relative");
private void WritePooledBytes(byte[] bytes, int length, IWritableDirProvider dir, ResPath path)
{
var task = Task.Run(() => Write(bytes, length, dir, path));
_writeTasks.Add(task);
static async Task Write(byte[] bytes, int length, IWritableDirProvider dir, ResPath path)
WriteQueueTask(state, () =>
{
try
{
var slice = new ReadOnlyMemory<byte>(bytes, 0, length);
await dir.WriteBytesAsync(path, slice);
var entry = state.Zip.CreateEntry(path.ToString(), compression);
using var stream = entry.Open();
stream.Write(bytes, 0, length);
}
finally
{
ArrayPool<byte>.Shared.Return(bytes);
}
}
});
}
private void WriteToml(IEnumerable<string> enumerable, IWritableDirProvider dir, ResPath path)
private void WriteToml(RecordingState state, IEnumerable<string> enumerable, ResPath path)
{
var memStream = new MemoryStream();
NetConf.SaveToTomlStream(memStream, enumerable);
var task = Task.Run(() => dir.WriteAllBytesAsync(path, memStream.ToArray()));
_writeTasks.Add(task);
WriteBytes(state, path, memStream.AsMemory());
}
protected bool UpdateWriteTasks()
private static void WriteBytes(
RecordingState recState,
ResPath path,
ReadOnlyMemory<byte> bytes,
CompressionLevel compression = CompressionLevel.Optimal)
{
bool isWriting = false;
for (var i = _writeTasks.Count - 1; i >= 0; i--)
DebugTools.Assert(path.IsRelative, "Zip path should be relative");
WriteQueueTask(recState, () =>
{
var task = _writeTasks[i];
switch(task.Status)
var entry = recState.Zip.CreateEntry(path.ToString(), compression);
using var stream = entry.Open();
stream.Write(bytes.Span);
});
}
private static void WriteQueueTask(RecordingState recState, Action a)
{
var task = recState.WriteCommandChannel.WriteAsync(a);
// If we have to wait here, it's because the channel is full.
// Synchronous waiting is safe here: the writing code doesn't rely on the synchronization context.
if (!task.IsCompletedSuccessfully)
task.AsTask().Wait();
}
protected void UpdateWriteTasks()
{
if (_recState != null)
{
// We are actively recording a replay. Check the status of the write task to make sure nothing went wrong.
if (_recState.WriteTask.IsFaulted)
{
case TaskStatus.Canceled:
case TaskStatus.RanToCompletion:
_writeTasks.RemoveSwap(i);
break;
_sawmill.Log(
LogLevel.Error,
_recState.WriteTask.Exception,
"Write task failed while recording due to exception, aborting recording!");
case TaskStatus.Faulted:
var ex = task.Exception;
_sawmill.Error($"Replay write task encountered a fault. Rethrowing exception");
Reset();
throw ex!;
case TaskStatus.Created:
Reset();
throw new Exception("A replay write task was never started?");
default:
isWriting = true;
break;
Reset();
}
else if (_recState.WriteTask.IsCompleted)
{
// This shouldn't be possible since the write task only exits if we close the channel,
// which we only do while clearing _recState.
_sawmill.Error("Write task completed, but did not report an error?");
}
}
return isWriting;
}
for (var i = _finalizingWriteTasks.Count - 1; i >= 0; i--)
{
var task = _finalizingWriteTasks[i];
if (task.IsCompletedSuccessfully)
{
_sawmill.Debug("Write task finalized cleanly");
}
else if (task.IsFaulted)
{
_sawmill.Log(
LogLevel.Error,
task.Exception,
"Write task hit exception while finalizing, replay may have been corrupted!");
}
public bool IsWriting() => UpdateWriteTasks();
if (task.IsCompleted)
_finalizingWriteTasks.RemoveSwap(i);
}
}
public Task WaitWriteTasks()
{
@@ -112,6 +151,26 @@ internal abstract partial class SharedReplayRecordingManager
// First, check for any tasks that have encountered errors.
UpdateWriteTasks();
return Task.WhenAll(_writeTasks);
return Task.WhenAll(_finalizingWriteTasks);
}
private static async Task WriteQueueLoop(ChannelReader<Action> reader, ZipArchive archive)
{
try
{
while (true)
{
var result = await reader.WaitToReadAsync();
if (!result)
break;
var action = await reader.ReadAsync();
action();
}
}
finally
{
archive.Dispose();
}
}
}

View File

@@ -15,10 +15,15 @@ using System.Buffers;
using System.Collections.Generic;
using System.Globalization;
using System.IO;
using System.IO.Compression;
using System.Linq;
using System.Text.Json;
using System.Text.Json.Nodes;
using System.Threading.Channels;
using System.Threading.Tasks;
using Robust.Shared.Network;
using YamlDotNet.RepresentationModel;
using static Robust.Shared.Replays.IReplayRecordingManager;
using static Robust.Shared.Replays.ReplayConstants;
namespace Robust.Shared.Replays;
@@ -32,35 +37,29 @@ internal abstract partial class SharedReplayRecordingManager : IReplayRecordingM
[Dependency] private readonly IComponentFactory _factory = default!;
[Dependency] private readonly IRobustSerializer _serializer = default!;
[Dependency] private readonly INetManager _netMan = default!;
[Dependency] private readonly ILogManager _logManager = default!;
public event Action<MappingDataNode, List<object>>? RecordingStarted;
public event Action<MappingDataNode>? RecordingStopped;
public event Action<IWritableDirProvider, ResPath>? RecordingFinished;
private ISawmill _sawmill = default!;
private List<object> _queuedMessages = new();
// Config variables.
private int _maxCompressedSize;
private int _maxUncompressedSize;
private int _tickBatchSize;
private bool _enabled;
public bool IsRecording => _replay != null;
private (MemoryStream Stream, ZStdCompressionContext Context)? _replay;
private int _index = 0;
private int _currentCompressedSize;
private int _currentUncompressedSize;
private (GameTick Tick, TimeSpan Time) _recordingStart;
private TimeSpan? _recordingEnd;
private MappingDataNode? _yamlMetadata;
private (IWritableDirProvider, ResPath)? _directory;
public bool IsRecording => _recState != null;
private RecordingState? _recState;
/// <inheritdoc/>
public virtual void Initialize()
{
_sawmill = Logger.GetSawmill("replay");
_sawmill = _logManager.GetSawmill("replay");
NetConf.OnValueChanged(CVars.ReplayMaxCompressedSize, (v) => _maxCompressedSize = v * 1024, true);
NetConf.OnValueChanged(CVars.ReplayMaxUncompressedSize, (v) => _maxUncompressedSize = v * 1024, true);
NetConf.OnValueChanged(CVars.ReplayTickBatchSize, (v) => _tickBatchSize = v * 1024, true);
@@ -74,8 +73,8 @@ internal abstract partial class SharedReplayRecordingManager : IReplayRecordingM
private void OnCompressionChanged(int value)
{
if (_replay is var (_, context))
context.SetParameter(ZSTD_cParameter.ZSTD_c_compressionLevel, value);
// Update compression level on running replay.
_recState?.CompressionContext.SetParameter(ZSTD_cParameter.ZSTD_c_compressionLevel, value);
}
public void SetReplayEnabled(bool value)
@@ -89,7 +88,7 @@ internal abstract partial class SharedReplayRecordingManager : IReplayRecordingM
/// <inheritdoc/>
public void StopRecording()
{
if (_replay == null)
if (!IsRecording)
return;
try
@@ -110,20 +109,20 @@ internal abstract partial class SharedReplayRecordingManager : IReplayRecordingM
{
UpdateWriteTasks();
if (state == null || _replay is not var (stream, _))
if (state == null || _recState == null)
return;
try
{
_serializer.SerializeDirect(stream, state);
_serializer.SerializeDirect(stream, new ReplayMessage { Messages = _queuedMessages });
_serializer.SerializeDirect(_recState.Buffer, state);
_serializer.SerializeDirect(_recState.Buffer, new ReplayMessage { Messages = _queuedMessages });
_queuedMessages.Clear();
bool continueRecording = _recordingEnd == null || _recordingEnd.Value >= Timing.CurTime;
bool continueRecording = _recState.EndTime == null || _recState.EndTime.Value >= Timing.CurTime;
if (!continueRecording)
_sawmill.Info("Reached requested replay recording length. Stopping recording.");
if (!continueRecording || stream.Length > _tickBatchSize)
if (!continueRecording || _recState.Buffer.Length > _tickBatchSize)
WriteGameState(continueRecording);
}
catch (Exception e)
@@ -146,47 +145,71 @@ internal abstract partial class SharedReplayRecordingManager : IReplayRecordingM
// If the previous recording had exceptions, throw them now before starting a new recording.
UpdateWriteTasks();
ResPath subDir;
if (name == null)
{
name = DateTime.UtcNow.ToString(DefaultReplayNameFormat);
subDir = new ResPath(name);
}
else
{
subDir = new ResPath(name).Clean();
if (subDir == ResPath.Root || subDir == ResPath.Empty || subDir == ResPath.Self)
subDir = new ResPath(DateTime.UtcNow.ToString(DefaultReplayNameFormat));
}
name ??= DefaultReplayFileName();
var filePath = new ResPath(name).Clean();
if (filePath.Extension != "zip")
filePath = filePath.WithName(filePath.Filename + ".zip");
var basePath = new ResPath(NetConf.GetCVar(CVars.ReplayDirectory)).ToRootedPath();
subDir = basePath / subDir.ToRelativePath();
filePath = basePath / filePath.ToRelativePath();
if (directory.Exists(subDir))
// Make sure to create parent directory.
directory.CreateDir(filePath.Directory);
if (directory.Exists(filePath))
{
if (overwrite)
{
_sawmill.Info($"Replay folder {subDir} already exists. Overwriting.");
directory.Delete(subDir);
_sawmill.Info($"Replay file {filePath} already exists. Overwriting.");
directory.Delete(filePath);
}
else
{
_sawmill.Info($"Replay folder {subDir} already exists. Aborting recording.");
_sawmill.Info($"Replay file {filePath} already exists. Aborting recording.");
return false;
}
}
directory.CreateDir(subDir);
_directory = (directory, subDir);
var file = directory.Open(filePath, FileMode.Create, FileAccess.Write, FileShare.None);
var zip = new ZipArchive(file, ZipArchiveMode.Create);
var context = new ZStdCompressionContext();
context.SetParameter(ZSTD_cParameter.ZSTD_c_compressionLevel, NetConf.GetCVar(CVars.NetPVSCompressLevel));
_replay = (new MemoryStream(_tickBatchSize * 2), context);
_index = 0;
_recordingStart = (Timing.CurTick, Timing.CurTime);
var buffer = new MemoryStream(_tickBatchSize * 2);
TimeSpan? recordingEnd = null;
if (duration != null)
recordingEnd = Timing.CurTime + duration.Value;
var commandQueue = Channel.CreateBounded<Action>(
new BoundedChannelOptions(NetConf.GetCVar(CVars.ReplayWriteChannelSize))
{
SingleReader = true,
SingleWriter = true,
AllowSynchronousContinuations = false
}
);
var writeTask = Task.Run(() => WriteQueueLoop(commandQueue.Reader, zip));
_recState = new RecordingState(
zip,
buffer,
context,
Timing.CurTick,
Timing.CurTime,
recordingEnd,
commandQueue.Writer,
writeTask,
directory,
filePath
);
try
{
WriteInitialMetadata(name);
WriteContentBundleInfo(_recState);
WriteInitialMetadata(name, _recState);
}
catch
{
@@ -194,14 +217,13 @@ internal abstract partial class SharedReplayRecordingManager : IReplayRecordingM
throw;
}
if (duration != null)
_recordingEnd = Timing.CurTime + duration.Value;
_sawmill.Info("Started recording replay...");
UpdateWriteTasks();
return true;
}
protected abstract string DefaultReplayFileName();
public abstract void RecordServerMessage(object obj);
public abstract void RecordClientMessage(object obj);
@@ -216,137 +238,230 @@ internal abstract partial class SharedReplayRecordingManager : IReplayRecordingM
private void WriteGameState(bool continueRecording = true)
{
if (_replay is not var (stream, context) || _directory is not var (dir, path))
return;
DebugTools.Assert(_recState != null);
stream.Position = 0;
_recState.Buffer.Position = 0;
// Compress stream to buffer.
// First 4 bytes of buffer are reserved for the length of the uncompressed stream.
var bound = ZStd.CompressBound((int) stream.Length);
var bound = ZStd.CompressBound((int)_recState.Buffer.Length);
var buf = ArrayPool<byte>.Shared.Rent(4 + bound);
var length = context.Compress2( buf.AsSpan(4, bound), stream.AsSpan());
BitConverter.TryWriteBytes(buf, (int)stream.Length);
WritePooledBytes(buf, 4 + length, dir, path / $"{_index++}.{Ext}");
var length = _recState.CompressionContext.Compress2(buf.AsSpan(4, bound), _recState.Buffer.AsSpan());
BitConverter.TryWriteBytes(buf, (int)_recState.Buffer.Length);
WritePooledBytes(
_recState,
ReplayZipFolder / $"{DataFilePrefix}{_recState.Index++}.{Ext}",
buf, 4 + length, CompressionLevel.NoCompression);
_currentUncompressedSize += (int)stream.Length;
_currentCompressedSize += length;
if (_currentUncompressedSize >= _maxUncompressedSize || _currentCompressedSize >= _maxCompressedSize)
_recState.UncompressedSize += (int)_recState.Buffer.Length;
_recState.CompressedSize += length;
if (_recState.UncompressedSize >= _maxUncompressedSize || _recState.CompressedSize >= _maxCompressedSize)
{
_sawmill.Info("Reached max replay recording size. Stopping recording.");
continueRecording = false;
}
if (continueRecording)
stream.SetLength(0);
_recState.Buffer.SetLength(0);
else
WriteFinalMetadata();
WriteFinalMetadata(_recState);
}
protected virtual void Reset()
{
if (_replay is var (stream, context))
{
stream.Dispose();
context.Dispose();
}
if (_recState == null)
return;
_replay = null;
_currentCompressedSize = 0;
_currentUncompressedSize = 0;
_index = 0;
_recordingEnd = null;
_directory = null;
_recState.CompressionContext.Dispose();
// File stream is always disposed from the worker task.
_recState.WriteCommandChannel.Complete();
_recState = null;
}
/// <summary>
/// Write general replay data required to read the rest of the replay. We write this at the beginning rather than at the end on the off-chance that something goes wrong along the way and the recording is incomplete.
/// </summary>
private void WriteInitialMetadata(string name)
private void WriteInitialMetadata(string name, RecordingState recState)
{
if (_directory is not var (dir, path))
return;
var (stringHash, stringData) = _serializer.GetStringSerializerPackage();
var extraData = new List<object>();
// Saving YAML data. This gets overwritten later anyways, this is mostly in case something goes wrong.
{
_yamlMetadata = new MappingDataNode();
_yamlMetadata[Time] = new ValueDataNode(DateTime.UtcNow.ToString(CultureInfo.InvariantCulture));
_yamlMetadata[Name] = new ValueDataNode(name);
var yamlMetadata = new MappingDataNode();
yamlMetadata[MetaKeyTime] = new ValueDataNode(DateTime.UtcNow.ToString(CultureInfo.InvariantCulture));
yamlMetadata[MetaKeyName] = new ValueDataNode(name);
// version info
_yamlMetadata[Engine] = new ValueDataNode(NetConf.GetCVar(CVars.BuildEngineVersion));
_yamlMetadata[Fork] = new ValueDataNode(NetConf.GetCVar(CVars.BuildForkId));
_yamlMetadata[ForkVersion] = new ValueDataNode(NetConf.GetCVar(CVars.BuildVersion));
yamlMetadata[MetaKeyEngineVersion] = new ValueDataNode(NetConf.GetCVar(CVars.BuildEngineVersion));
yamlMetadata[MetaKeyForkId] = new ValueDataNode(NetConf.GetCVar(CVars.BuildForkId));
yamlMetadata[MetaKeyForkVersion] = new ValueDataNode(NetConf.GetCVar(CVars.BuildVersion));
// Hash data
_yamlMetadata[Hash] = new ValueDataNode(Convert.ToHexString(_serializer.GetSerializableTypesHash()));
_yamlMetadata[Strings] = new ValueDataNode(Convert.ToHexString(stringHash));
_yamlMetadata[CompHash] = new ValueDataNode(Convert.ToHexString(_factory.GetHash(true)));
yamlMetadata[MetaKeyTypeHash] = new ValueDataNode(Convert.ToHexString(_serializer.GetSerializableTypesHash()));
yamlMetadata[MetaKeyStringHash] = new ValueDataNode(Convert.ToHexString(stringHash));
yamlMetadata[MetaKeyComponentHash] = new ValueDataNode(Convert.ToHexString(_factory.GetHash(true)));
// Time data
var timeBase = Timing.TimeBase;
_yamlMetadata[Tick] = new ValueDataNode(_recordingStart.Tick.Value.ToString());
_yamlMetadata[BaseTick] = new ValueDataNode(timeBase.Item2.Value.ToString());
_yamlMetadata[BaseTime] = new ValueDataNode(timeBase.Item1.Ticks.ToString());
_yamlMetadata[ServerTime] = new ValueDataNode(_recordingStart.Time.ToString());
yamlMetadata[MetaKeyStartTick] = new ValueDataNode(recState.StartTick.Value.ToString());
yamlMetadata[MetaKeyBaseTick] = new ValueDataNode(timeBase.Item2.Value.ToString());
yamlMetadata[MetaKeyBaseTime] = new ValueDataNode(timeBase.Item1.Ticks.ToString());
yamlMetadata[MetaKeyStartTime] = new ValueDataNode(recState.StartTime.ToString());
_yamlMetadata[IsClient] = new ValueDataNode(_netMan.IsClient.ToString());
yamlMetadata[MetaKeyIsClientRecording] = new ValueDataNode(_netMan.IsClient.ToString());
RecordingStarted?.Invoke(_yamlMetadata, extraData);
RecordingStarted?.Invoke(yamlMetadata, extraData);
var document = new YamlDocument(_yamlMetadata.ToYaml());
WriteYaml(document, dir, path / MetaFile);
var document = new YamlDocument(yamlMetadata.ToYaml());
WriteYaml(recState, ReplayZipFolder / FileMeta, document);
}
// Saving misc extra data like networked messages that typically get sent to newly connecting clients.
// TODO REPLAYS compression
// currently resource uploads are uncompressed, so this might be quite big.
if (extraData.Count > 0)
WriteSerializer(new ReplayMessage { Messages = extraData }, dir, path / InitFile);
WriteSerializer(recState, ReplayZipFolder / FileInit, new ReplayMessage { Messages = extraData });
// save data required for IRobustMappedStringSerializer
WriteBytes(stringData, dir, path / StringsFile);
WriteBytes(recState, ReplayZipFolder / FileStrings, stringData, CompressionLevel.NoCompression);
// Save replicated cvars.
var cvars = NetConf.GetReplicatedVars(true).Select(x => x.name);
WriteToml(cvars, dir, path / CvarFile );
WriteToml(recState, cvars, ReplayZipFolder / FileCvars);
}
private void WriteFinalMetadata()
private void WriteFinalMetadata(RecordingState recState)
{
if (_yamlMetadata == null || _directory is not var (dir, path))
return;
RecordingStopped?.Invoke(_yamlMetadata);
var time = Timing.CurTime - _recordingStart.Time;
_yamlMetadata[EndTick] = new ValueDataNode(Timing.CurTick.Value.ToString());
_yamlMetadata[Duration] = new ValueDataNode(time.ToString());
_yamlMetadata[FileCount] = new ValueDataNode(_index.ToString());
_yamlMetadata[Compressed] = new ValueDataNode(_currentCompressedSize.ToString());
_yamlMetadata[Uncompressed] = new ValueDataNode(_currentUncompressedSize.ToString());
_yamlMetadata[EndTime] = new ValueDataNode(Timing.CurTime.ToString());
var yamlMetadata = new MappingDataNode();
RecordingStopped?.Invoke(yamlMetadata);
var time = Timing.CurTime - recState.StartTime;
yamlMetadata[MetaFinalKeyEndTick] = new ValueDataNode(Timing.CurTick.Value.ToString());
yamlMetadata[MetaFinalKeyDuration] = new ValueDataNode(time.ToString());
yamlMetadata[MetaFinalKeyFileCount] = new ValueDataNode(recState.Index.ToString());
yamlMetadata[MetaFinalKeyCompressedSize] = new ValueDataNode(recState.CompressedSize.ToString());
yamlMetadata[MetaFinalKeyUncompressedSize] = new ValueDataNode(recState.UncompressedSize.ToString());
yamlMetadata[MetaFinalKeyEndTime] = new ValueDataNode(Timing.CurTime.ToString());
// this just overwrites the previous yml with additional data.
var document = new YamlDocument(_yamlMetadata.ToYaml());
WriteYaml(document, dir, path / MetaFile);
var document = new YamlDocument(yamlMetadata.ToYaml());
WriteYaml(recState, ReplayZipFolder / FileMetaFinal, document);
UpdateWriteTasks();
RecordingFinished?.Invoke(dir, path);
RecordingFinished?.Invoke(recState.DestDir, recState.DestPath);
Reset();
}
private void WriteContentBundleInfo(RecordingState recState)
{
if (!NetConf.GetCVar(CVars.ReplayMakeContentBundle))
return;
if (GetServerBuildInformation() is not { } info)
{
_sawmill.Warning("Missing necessary build information, replay will not be a launcher-runnable content bundle");
return;
}
var document = new JsonObject
{
["engine_version"] = info.EngineVersion,
["base_build"] = new JsonObject
{
["fork_id"] = info.ForkId,
["version"] = info.Version,
["download_url"] = info.ZipDownload,
["hash"] = info.ZipHash,
["manifest_download_url"] = info.ManifestDownloadUrl,
["manifest_url"] = info.ManifestUrl,
["manifest_hash"] = info.ManifestHash
}
};
var bytes = JsonSerializer.SerializeToUtf8Bytes(document);
WriteBytes(recState, new ResPath("rt_content_bundle.json"), bytes);
}
/// <summary>
/// Get information describing the server build.
/// This will be embedded in replay content bundles to allow the launcher to directly load them.
/// </summary>
/// <returns>null if we do not have build information.</returns>
protected GameBuildInformation? GetServerBuildInformation()
{
var info = GameBuildInformation.GetBuildInfoFromConfig(NetConf);
var zip = info.ZipDownload != null && info.ZipHash != null;
var manifest = info.ManifestHash != null && info.ManifestUrl != null && info.ManifestDownloadUrl != null;
if (!zip && !manifest)
{
// Don't have necessary info to write useful build info to the replay file.
return null;
}
return info;
}
public (float Minutes, int Ticks, float Size, float UncompressedSize) GetReplayStats()
{
if (!IsRecording)
return default;
if (_recState == null)
throw new InvalidOperationException("Not recording replay!");
var time = (Timing.CurTime - _recordingStart.Time).TotalMinutes;
var tick = Timing.CurTick.Value - _recordingStart.Tick.Value;
var size = _currentCompressedSize / (1024f * 1024f);
var altSize = _currentUncompressedSize / (1024f * 1024f);
var time = (Timing.CurTime - _recState.StartTime).TotalMinutes;
var tick = Timing.CurTick.Value - _recState.StartTick.Value;
var size = _recState.CompressedSize / (1024f * 1024f);
var altSize = _recState.UncompressedSize / (1024f * 1024f);
return ((float)time, (int)tick, size, altSize);
}
/// <summary>
/// Contains all state related to an active recording.
/// </summary>
private sealed class RecordingState
{
public readonly ZipArchive Zip;
public readonly MemoryStream Buffer;
public readonly ZStdCompressionContext CompressionContext;
public readonly ChannelWriter<Action> WriteCommandChannel;
public readonly Task WriteTask;
public readonly IWritableDirProvider DestDir;
public readonly ResPath DestPath;
// Tick and time when the recording was started.
public readonly GameTick StartTick;
public readonly TimeSpan StartTime;
// Optionally, the time the recording should automatically end at.
public readonly TimeSpan? EndTime;
public int Index;
public int CompressedSize;
public int UncompressedSize;
public RecordingState(
ZipArchive zip,
MemoryStream buffer,
ZStdCompressionContext compressionContext,
GameTick startTick,
TimeSpan startTime,
TimeSpan? endTime,
ChannelWriter<Action> writeCommandChannel,
Task writeTask,
IWritableDirProvider destDir,
ResPath destPath)
{
WriteTask = writeTask;
DestDir = destDir;
DestPath = destPath;
Zip = zip;
Buffer = buffer;
CompressionContext = compressionContext;
StartTick = startTick;
StartTime = startTime;
EndTime = endTime;
WriteCommandChannel = writeCommandChannel;
}
}
}

View File

@@ -0,0 +1,64 @@
using Robust.Shared.Configuration;
namespace Robust.Shared.Utility;
internal sealed record GameBuildInformation(
string EngineVersion,
string? ZipHash,
string? ZipDownload,
string ForkId,
string Version,
string? ManifestHash,
string? ManifestUrl,
string? ManifestDownloadUrl
)
{
public static GameBuildInformation GetBuildInfoFromConfig(IConfigurationManager cfg)
{
var zipHash = cfg.GetCVar(CVars.BuildHash);
var manifestHash = cfg.GetCVar(CVars.BuildManifestHash);
var forkId = cfg.GetCVar(CVars.BuildForkId);
var forkVersion = cfg.GetCVar(CVars.BuildVersion);
var manifestDownloadUrl = Interpolate(cfg.GetCVar(CVars.BuildManifestDownloadUrl));
var manifestUrl = Interpolate(cfg.GetCVar(CVars.BuildManifestUrl));
var zipDownload = Interpolate(cfg.GetCVar(CVars.BuildDownloadUrl));
if (zipDownload == "")
zipDownload = null;
if (zipHash == "")
zipHash = null;
if (manifestHash == "")
manifestHash = null;
if (manifestDownloadUrl == "")
manifestDownloadUrl = null;
if (manifestUrl == "")
manifestUrl = null;
return new GameBuildInformation(
cfg.GetCVar(CVars.BuildEngineVersion),
zipHash,
zipDownload,
forkId,
forkVersion,
manifestHash,
manifestUrl,
manifestDownloadUrl
);
string? Interpolate(string? value)
{
// Can't tell if splitting the ?. like this is more cursed than
// failing to align due to putting the full ?. on the next line
return value?
.Replace("{FORK_VERSION}", forkVersion)
.Replace("{FORK_ID}", forkId)
.Replace("{MANIFEST_HASH}", manifestHash)
.Replace("{ZIP_HASH}", zipHash);
}
}
}

View File

@@ -11,7 +11,7 @@ from typing import List
def main():
parser = argparse.ArgumentParser(description = "Tool for versioning RobustToolbox: commits the version config update and sets your local tag.")
parser.add_argument("version", help = "Version that will be written to tag. Format: 0.x.x.x")
parser.add_argument("version", help = "Version that will be written to tag. Format: x.x.x")
parser.add_argument("--file-only", action = "store_true", help = "Does not perform the Git part of the update (for writes only, not undos!)")
parser.add_argument("--undo", action = "store_true", help = "Macro to rebase over last commit and remove version tag. Version still required.")
@@ -29,15 +29,12 @@ def main():
def verify_version(version: str):
parts = version.split(".")
if len(parts) != 4:
print("Version must be split into four parts with '.'")
if len(parts) != 3:
print("Version must be split into three parts with '.'")
sys.exit(1)
for v in parts:
# this verifies parsability, exceptions here are expected for bad input
int(v)
if int(parts[0]) != 0:
print("Major version must be 0")
sys.exit(1)
def write_version(version: str, file_only: bool):
# Writing operation