Co-authored-by: metalgearsloth <31366439+metalgearsloth@users.noreply.github.com>
@@ -26,8 +26,6 @@ using Robust.Shared.Log;
using Robust.Shared.Map;
using Robust.Shared.Network;
using Robust.Shared.Network.Messages;
using Robust.Shared.Physics;
using Robust.Shared.Physics.Components;
using Robust.Shared.Profiling;
using Robust.Shared.Replays;
using Robust.Shared.Timing;
@@ -37,7 +35,7 @@ namespace Robust.Client.GameStates
{
/// <inheritdoc />
[UsedImplicitly]
public sealed class ClientGameStateManager : IClientGameStateManager, IPostInjectInit
public sealed class ClientGameStateManager : IClientGameStateManager
{
private GameStateProcessor _processor = default!;

@@ -82,6 +80,13 @@ namespace Robust.Client.GameStates

private ISawmill _sawmill = default!;
/// <summary>
/// If we are waiting for a full game state from the server, we will automatically re-send full state requests
/// if they do not arrive in time. Ideally this should never happen; this is here just in case a client gets
/// stuck waiting for a full state that the server doesn't know the client even wants.
/// </summary>
public static readonly TimeSpan FullStateTimeout = TimeSpan.FromSeconds(10);
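A condensed sketch of the full-state request/timeout flow this commit adds (pieced together from RequestFullState, OnFullStateRequested and the ApplyGameState check further down; illustrative, not a verbatim excerpt):

// 1. The client asks for a full state; the processor records the request tick and wall-clock time.
RequestFullState();                        // -> _processor.OnFullStateRequested(_timing.LastRealTick)

// 2. While still waiting, each ApplyGameState call re-requests once FullStateTimeout has elapsed.
if (_processor.WaitingForFull
    && _processor.LastFullStateRequested is { } last
    && DateTime.UtcNow - last.Time > FullStateTimeout)
{
    RequestFullState(null, last.Tick);     // reuse the original tick in case the state is already in flight
}

// 3. Once the full state is applied, the pending request is cleared and WaitingForFull becomes false.
_processor.OnFullStateReceived();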
/// <inheritdoc />
public int MinBufferSize => _processor.MinBufferSize;

@@ -89,7 +94,8 @@ namespace Robust.Client.GameStates
public int TargetBufferSize => _processor.TargetBufferSize;

/// <inheritdoc />
public int CurrentBufferSize => _processor.CalculateBufferSize(_timing.LastRealTick);
public int GetApplicableStateCount() => _processor.GetApplicableStateCount();
public int StateCount => _processor.StateCount;

public bool IsPredictionEnabled { get; private set; }
public bool PredictionNeedsResetting { get; private set; }
@@ -121,7 +127,8 @@ namespace Robust.Client.GameStates
/// <inheritdoc />
public void Initialize()
{
_processor = new GameStateProcessor(_timing);
_sawmill = _logMan.GetSawmill("state");
_processor = new GameStateProcessor(this, _timing, _sawmill);

_network.RegisterNetMessage<MsgState>(HandleStateMessage);
_network.RegisterNetMessage<MsgStateLeavePvs>(HandlePvsLeaveMessage);
@@ -245,9 +252,19 @@ namespace Robust.Client.GameStates
/// <inheritdoc />
public void ApplyGameState()
{
// If we have been waiting for a full state for a long time, re-request a full state.
if (_processor.WaitingForFull
&& _processor.LastFullStateRequested is {} last
&& DateTime.UtcNow - last.Time > FullStateTimeout)
{
// Re-request a full state.
// We use the previous from-tick, just in case the full state is already on the way.
RequestFullState(null, last.Tick);
}

// Calculate how many states we need to apply this tick.
// Always at least one, but can be more based on StateBufferMergeThreshold.
var curBufSize = CurrentBufferSize;
var curBufSize = GetApplicableStateCount();
var targetBufSize = TargetBufferSize;

var bufferOverflow = curBufSize - targetBufSize - StateBufferMergeThreshold;
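A worked example with hypothetical numbers (none of these values come from this commit), assuming the logic below this hunk still applies a positive overflow as extra states in the same tick:

// Suppose GetApplicableStateCount() returns 9, TargetBufferSize is 2 and StateBufferMergeThreshold is 5.
var bufferOverflow = 9 - 2 - 5;   // == 2
// The overflow is positive, so the client would chew through roughly two extra buffered states this
// tick, pulling the backlog back toward the target buffer size instead of letting latency accumulate.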
@@ -300,9 +317,9 @@ namespace Robust.Client.GameStates
}

// If we were waiting for a new state, we are now applying it.
if (_processor.LastFullStateRequested.HasValue)
if (_processor.WaitingForFull)
{
_processor.LastFullStateRequested = null;
_processor.OnFullStateReceived();
_timing.LastProcessedTick = curState.ToSequence;
DebugTools.Assert(curState.FromSequence == GameTick.Zero);
PartialStateReset(curState, true);
@@ -367,7 +384,7 @@ namespace Robust.Client.GameStates
if (_processor.WaitingForFull)
_timing.TickTimingAdjustment = 0f;
else
_timing.TickTimingAdjustment = (CurrentBufferSize - (float)TargetBufferSize) * 0.10f;
_timing.TickTimingAdjustment = (GetApplicableStateCount() - (float)TargetBufferSize) * 0.10f;
// If we are about to process another tick in the same frame, let's not bother unnecessarily running prediction ticks
// Really the main-loop ticking just needs to be more specialized for clients.
@@ -412,11 +429,11 @@ namespace Robust.Client.GameStates
}
}
public void RequestFullState(NetEntity? missingEntity = null)
public void RequestFullState(NetEntity? missingEntity = null, GameTick? tick = null)
{
_sawmill.Info("Requesting full server state");
_network.ClientSendMessage(new MsgStateRequestFull { Tick = _timing.LastRealTick , MissingEntity = missingEntity ?? NetEntity.Invalid });
_processor.RequestFullState();
_processor.OnFullStateRequested(tick ?? _timing.LastRealTick);
}

public void PredictTicks(GameTick predictionTarget)
@@ -1454,11 +1471,6 @@ namespace Robust.Client.GameStates

public bool IsQueuedForDetach(NetEntity entity)
=> _processor.IsQueuedForDetach(entity);

void IPostInjectInit.PostInject()
{
_sawmill = _logMan.GetSawmill(CVars.NetPredict.Name);
}
}

public sealed class GameStateAppliedArgs : EventArgs
@@ -1,44 +1,31 @@
using System;
using System.Collections.Generic;
using System.Diagnostics.CodeAnalysis;
using System.Runtime.InteropServices;
using Robust.Client.Timing;
using Robust.Shared.GameObjects;
using Robust.Shared.GameStates;
using Robust.Shared.IoC;
using Robust.Shared.Log;
using Robust.Shared.Network.Messages;
using Robust.Shared.Timing;
using Robust.Shared.Utility;

namespace Robust.Client.GameStates
{
/// <inheritdoc />
internal sealed class GameStateProcessor : IGameStateProcessor, IPostInjectInit
internal sealed class GameStateProcessor : IGameStateProcessor
{
[Dependency] private ILogManager _logMan = default!;
public const int MaxBufferSize = 512;

private readonly IClientGameTiming _timing;
private readonly IClientGameStateManager _state;
private readonly ISawmill _logger;

private readonly List<GameState> _stateBuffer = new();

private readonly Dictionary<GameTick, List<NetEntity>> _pvsDetachMessages = new();

private ISawmill _logger = default!;
private ISawmill _stateLogger = default!;

public GameState? LastFullState { get; private set; }
public bool WaitingForFull => LastFullStateRequested.HasValue;
public GameTick? LastFullStateRequested
{
get => _lastFullStateRequested;
set
{
_lastFullStateRequested = value;
LastFullState = null;
}
}

public GameTick? _lastFullStateRequested = GameTick.Zero;
public (GameTick Tick, DateTime Time)? LastFullStateRequested { get; private set; } = (GameTick.Zero, DateTime.MaxValue);

private int _bufferSize;
@@ -71,9 +58,12 @@ namespace Robust.Client.GameStates
/// Constructs a new instance of <see cref="GameStateProcessor"/>.
/// </summary>
/// <param name="timing">Timing information of the current state.</param>
public GameStateProcessor(IClientGameTiming timing)
/// <param name="state">The game state manager that owns this processor, used to re-request full states.</param>
public GameStateProcessor(IClientGameStateManager state, IClientGameTiming timing, ISawmill logger)
{
_timing = timing;
_state = state;
_logger = logger;
}
/// <inheritdoc />
@@ -83,7 +73,7 @@ namespace Robust.Client.GameStates
if (state.ToSequence <= _timing.LastRealTick)
{
if (Logging)
_stateLogger.Debug($"Received Old GameState: lastRealTick={_timing.LastRealTick}, fSeq={state.FromSequence}, tSeq={state.ToSequence}, sz={state.PayloadSize}, buf={_stateBuffer.Count}");
_logger.Debug($"Received Old GameState: lastRealTick={_timing.LastRealTick}, fSeq={state.FromSequence}, tSeq={state.ToSequence}, sz={state.PayloadSize}, buf={_stateBuffer.Count}");

return false;
}
@@ -95,7 +85,7 @@ namespace Robust.Client.GameStates
continue;

if (Logging)
_stateLogger.Debug($"Received Dupe GameState: lastRealTick={_timing.LastRealTick}, fSeq={state.FromSequence}, tSeq={state.ToSequence}, sz={state.PayloadSize}, buf={_stateBuffer.Count}");
_logger.Debug($"Received Dupe GameState: lastRealTick={_timing.LastRealTick}, fSeq={state.FromSequence}, tSeq={state.ToSequence}, sz={state.PayloadSize}, buf={_stateBuffer.Count}");

return false;
}
@@ -104,13 +94,13 @@ namespace Robust.Client.GameStates
if (!WaitingForFull)
{
// This is a good state that we will be using.
_stateBuffer.Add(state);
TryAdd(state);
if (Logging)
_stateLogger.Debug($"Received New GameState: lastRealTick={_timing.LastRealTick}, fSeq={state.FromSequence}, tSeq={state.ToSequence}, sz={state.PayloadSize}, buf={_stateBuffer.Count}");
_logger.Debug($"Received New GameState: lastRealTick={_timing.LastRealTick}, fSeq={state.FromSequence}, tSeq={state.ToSequence}, sz={state.PayloadSize}, buf={_stateBuffer.Count}");
return true;
}

if (LastFullState == null && state.FromSequence == GameTick.Zero && state.ToSequence >= LastFullStateRequested!.Value)
if (LastFullState == null && state.FromSequence == GameTick.Zero && state.ToSequence >= LastFullStateRequested!.Value.Tick)
{
LastFullState = state;

@@ -128,10 +118,44 @@ namespace Robust.Client.GameStates
return false;
}

_stateBuffer.Add(state);
TryAdd(state);
return true;
}

public void TryAdd(GameState state)
{
if (_stateBuffer.Count <= MaxBufferSize)
{
_stateBuffer.Add(state);
return;
}
// This can happen if a required state gets dropped somehow and the client keeps receiving future
// game states that they can't apply. I.e., GetApplicableStateCount() is zero, even though there are many
// states in the list.
//
// This can seemingly happen when the server sends "reliable" game states while the client is paused?
// For example, when debugging the client, while the server is running:
// - The client stops sending acks for states that the server sends out.
// - Thus the client will exceed the net.force_ack_threshold cvar
// - The server starts sending some packets "reliably" and just force updates the client's last ack.
//
// What should happen is that when the client resumes, it receives the reliably sent states and can just
// resume. However, even though the packets are sent "reliably", they just seem to get dropped.
// I don't quite understand how/why yet, but this ensures the client doesn't get stuck.
#if FULL_RELEASE
_logger.Warning(@$"Exceeded maximum state buffer size!
Tick: {_timing.CurTick}/{_timing.LastProcessedTick}/{_timing.LastRealTick}
Size: {_stateBuffer.Count}
Applicable states: {GetApplicableStateCount()}
Was waiting for full: {WaitingForFull} {LastFullStateRequested}
Had full state: {LastFullState != null}"
);
#endif

_state.RequestFullState();
}
/// <summary>
/// Attempts to get the current and next states to apply.
/// </summary>
@@ -152,7 +176,7 @@ namespace Robust.Client.GameStates
"Tried to apply a non-extrapolated state that has too high of a FromSequence!");

if (Logging)
_stateLogger.Debug($"Applying State: cTick={_timing.LastProcessedTick}, fSeq={curState.FromSequence}, tSeq={curState.ToSequence}, buf={_stateBuffer.Count}");
_logger.Debug($"Applying State: cTick={_timing.LastProcessedTick}, fSeq={curState.FromSequence}, tSeq={curState.ToSequence}, buf={_stateBuffer.Count}");
}

return applyNextState;
@@ -344,14 +368,20 @@ namespace Robust.Client.GameStates
{
_stateBuffer.Clear();
LastFullState = null;
LastFullStateRequested = GameTick.Zero;
LastFullStateRequested = (GameTick.Zero, DateTime.MaxValue);
}

public void RequestFullState()
public void OnFullStateRequested(GameTick tick)
{
_stateBuffer.Clear();
LastFullState = null;
LastFullStateRequested = _timing.LastRealTick;
LastFullStateRequested = (tick, DateTime.UtcNow);
}

public void OnFullStateReceived()
{
LastFullState = null;
LastFullStateRequested = null;
}

public void MergeImplicitData(Dictionary<NetEntity, Dictionary<ushort, ComponentState>> implicitData)
@@ -416,10 +446,11 @@ namespace Robust.Client.GameStates
return false;
}

public int CalculateBufferSize(GameTick fromTick)
public int GetApplicableStateCount(GameTick? fromTick = null)
{
fromTick ??= _timing.LastRealTick;
bool foundState;
var nextTick = fromTick;
var nextTick = fromTick.Value;

do
{
@@ -437,13 +468,9 @@ namespace Robust.Client.GameStates
}
while (foundState);

return (int) (nextTick.Value - fromTick.Value);
return (int) (nextTick.Value - fromTick.Value.Value);
}

void IPostInjectInit.PostInject()
{
_logger = _logMan.GetSawmill("net");
_stateLogger = _logMan.GetSawmill("net.state");
}
public int StateCount => _stateBuffer.Count;
}
}
@@ -32,7 +32,15 @@ namespace Robust.Client.GameStates
/// <summary>
/// Number of applicable game states currently in the state buffer.
/// </summary>
int CurrentBufferSize { get; }
int GetApplicableStateCount();

[Obsolete("use GetApplicableStateCount()")]
int CurrentBufferSize => GetApplicableStateCount();

/// <summary>
/// Total number of game states currently in the state buffer.
/// </summary>
int StateCount { get; }

/// <summary>
/// If the buffer size is this many states larger than the target buffer size,
@@ -91,7 +99,7 @@ namespace Robust.Client.GameStates
/// <summary>
/// Requests a full state from the server. This should override even implicit entity data.
/// </summary>
void RequestFullState(NetEntity? missingEntity = null);
void RequestFullState(NetEntity? missingEntity = null, GameTick? tick = null);

uint SystemMessageDispatched<T>(T message) where T : EntityEventArgs;
@@ -96,7 +96,7 @@ namespace Robust.Client.GameStates
/// This includes only applicable states. If there is a gap, states after the gap are not included.
/// </summary>
/// <param name="fromTick">The tick to calculate from.</param>
int CalculateBufferSize(GameTick fromTick);
int GetApplicableStateCount(GameTick? fromTick);
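A small illustration of the gap rule with hypothetical tick numbers (not taken from this commit):

// Client is at LastRealTick = 10; the buffer holds states ending at ticks 11, 12 and 15.
// GetApplicableStateCount() == 2   -> only the tick-11 and tick-12 states can be applied back to back.
// The tick-15 state is not counted because ticks 13 and 14 are missing, even though StateCount == 3.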
bool TryGetLastServerStates(NetEntity entity,
[NotNullWhen(true)] out Dictionary<ushort, ComponentState>? dictionary);

@@ -68,7 +68,7 @@ namespace Robust.Client.GameStates
var lag = _netManager.ServerChannel!.Ping;

// calc interp info
var buffer = _gameStateManager.CurrentBufferSize;
var buffer = _gameStateManager.GetApplicableStateCount();

_totalHistoryPayload += sz;
_history.Add((toSeq, sz, lag, buffer));
@@ -268,7 +268,7 @@ namespace Robust.Client.GameStates
handle.DrawString(_font, new Vector2(LeftMargin + width, lastLagY), $"{lastLagMs.ToString()}ms");

// buffer text
handle.DrawString(_font, new Vector2(LeftMargin, height + LowerGraphOffset), $"{_gameStateManager.CurrentBufferSize.ToString()} states");
handle.DrawString(_font, new Vector2(LeftMargin, height + LowerGraphOffset), $"{_gameStateManager.GetApplicableStateCount().ToString()} states");
}

protected override void DisposeBehavior()
@@ -35,10 +35,16 @@ internal sealed partial class PvsSystem : EntitySystem
public const float ChunkSize = 8;

// TODO make this a cvar. Make it in terms of seconds and tie it to tick rate?
// Main issue is that I CBF figuring out the logic for handling it changing mid-game.
public const int DirtyBufferSize = 20;
// Note: If a client has ping higher than TickBuffer / TickRate, then the server will treat every entity as if it
// had entered PVS for the first time. Note that due to the PVS budget, this buffer is easily overwhelmed.
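Rough numbers to make that note concrete, assuming "TickBuffer" refers to DirtyBufferSize above and a hypothetical 30 Hz tick rate (the tick rate is not part of this diff):

// DirtyBufferSize / tickRate = 20 / 30 ≈ 0.67 s of per-client sent-entity history.
// A client whose acks lag by more than roughly 670 ms falls outside that window, so the server
// falls back to treating every entity as if it had just entered that client's PVS.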
/// <summary>
/// See <see cref="CVars.NetForceAckThreshold"/>.
/// </summary>
public int ForceAckThreshold { get; private set; }

/// <summary>
/// Maximum number of pooled objects
/// </summary>
@@ -139,6 +145,7 @@ internal sealed partial class PvsSystem : EntitySystem

_configManager.OnValueChanged(CVars.NetPVS, SetPvs, true);
_configManager.OnValueChanged(CVars.NetMaxUpdateRange, OnViewsizeChanged, true);
_configManager.OnValueChanged(CVars.NetForceAckThreshold, OnForceAckChanged, true);

_serverGameStateManager.ClientAck += OnClientAck;
_serverGameStateManager.ClientRequestFull += OnClientRequestFull;
@@ -156,6 +163,7 @@ internal sealed partial class PvsSystem : EntitySystem

_configManager.UnsubValueChanged(CVars.NetPVS, SetPvs);
_configManager.UnsubValueChanged(CVars.NetMaxUpdateRange, OnViewsizeChanged);
_configManager.UnsubValueChanged(CVars.NetForceAckThreshold, OnForceAckChanged);

_serverGameStateManager.ClientAck -= OnClientAck;
_serverGameStateManager.ClientRequestFull -= OnClientRequestFull;
@@ -199,7 +207,7 @@ internal sealed partial class PvsSystem : EntitySystem
// return last acked to pool, but only if it is not still in the OverflowDictionary.
if (sessionData.LastAcked != null && !sessionData.SentEntities.ContainsKey(sessionData.LastAcked.Value.Tick))
{
DebugTools.Assert(sessionData.SentEntities.Values.Contains(sessionData.LastAcked.Value.Data));
DebugTools.Assert(!sessionData.SentEntities.Values.Contains(sessionData.LastAcked.Value.Data));
_visSetPool.Return(sessionData.LastAcked.Value.Data);
}

@@ -212,6 +220,11 @@ internal sealed partial class PvsSystem : EntitySystem
_viewSize = obj * 2;
}

private void OnForceAckChanged(int value)
{
ForceAckThreshold = value;
}

private void SetPvs(bool value)
{
_seenAllEnts.Clear();

@@ -347,7 +347,7 @@ Oldest acked clients: {string.Join(", ", players)}
(entStates, deletions, fromTick) = _pvs.GetAllEntityStates(session, lastAck, _gameTiming.CurTick);
}

var playerStates = _playerManager.GetPlayerStates(lastAck);
var playerStates = _playerManager.GetPlayerStates(fromTick);

// lastAck varies with each client based on lag and such, we can't just make 1 global state and send it to everyone
var lastInputCommand = inputSystem.GetLastInputCommand(session);
@@ -368,11 +368,30 @@ Oldest acked clients: {string.Join(", ", players)}
stateUpdateMessage.State = state;
stateUpdateMessage.CompressionContext = resources.CompressionContext;

_networkManager.ServerSendMessage(stateUpdateMessage, channel);
// If the state is too big we let Lidgren send it reliably. This is to avoid a situation where a state is so
// large that it (or part of it) consistently gets dropped. When we send reliably, we immediately update the
// ack so that the next state will not also be huge.
//
// We also do this if the client's last ack is too old. This helps prevent things like the entity deletion
// history from becoming too bloated if a bad client fails to send acks for whatever reason.

if (_gameTiming.CurTick.Value > lastAck.Value + _pvs.ForceAckThreshold)
{
stateUpdateMessage.ForceSendReliably = true;

// Aside from the time shortly after connecting, this shouldn't be common. If it is happening,
// something is probably wrong (or we have a malicious client). Hence we log an error.
// If it is more frequent than I think, this can be downgraded to a warning.

#if FULL_RELEASE
var connectedTime = (DateTime.UtcNow - session.ConnectedTime).TotalMinutes;
if (lastAck > GameTick.Zero && connectedTime > 1)
_logger.Error($"Client {session} exceeded ack-tick threshold. Last ack: {lastAck}. Cur tick: {_gameTiming.CurTick}. Connect time: {connectedTime} minutes");
#endif
}

_networkManager.ServerSendMessage(stateUpdateMessage, channel);

if (stateUpdateMessage.ShouldSendReliably())
{
sessionData.LastReceivedAck = _gameTiming.CurTick;
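Back-of-the-envelope numbers for the threshold check above, using the default of 60 from the net.force_ack_threshold cvar added further down and an assumed 30 Hz server tick rate (the tick rate is not part of this diff):

// lastAck trailing CurTick by more than 60 ticks is about 60 / 30 = 2 seconds without acks.
// Past that point the state is flagged ForceSendReliably, Lidgren delivers it reliably, and
// sessionData.LastReceivedAck is bumped to CurTick so the next state diffs from a recent tick.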
@@ -172,6 +172,13 @@ namespace Robust.Shared
public static readonly CVarDef<float> NetMaxUpdateRange =
CVarDef.Create("net.maxupdaterange", 12.5f, CVar.ARCHIVE | CVar.REPLICATED | CVar.SERVER);

/// <summary>
/// Maximum allowed delay between the current tick and a client's last acknowledged tick before we send the
/// next game state reliably and simply force update the acked tick.
/// </summary>
public static readonly CVarDef<int> NetForceAckThreshold =
CVarDef.Create("net.force_ack_threshold", 60, CVar.ARCHIVE | CVar.SERVERONLY);
/// <summary>
/// This limits the number of new entities that can be sent to a client in a single game state. This exists to
/// avoid stuttering on the client when it has to spawn a bunch of entities in a single tick. If ever entity

@@ -95,6 +95,8 @@ namespace Robust.Shared.Network.Messages
MsgSize = buffer.LengthBytes;
}

public bool ForceSendReliably;

/// <summary>
/// Whether this state message is large enough to warrant being sent reliably.
/// This is only valid after
@@ -103,7 +105,7 @@ namespace Robust.Shared.Network.Messages
public bool ShouldSendReliably()
{
DebugTools.Assert(_hasWritten, "Attempted to determine sending method before determining packet size.");
return MsgSize > ReliableThreshold;
return ForceSendReliably || MsgSize > ReliableThreshold;
}

public override NetDeliveryMethod DeliveryMethod
@@ -3,6 +3,7 @@ using NUnit.Framework;
using Robust.Client.GameStates;
using Robust.Client.Timing;
using Robust.Shared.GameStates;
using Robust.Shared.Log;
using Robust.Shared.Timing;

namespace Robust.UnitTesting.Client.GameStates
@@ -17,7 +18,9 @@ namespace Robust.UnitTesting.Client.GameStates
timingMock.SetupProperty(p => p.CurTick);

var timing = timingMock.Object;
var processor = new GameStateProcessor(timing);
var managerMock = new Mock<IClientGameStateManager>();
var logMock = new Mock<ISawmill>();
var processor = new GameStateProcessor(managerMock.Object, timing, logMock.Object);
processor.Interpolation = true;

processor.AddNewState(GameStateFactory(0, 1));
@@ -37,7 +40,9 @@ namespace Robust.UnitTesting.Client.GameStates
timingMock.SetupProperty(p => p.CurTick);

var timing = timingMock.Object;
var processor = new GameStateProcessor(timing);
var managerMock = new Mock<IClientGameStateManager>();
var logMock = new Mock<ISawmill>();
var processor = new GameStateProcessor(managerMock.Object, timing, logMock.Object);

processor.AddNewState(GameStateFactory(0, 1));
processor.AddNewState(GameStateFactory(1, 2));
@@ -64,7 +69,9 @@ namespace Robust.UnitTesting.Client.GameStates
timingMock.SetupProperty(p => p.CurTick);

var timing = timingMock.Object;
var processor = new GameStateProcessor(timing);
var managerMock = new Mock<IClientGameStateManager>();
var logMock = new Mock<ISawmill>();
var processor = new GameStateProcessor(managerMock.Object, timing, logMock.Object);

processor.AddNewState(GameStateFactory(0, 1));
processor.AddNewState(GameStateFactory(1, 2));
@@ -177,13 +184,15 @@ namespace Robust.UnitTesting.Client.GameStates
timingMock.SetupProperty(p => p.TickTimingAdjustment);

var timing = timingMock.Object;
var processor = new GameStateProcessor(timing);
var managerMock = new Mock<IClientGameStateManager>();
var logMock = new Mock<ISawmill>();
var processor = new GameStateProcessor(managerMock.Object, timing, logMock.Object);

processor.AddNewState(GameStateFactory(0, 1));
processor.AddNewState(GameStateFactory(1, 2));
processor.AddNewState(GameStateFactory(2, 3)); // buffer is now full, otherwise cannot calculate states.

processor.LastFullStateRequested = null;
processor.OnFullStateReceived();
timing.LastProcessedTick = timing.LastRealTick = new GameTick(1);

return (timing, processor);

@@ -830,7 +830,6 @@ namespace Robust.UnitTesting
ClientIoC.RegisterIoC(GameController.DisplayMode.Headless, deps);
deps.Register<INetManager, IntegrationNetManager>(true);
deps.Register<IClientNetManager, IntegrationNetManager>(true);
deps.Register<IGameStateProcessor, GameStateProcessor>(true);
deps.Register<IClientGameTiming, ClientGameTiming>(true);
deps.Register<IntegrationNetManager, IntegrationNetManager>(true);
deps.Register<IModLoader, TestingModLoader>(true);