feature/streamline logging and exception messages (#257)
* fix: make log message string interpolated

* streamline log and exception messages
chr-ber authored Feb 7, 2024
1 parent c94f1a1 commit 5d8f3ff
Showing 9 changed files with 32 additions and 32 deletions.
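For context, the convention this commit converges on is sketched below in illustrative form (the `SampleLeafReader` class and its `Describe` method are made up for this example and do not exist in the repository): structured-logging placeholders stay as named templates so the value is still captured as a log property, but they are wrapped in single quotes and the message ends with a period, while exception messages use C# string interpolation so the offending value actually appears in the thrown message.

```csharp
using Microsoft.Extensions.Logging;

// Illustrative only: this type is not part of the NuGetTrends codebase.
public class SampleLeafReader(ILogger<SampleLeafReader> logger)
{
    public string Describe(string leafUrl, object? leaf)
    {
        // Structured logging: keep the named {leafUrl} placeholder (it becomes a
        // log property), quote it in the template, and end the message with a period.
        logger.LogDebug("Downloading '{leafUrl}' as a byte array.", leafUrl);

        if (leaf is null)
        {
            // Exception messages are plain strings, so use $"" interpolation;
            // a bare {leafUrl} placeholder here would be thrown literally.
            throw new InvalidOperationException($"Leaf URL '{leafUrl}' didn't return a valid leaf object.");
        }

        return leaf.ToString() ?? string.Empty;
    }
}
```

The distinction matters in the diff below: one exception message in GetAndValidateLeafAsync had a template-style placeholder without the `$` prefix, so it would have been thrown literally as `{leafUrl}`; the commit adds the missing interpolation.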
14 changes: 7 additions & 7 deletions src/NuGet.Protocol.Catalog/CatalogClient.cs
@@ -20,7 +20,7 @@ public Task<CatalogIndex> GetIndexAsync(string indexUrl, CancellationToken token
var index = DeserializeUrlAsync<CatalogIndex>(indexUrl, token);
if (index is null)
{
- throw new InvalidOperationException($"Url {indexUrl} didn't return a CatalogIndex.");
+ throw new InvalidOperationException($"URL '{indexUrl}' didn't return a CatalogIndex.");
}

return index!;
@@ -31,7 +31,7 @@ public Task<CatalogPage> GetPageAsync(string pageUrl, CancellationToken token
var page = DeserializeUrlAsync<CatalogPage>(pageUrl, token);
if (page is null)
{
- throw new InvalidOperationException($"Url {pageUrl} didn't return a CatalogPage.");
+ throw new InvalidOperationException($"URL '{pageUrl}' didn't return a CatalogPage.");
}

return page!;
@@ -41,7 +41,7 @@ public async Task<CatalogLeaf> GetLeafAsync(string leafUrl, CancellationToken to
{
// Buffer all of the JSON so we can parse twice. Once to determine the leaf type and once to deserialize
// the entire thing to the proper leaf type.
- _logger.LogDebug("Downloading {leafUrl} as a byte array.", leafUrl);
+ _logger.LogDebug("Downloading '{leafUrl}' as a byte array.", leafUrl);
var jsonBytes = await _httpClient.GetByteArrayAsync(leafUrl);
var untypedLeaf = DeserializeBytes<CatalogLeaf>(jsonBytes);

@@ -67,12 +67,12 @@ public async Task<CatalogLeaf> GetAndValidateLeafAsync<T>(CatalogLeafType type,
{ "leafUrl", leafUrl},
}))
{
- _logger.LogInformation("Getting package leaf: {type}, {leafUrl}", type, leafUrl);
+ _logger.LogInformation("Getting package leaf of '{type}' type for leaf URL '{leafUrl}'.", type, leafUrl);
var leaf = await DeserializeUrlAsync<T>(leafUrl, token);

if (leaf is null)
{
- throw new InvalidOperationException("Leaf URL: {leafUrl} didn't return a valid leaf object.");
+ throw new InvalidOperationException($"Leaf URL '{leafUrl}' didn't return a valid leaf object.");
}

if (leaf.Type != type)
@@ -93,7 +93,7 @@ private static T DeserializeBytes<T>(byte[] jsonBytes)
var result = JsonSerializer.Deserialize<T>(jsonReader);
if (result == null)
{
- throw new InvalidOperationException("Deserialization resulted in null");
+ throw new InvalidOperationException("Deserialization resulted in null.");
}

return result;
@@ -102,7 +102,7 @@ private static T DeserializeBytes<T>(byte[] jsonBytes)
private async Task<T?> DeserializeUrlAsync<T>(string documentUrl, CancellationToken token)
where T : class
{
- _logger.LogDebug("Downloading {documentUrl} as a stream.", documentUrl);
+ _logger.LogDebug("Downloading '{documentUrl}' as a stream.", documentUrl);

using var response = await _httpClient.GetAsync(documentUrl, token);
await using var stream = await response.Content.ReadAsStreamAsync();
12 changes: 6 additions & 6 deletions src/NuGet.Protocol.Catalog/CatalogProcessor.cs
@@ -63,7 +63,7 @@ public async Task ProcessAsync(CancellationToken token)
var minCommitTimestamp = await GetMinCommitTimestamp(token);

_logger.LogInformation(
- "Using time bounds {min:O} (exclusive) to {max:O} (inclusive).",
+ "Using time bounds '{min:O}' (exclusive) to '{max:O}' (inclusive).",
minCommitTimestamp,
_settings.MaxCommitTimestamp);

@@ -82,7 +82,7 @@ private async Task ProcessIndexAsync(string catalogIndexUrl, DateTimeOffset minC
_settings.MaxCommitTimestamp);

_logger.LogInformation(
- "{pages} pages were in the time bounds, out of {totalPages}.",
+ "'{pages}' out of '{totalPages}' pages were in the time bounds.",
pageItems.Count,
index.Items.Count);

@@ -107,7 +107,7 @@ private async Task ProcessPageAsync(DateTimeOffset minCommitTimestamp, CatalogPa
SentrySdk.GetSpan()?.SetTag("leafItemsCount", leafItems.Count.ToString());

_logger.LogInformation(
- "On page {page}, {leaves} out of {totalLeaves} were in the time bounds.",
+ "On page '{page}', '{leaves}' out of '{totalLeaves}' were in the time bounds.",
pageItem.Url,
leafItems.Count,
page.Items.Count);
@@ -145,12 +145,12 @@ private async Task ProcessPageAsync(DateTimeOffset minCommitTimestamp, CatalogPa
else
{
// Lots of null leafs
- _logger.LogInformation("Unsupported leaf type: {type}.", task.Result?.GetType());
+ _logger.LogInformation("Unsupported leaf type '{type}'.", task.Result?.GetType());
}
}
catch (Exception e)
{
- _logger.LogError(e, "Failed to process {result}.", task.Result);
+ _logger.LogError(e, "Failed to process '{result}'.", task.Result);
}
}

@@ -188,7 +188,7 @@ private async Task<DateTimeOffset> GetMinCommitTimestamp(CancellationToken token

private async Task<string> GetCatalogIndexUrlAsync(CancellationToken token)
{
- _logger.LogInformation("Getting catalog index URL from {serviceIndexUrl}.", _settings.ServiceIndexUrl);
+ _logger.LogInformation("Getting catalog index URL from '{serviceIndexUrl}'.", _settings.ServiceIndexUrl);
var sourceRepository = Repository.Factory.GetCoreV3(_settings.ServiceIndexUrl, FeedType.HttpV3);
var serviceIndexResource = await sourceRepository.GetResourceAsync<ServiceIndexResourceV3>(token);
var catalogIndexUrl = serviceIndexResource.GetServiceEntryUri(CatalogResourceType)?.AbsoluteUri;
4 changes: 2 additions & 2 deletions src/NuGetTrends.Data/BasePostgresContext.cs
@@ -67,12 +67,12 @@ private void ConvertToSnake(INpgsqlNameTranslator mapper, object entity)
var dbName = indexKey.GetDatabaseName();
if (string.IsNullOrWhiteSpace(dbName))
{
- throw new InvalidOperationException("Can't adjust casing, missing DB name");
+ throw new InvalidOperationException("Can't adjust casing, missing DB name.");
}
indexKey.SetDatabaseName(ConvertKeyToSnake(mapper, dbName));
break;
default:
- throw new NotImplementedException("Unexpected type was provided to snake case converter");
+ throw new NotImplementedException($"Unexpected type '{entity.GetType().FullName}' was provided to snake case converter.");
}
}

2 changes: 1 addition & 1 deletion src/NuGetTrends.Data/ConfigurationExtension.cs
@@ -9,7 +9,7 @@ public static string GetNuGetTrendsConnectionString(this IConfiguration configur
var connString = configuration.GetConnectionString("NuGetTrends");
if (string.IsNullOrWhiteSpace(connString))
{
- throw new InvalidOperationException("No connection string available for NuGetTrends");
+ throw new InvalidOperationException("No connection string available for section NuGetTrends.");
}

return connString;
2 changes: 1 addition & 1 deletion src/NuGetTrends.Scheduler/CatalogCursorStore.cs
@@ -16,7 +16,7 @@ public async Task SetAsync(DateTimeOffset value, CancellationToken token)
var cursor = await context.Cursors.FindAsync(_id, token);
if (cursor is null)
{
- throw new InvalidOperationException($"Expected to find a cursor named '{CursorId}'");
+ throw new InvalidOperationException($"Expected to find a cursor named '{CursorId}'.");
}
cursor.Value = value;
context.Update(cursor);
4 changes: 2 additions & 2 deletions src/NuGetTrends.Scheduler/CatalogLeafProcessor.cs
@@ -48,13 +48,13 @@ public async Task ProcessPackageDeleteAsync(PackageDeleteCatalogLeaf leaf, Cance
var deleted = await deletedItems.ToListAsync(token);
if (deleted.Count == 0)
{
- _logger.LogDebug("Deleted event but not found with: {Id}, {Version}", leaf.PackageId, leaf.PackageVersion);
+ _logger.LogDebug("Deleted event but not found for '{id}' and '{version}'", leaf.PackageId, leaf.PackageVersion);
}
else
{
if (deleted.Count > 1)
{
- _logger.LogError("Expected 1 item but found {count} for {id} and {version}.",
+ _logger.LogError("Expected 1 item but found '{count}' for '{id}' and '{version}'.",
deleted.Count,
leaf.PackageId,
leaf.PackageVersion);
6 changes: 3 additions & 3 deletions src/NuGetTrends.Scheduler/DailyDownloadPackageIdPublisher.cs
@@ -35,7 +35,7 @@ public async Task Import(IJobCancellationToken token)
autoDelete: false,
arguments: null);

- logger.LogDebug("Queue creation OK with {QueueName}, {ConsumerCount}, {MessageCount}",
+ logger.LogDebug("Queue creation OK, with '{QueueName}', '{ConsumerCount}', '{MessageCount}'",
queueDeclareOk.QueueName, queueDeclareOk.ConsumerCount, queueDeclareOk.MessageCount);

var properties = channel.CreateBasicProperties();
@@ -60,7 +60,7 @@ public async Task Import(IJobCancellationToken token)

var batchSize = 25; // TODO: Configurable
var processBatchSpan = queueIdsSpan.StartChild("db.read",
- $"Go through reader and queue in batches of {batchSize} ids");
+ $"Go through reader and queue in batches of '{batchSize}' ids.");
var batch = new List<string>(batchSize);
while (await reader.ReadAsync())
{
@@ -91,7 +91,7 @@ public async Task Import(IJobCancellationToken token)
}
finally
{
- logger.LogInformation("Finished publishing messages. Messages queued: {count}", messageCount);
+ logger.LogInformation("Finished publishing messages. '{count}' messages queued.", messageCount);
}
transaction.Finish(SpanStatus.Ok);
}
18 changes: 9 additions & 9 deletions src/NuGetTrends.Scheduler/DailyDownloadWorker.cs
@@ -20,7 +20,7 @@ public class DailyDownloadWorker : IHostedService
private readonly INuGetSearchService _nuGetSearchService;
private readonly ILogger<DailyDownloadWorker> _logger;
private readonly CancellationTokenSource _cancellationTokenSource = new();
- private readonly ConcurrentBag<(IModel,IConnection)> _connections = new();
+ private readonly ConcurrentBag<(IModel, IConnection)> _connections = new();

private readonly List<Task> _workers;

@@ -78,14 +78,14 @@ public Task StartAsync(CancellationToken cancellationToken)
if (attempt == maxAttempts)
{
connectSpan.Finish(e);
- _logger.LogCritical(e, "Couldn't connect to the broker. Attempts: {attempts}",
+ _logger.LogCritical(e, "Couldn't connect to the broker. Attempt '{attempts}'.",
attempt);
throw;
}
var waitMs = attempt * 10000;
_logger.LogInformation(e,
- "Failed to connect to the broker. Waiting for {waitMs} milliseconds. Attempt {attempts}",
+ "Failed to connect to the broker. Waiting for '{waitMs}' milliseconds. Attempt '{attempts}'.",
waitMs, attempt);
await Task.Delay(waitMs, cancellationToken);
connectSpan.SetTag("waited_ms", waitMs.ToString());
@@ -125,7 +125,7 @@ private void Connect()
autoDelete: false,
arguments: null);

- _logger.LogDebug("Queue creation OK with {QueueName}, {ConsumerCount}, {MessageCount}",
+ _logger.LogDebug("Queue creation OK with '{QueueName}', '{ConsumerCount}' and '{MessageCount}'",
queueDeclareOk.QueueName, queueDeclareOk.ConsumerCount, queueDeclareOk.MessageCount);

var consumer = new AsyncEventingBasicConsumer(channel);
@@ -141,7 +141,7 @@ private void Connect()

defaultConsumer.Received += (s, e) =>
{
- _logger.LogWarning("DefaultConsumer fired: {message}", Convert.ToBase64String(e.Body.ToArray()));
+ _logger.LogWarning("DefaultConsumer fired message '{message}'.", Convert.ToBase64String(e.Body.ToArray()));
};

channel.DefaultConsumer = defaultConsumer;
@@ -156,15 +156,15 @@ private async Task OnConsumerOnReceived(object sender, BasicDeliverEventArgs ea)
try
{
var body = ea.Body;
- _logger.LogDebug("Received message with body size: {size}", body.Length);
+ _logger.LogDebug("Received message with body size '{size}'.", body.Length);
SentrySdk.ConfigureScope(s => s.SetTag("msg_size", body.Length.ToString()));
var deserializationSpan = batchProcessSpan.StartChild("json.deserialize");
packageIds = MessagePackSerializer.Deserialize<List<string>>(body);
deserializationSpan.Finish(SpanStatus.Ok);

if (packageIds == null)
{
- throw new InvalidOperationException($"Deserializing {body} resulted in a null reference.");
+ throw new InvalidOperationException($"Deserializing '{body}' resulted in a null reference.");
}

var updateCountSpan = batchProcessSpan.StartChild("update.download.count", "Updates the DB with the current daily downloads");
@@ -231,7 +231,7 @@ private async Task UpdateDownloadCount(IList<string> packageIds, ISpan parentSpa
{
// All versions are unlisted or:
// "This package has been deleted from the gallery. It is no longer available for install/restore."
- _logger.LogInformation("Package deleted: {packageId}", expectedPackageId);
+ _logger.LogInformation("Package with id '{packageId}' deleted.", expectedPackageId);
await RemovePackage(context, expectedPackageId, _cancellationTokenSource.Token);
}
else
@@ -299,7 +299,7 @@ private async Task RemovePackage(NuGetTrendsContext context, string packageId, C
if (package.Count == 0)
{
// This happens a lot.
- _logger.LogInformation("Package with Id {packageId} not found!.", packageId);
+ _logger.LogInformation("Package with id '{packageId}' not found!.", packageId);
return;
}

2 changes: 1 addition & 1 deletion src/NuGetTrends.Scheduler/NuGetSearchService.cs
@@ -37,7 +37,7 @@ public class NuGetSearchService(ILogger<NuGetSearchService> logger) : INuGetSear

if (package == null)
{
- logger.LogDebug("Package {packageId} not found.", packageId);
+ logger.LogDebug("Package with id '{packageId}' not found.", packageId);
}

return package;
