diff --git a/LinkCrawler/LinkCrawler.Tests/LinkCrawler.Tests.csproj b/LinkCrawler/LinkCrawler.Tests/LinkCrawler.Tests.csproj
index 2f7ce1b..4cdd9fb 100644
--- a/LinkCrawler/LinkCrawler.Tests/LinkCrawler.Tests.csproj
+++ b/LinkCrawler/LinkCrawler.Tests/LinkCrawler.Tests.csproj
@@ -30,6 +30,15 @@
4
+
+ ..\packages\AutoFixture.4.5.0\lib\net452\AutoFixture.dll
+
+
+ ..\packages\AutoFixture.AutoMoq.4.5.0\lib\net452\AutoFixture.AutoMoq.dll
+
+
+ ..\packages\Fare.2.1.1\lib\net35\Fare.dll
+
..\packages\Moq.4.2.1510.2205\lib\net40\Moq.dll
True
@@ -39,6 +48,7 @@
True
+
@@ -50,9 +60,11 @@
+
-
+
+
@@ -60,13 +72,18 @@
-
+
+
+
{db53303b-f9fb-4d77-b656-d05db0420e6a}
LinkCrawler
+
+
+
+
-
+
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/LinkCrawler/LinkCrawler/LinkCrawler.cs b/LinkCrawler/LinkCrawler/LinkCrawler.cs
index 1751888..5e3378a 100644
--- a/LinkCrawler/LinkCrawler/LinkCrawler.cs
+++ b/LinkCrawler/LinkCrawler/LinkCrawler.cs
@@ -7,7 +7,8 @@
using RestSharp;
using System;
using System.Collections.Generic;
-using LinkCrawler.Utils.Outputs;
+using System.Diagnostics;
+using System.Linq;
namespace LinkCrawler
{
@@ -19,8 +20,9 @@ public class LinkCrawler
public IEnumerable Outputs { get; set; }
public IValidUrlParser ValidUrlParser { get; set; }
public bool OnlyReportBrokenLinksToOutput { get; set; }
- public static List VisitedUrlList { get; set; }
+ public static List UrlList;
private ISettings _settings;
+ private Stopwatch _timer;
public LinkCrawler(IEnumerable outputs, IValidUrlParser validUrlParser, ISettings settings)
{
@@ -28,14 +30,17 @@ public LinkCrawler(IEnumerable outputs, IValidUrlParser validUrlParser,
Outputs = outputs;
ValidUrlParser = validUrlParser;
CheckImages = settings.CheckImages;
- VisitedUrlList = new List();
+ UrlList = new List();
RestRequest = new RestRequest(Method.GET).SetHeader("Accept", "*/*");
OnlyReportBrokenLinksToOutput = settings.OnlyReportBrokenLinksToOutput;
_settings = settings;
+ _timer = new Stopwatch();
}
public void Start()
{
+ _timer.Start();
+ UrlList.Add(new LinkModel(BaseUrl));
SendRequest(BaseUrl);
}
@@ -68,10 +73,13 @@ public void CrawlForLinksInResponse(IResponseModel responseModel)
foreach (var url in linksFoundInMarkup)
{
- if (VisitedUrlList.Contains(url))
- continue;
+ lock (UrlList)
+ {
+ if (UrlList.Where(l => l.Address == url).Count() > 0)
+ continue;
- VisitedUrlList.Add(url);
+ UrlList.Add(new LinkModel(url));
+ }
SendRequest(url, responseModel.RequestedUrl);
}
}
@@ -92,6 +100,54 @@ public void WriteOutput(IResponseModel responseModel)
output.WriteInfo(responseModel);
}
}
+
+ CheckIfFinal(responseModel);
+ }
+
+ private void CheckIfFinal(IResponseModel responseModel)
+ {
+ lock (UrlList)
+ {
+
+ // First set the status code for the completed link (this will set "CheckingFinished" to true)
+ foreach (LinkModel lm in UrlList.Where(l => l.Address == responseModel.RequestedUrl))
+ {
+ lm.StatusCode = responseModel.StatusCodeNumber;
+ }
+
+ // Then check to see whether there are any pending links left to check
+ if ((UrlList.Count > 1) && (UrlList.Where(l => l.CheckingFinished == false).Count() == 0))
+ {
+ FinaliseSession();
+ }
+ }
+ }
+
+ private void FinaliseSession()
+ {
+ _timer.Stop();
+ if (_settings.PrintSummary)
+ {
+ List messages = new List();
+ messages.Add(""); // add blank line to differentiate summary from main output
+
+ messages.Add("Processing complete. Checked " + UrlList.Count() + " links in " + _timer.ElapsedMilliseconds.ToString() + "ms");
+
+ messages.Add("");
+ messages.Add(" Status | # Links");
+ messages.Add(" -------+--------");
+
+ IEnumerable> StatusSummary = UrlList.GroupBy(link => link.StatusCode, link => link.Address);
+ foreach(IGrouping statusGroup in StatusSummary)
+ {
+ messages.Add(String.Format(" {0} | {1,5}", statusGroup.Key, statusGroup.Count()));
+ }
+
+ foreach (var output in Outputs)
+ {
+ output.WriteInfo(messages.ToArray());
+ }
+ }
}
}
}
\ No newline at end of file
diff --git a/LinkCrawler/LinkCrawler/LinkCrawler.csproj b/LinkCrawler/LinkCrawler/LinkCrawler.csproj
index 56ff5bc..210c55a 100644
--- a/LinkCrawler/LinkCrawler/LinkCrawler.csproj
+++ b/LinkCrawler/LinkCrawler/LinkCrawler.csproj
@@ -77,12 +77,15 @@
+
+
+
diff --git a/LinkCrawler/LinkCrawler/Models/LinkModel.cs b/LinkCrawler/LinkCrawler/Models/LinkModel.cs
new file mode 100644
index 0000000..2f246fd
--- /dev/null
+++ b/LinkCrawler/LinkCrawler/Models/LinkModel.cs
@@ -0,0 +1,35 @@
+using System;
+using System.Collections.Generic;
+using System.Linq;
+using System.Text;
+using System.Threading.Tasks;
+
+namespace LinkCrawler.Models
+{
+ public class LinkModel
+ {
+ public string Address { get; private set; }
+ public bool CheckingFinished { get; private set; }
+ private int _statusCode;
+
+ public int StatusCode
+ {
+ get
+ {
+ return _statusCode;
+ }
+ set
+ {
+ _statusCode = value;
+ CheckingFinished = true;
+ }
+ }
+
+ public LinkModel (string address)
+ {
+ Address = address;
+ CheckingFinished = false;
+ }
+
+ }
+}
diff --git a/LinkCrawler/LinkCrawler/Program.cs b/LinkCrawler/LinkCrawler/Program.cs
index 824e4af..1c4ca96 100644
--- a/LinkCrawler/LinkCrawler/Program.cs
+++ b/LinkCrawler/LinkCrawler/Program.cs
@@ -1,6 +1,8 @@
using LinkCrawler.Utils;
using StructureMap;
using System;
+using LinkCrawler.Utils.Parsers;
+using LinkCrawler.Utils.Settings;
namespace LinkCrawler
{
@@ -8,9 +10,18 @@ class Program
{
static void Main(string[] args)
{
+
using (var container = Container.For())
{
var linkCrawler = container.GetInstance();
+ if (args.Length > 0)
+ {
+ string parsed;
+ var validUrlParser = new ValidUrlParser(new Settings());
+ var result = validUrlParser.Parse(args[0], out parsed);
+ if(result)
+ linkCrawler.BaseUrl = parsed;
+ }
linkCrawler.Start();
Console.Read();
}
diff --git a/LinkCrawler/LinkCrawler/Utils/Extensions/StringExtensions.cs b/LinkCrawler/LinkCrawler/Utils/Extensions/StringExtensions.cs
index d9298ca..658976e 100644
--- a/LinkCrawler/LinkCrawler/Utils/Extensions/StringExtensions.cs
+++ b/LinkCrawler/LinkCrawler/Utils/Extensions/StringExtensions.cs
@@ -20,5 +20,15 @@ public static bool ToBool(this string str)
bool.TryParse(str, out parsed);
return parsed;
}
+
+ public static string TrimEnd(this string input, string suffixToRemove)
+ {
+ if (input != null && suffixToRemove != null
+ && input.EndsWith(suffixToRemove))
+ {
+ return input.Substring(0, input.Length - suffixToRemove.Length);
+ }
+ return input;
+ }
}
}
diff --git a/LinkCrawler/LinkCrawler/Utils/Extensions/UriExtensions.cs b/LinkCrawler/LinkCrawler/Utils/Extensions/UriExtensions.cs
new file mode 100644
index 0000000..8b62be9
--- /dev/null
+++ b/LinkCrawler/LinkCrawler/Utils/Extensions/UriExtensions.cs
@@ -0,0 +1,14 @@
+using System;
+
+namespace LinkCrawler.Utils.Extensions
+{
+ public static class UriExtensions
+ {
+ public static string RemoveSegments(this Uri uri)
+ {
+ var uriString = uri.ToString();
+ var segments = string.Join(string.Empty, uri.Segments);
+ return uriString.TrimEnd(segments);
+ }
+ }
+}
diff --git a/LinkCrawler/LinkCrawler/Utils/Outputs/ConsoleOutput.cs b/LinkCrawler/LinkCrawler/Utils/Outputs/ConsoleOutput.cs
index 1301c28..ece38a6 100644
--- a/LinkCrawler/LinkCrawler/Utils/Outputs/ConsoleOutput.cs
+++ b/LinkCrawler/LinkCrawler/Utils/Outputs/ConsoleOutput.cs
@@ -13,7 +13,12 @@ public void WriteError(IResponseModel responseModel)
public void WriteInfo(IResponseModel responseModel)
{
- Console.WriteLine(responseModel.ToString());
+ WriteInfo(new string[] { responseModel.ToString() });
+ }
+
+ public void WriteInfo(String[] Info)
+ {
+ foreach(string line in Info) Console.WriteLine(line);
}
}
}
diff --git a/LinkCrawler/LinkCrawler/Utils/Outputs/CsvOutput.cs b/LinkCrawler/LinkCrawler/Utils/Outputs/CsvOutput.cs
index 6e59dff..2c1d695 100644
--- a/LinkCrawler/LinkCrawler/Utils/Outputs/CsvOutput.cs
+++ b/LinkCrawler/LinkCrawler/Utils/Outputs/CsvOutput.cs
@@ -1,14 +1,14 @@
-using System;
-using System.IO;
-using LinkCrawler.Models;
+using LinkCrawler.Models;
using LinkCrawler.Utils.Settings;
+using System;
+using System.IO;
namespace LinkCrawler.Utils.Outputs
{
public class CsvOutput : IOutput, IDisposable
{
private readonly ISettings _settings;
- private StreamWriter _writer;
+ public TextWriter _writer;
public CsvOutput(ISettings settings)
{
@@ -20,8 +20,10 @@ private void Setup()
{
var fileMode = _settings.CsvOverwrite ? FileMode.Create : FileMode.Append;
var file = new FileStream(_settings.CsvFilePath, fileMode, FileAccess.Write);
- _writer = new StreamWriter(file);
+ var streamWriter = new StreamWriter(file) {AutoFlush = true};
+ _writer = TextWriter.Synchronized(streamWriter);
+
if (fileMode == FileMode.Create)
{
_writer.WriteLine("Code{0}Status{0}Url{0}Referer", _settings.CsvDelimiter);
@@ -38,6 +40,11 @@ public void WriteInfo(IResponseModel responseModel)
Write(responseModel);
}
+ public void WriteInfo(String[] Info)
+ {
+ // Do nothing - string info is only for console
+ }
+
private void Write(IResponseModel responseModel)
{
_writer?.WriteLine("{1}{0}{2}{0}{3}{0}{4}",
diff --git a/LinkCrawler/LinkCrawler/Utils/Outputs/IOutput.cs b/LinkCrawler/LinkCrawler/Utils/Outputs/IOutput.cs
index c924c13..4dcd64e 100644
--- a/LinkCrawler/LinkCrawler/Utils/Outputs/IOutput.cs
+++ b/LinkCrawler/LinkCrawler/Utils/Outputs/IOutput.cs
@@ -6,5 +6,6 @@ public interface IOutput
{
void WriteError(IResponseModel responseModel);
void WriteInfo(IResponseModel responseModel);
+ void WriteInfo(string[] InfoString);
}
}
diff --git a/LinkCrawler/LinkCrawler/Utils/Outputs/SlackOutput.cs b/LinkCrawler/LinkCrawler/Utils/Outputs/SlackOutput.cs
index 9454a69..143c05f 100644
--- a/LinkCrawler/LinkCrawler/Utils/Outputs/SlackOutput.cs
+++ b/LinkCrawler/LinkCrawler/Utils/Outputs/SlackOutput.cs
@@ -14,12 +14,17 @@ public SlackOutput(ISlackClient slackClient)
public void WriteError(IResponseModel responseModel)
{
- _slackClient.NotifySlack(responseModel);
+ _slackClient.NotifySlack(responseModel);
}
public void WriteInfo(IResponseModel responseModel)
{
// Write nothing to Slack
}
+
+ public void WriteInfo(string[] Info)
+ {
+ // Write nothing to Slack
+ }
}
}
diff --git a/LinkCrawler/LinkCrawler/Utils/Parsers/ValidUrlParser.cs b/LinkCrawler/LinkCrawler/Utils/Parsers/ValidUrlParser.cs
index 20f95a3..54d9097 100644
--- a/LinkCrawler/LinkCrawler/Utils/Parsers/ValidUrlParser.cs
+++ b/LinkCrawler/LinkCrawler/Utils/Parsers/ValidUrlParser.cs
@@ -12,7 +12,8 @@ public class ValidUrlParser : IValidUrlParser
public ValidUrlParser(ISettings settings)
{
Regex = new Regex(settings.ValidUrlRegex);
- BaseUrl = settings.BaseUrl;
+ var baseUri = new Uri(settings.BaseUrl);
+ BaseUrl = baseUri.RemoveSegments();
}
public bool Parse(string url, out string validUrl)
diff --git a/LinkCrawler/LinkCrawler/Utils/Settings/Constants.cs b/LinkCrawler/LinkCrawler/Utils/Settings/Constants.cs
index 33d4adc..434ad5b 100644
--- a/LinkCrawler/LinkCrawler/Utils/Settings/Constants.cs
+++ b/LinkCrawler/LinkCrawler/Utils/Settings/Constants.cs
@@ -17,6 +17,7 @@ public static class AppSettings
public const string CsvDelimiter = "Csv.Delimiter";
public const string SuccessHttpStatusCodes = "SuccessHttpStatusCodes";
public const string OutputProviders = "outputProviders";
+ public const string PrintSummary = "PrintSummary";
}
public static class Response
diff --git a/LinkCrawler/LinkCrawler/Utils/Settings/ISettings.cs b/LinkCrawler/LinkCrawler/Utils/Settings/ISettings.cs
index 8f82816..cc03c8f 100644
--- a/LinkCrawler/LinkCrawler/Utils/Settings/ISettings.cs
+++ b/LinkCrawler/LinkCrawler/Utils/Settings/ISettings.cs
@@ -27,5 +27,7 @@ public interface ISettings
string CsvDelimiter { get; }
bool IsSuccess(HttpStatusCode statusCode);
+
+ bool PrintSummary { get; }
}
}
diff --git a/LinkCrawler/LinkCrawler/Utils/Settings/MockSettings.cs b/LinkCrawler/LinkCrawler/Utils/Settings/MockSettings.cs
new file mode 100644
index 0000000..3f576d8
--- /dev/null
+++ b/LinkCrawler/LinkCrawler/Utils/Settings/MockSettings.cs
@@ -0,0 +1,44 @@
+using LinkCrawler.Utils.Extensions;
+using System.Net;
+
+namespace LinkCrawler.Utils.Settings {
+ public class MockSettings : ISettings {
+
+ public string BaseUrl => "https://github.com";
+
+ public bool CheckImages => true;
+
+ public string CsvDelimiter => ";";
+
+ public string CsvFilePath => @"C:\tmp\output.csv";
+
+ public bool CsvOverwrite => true;
+
+ public bool OnlyReportBrokenLinksToOutput => false;
+
+ public string SlackWebHookBotIconEmoji => ":homer:";
+
+ public string SlackWebHookBotMessageFormat => "*Doh! There is a link not working* Url: {0} Statuscode: {1} The link is placed on this page: {2}";
+
+ public string SlackWebHookBotName => "Homer Bot";
+ public bool PrintSummary => false;
+ private bool IncludeWebHookUrl { get; set; }
+ public string SlackWebHookUrl
+ {
+ get
+ {
+ return IncludeWebHookUrl ? @"https://hooks.slack.com/services/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" : "";
+ }
+ }
+
+ public string ValidUrlRegex => @"(^http[s]?:\/{2})|(^www)|(^\/{1,2})";
+
+ public bool IsSuccess(HttpStatusCode statusCode) {
+ return statusCode.IsSuccess("1xx,2xx,3xx");
+ }
+
+ public MockSettings(bool includeWebHookUrl) {
+ this.IncludeWebHookUrl = includeWebHookUrl;
+ }
+ }
+}
diff --git a/LinkCrawler/LinkCrawler/Utils/Settings/Settings.cs b/LinkCrawler/LinkCrawler/Utils/Settings/Settings.cs
index b9a6c7d..a560270 100644
--- a/LinkCrawler/LinkCrawler/Utils/Settings/Settings.cs
+++ b/LinkCrawler/LinkCrawler/Utils/Settings/Settings.cs
@@ -39,6 +39,9 @@ public class Settings : ISettings
public string CsvDelimiter =>
ConfigurationManager.AppSettings[Constants.AppSettings.CsvDelimiter];
+ public bool PrintSummary =>
+ ConfigurationManager.AppSettings[Constants.AppSettings.PrintSummary].ToBool();
+
public bool IsSuccess(HttpStatusCode statusCode)
{
var configuredCodes = ConfigurationManager.AppSettings[Constants.AppSettings.SuccessHttpStatusCodes] ?? "";
diff --git a/LinkCrawler_DotNet_7/LinkCrawler/LinkCrawler.Test/LinkCrawler.Test.csproj b/LinkCrawler_DotNet_7/LinkCrawler/LinkCrawler.Test/LinkCrawler.Test.csproj
new file mode 100644
index 0000000..9928ef5
--- /dev/null
+++ b/LinkCrawler_DotNet_7/LinkCrawler/LinkCrawler.Test/LinkCrawler.Test.csproj
@@ -0,0 +1,24 @@
+
+
+
+ net7.0
+ enable
+ enable
+
+ false
+
+
+
+
+
+
+ runtime; build; native; contentfiles; analyzers; buildtransitive
+ all
+
+
+ runtime; build; native; contentfiles; analyzers; buildtransitive
+ all
+
+
+
+
diff --git a/LinkCrawler_DotNet_7/LinkCrawler/LinkCrawler.Test/UnitTest1.cs b/LinkCrawler_DotNet_7/LinkCrawler/LinkCrawler.Test/UnitTest1.cs
new file mode 100644
index 0000000..c35856a
--- /dev/null
+++ b/LinkCrawler_DotNet_7/LinkCrawler/LinkCrawler.Test/UnitTest1.cs
@@ -0,0 +1,11 @@
+namespace LinkCrawler.Test
+{
+ public class UnitTest1
+ {
+ [Fact]
+ public void Test1()
+ {
+
+ }
+ }
+}
\ No newline at end of file
diff --git a/LinkCrawler_DotNet_7/LinkCrawler/LinkCrawler.Test/Usings.cs b/LinkCrawler_DotNet_7/LinkCrawler/LinkCrawler.Test/Usings.cs
new file mode 100644
index 0000000..8c927eb
--- /dev/null
+++ b/LinkCrawler_DotNet_7/LinkCrawler/LinkCrawler.Test/Usings.cs
@@ -0,0 +1 @@
+global using Xunit;
\ No newline at end of file
diff --git a/LinkCrawler_DotNet_7/LinkCrawler/LinkCrawler.sln b/LinkCrawler_DotNet_7/LinkCrawler/LinkCrawler.sln
new file mode 100644
index 0000000..be9dde9
--- /dev/null
+++ b/LinkCrawler_DotNet_7/LinkCrawler/LinkCrawler.sln
@@ -0,0 +1,25 @@
+
+Microsoft Visual Studio Solution File, Format Version 12.00
+# Visual Studio Version 17
+VisualStudioVersion = 17.4.33103.184
+MinimumVisualStudioVersion = 10.0.40219.1
+Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "LinkCrawler", "LinkCrawler\LinkCrawler.csproj", "{DD0C6451-84A7-4DA6-8BF5-32EB97BE32FE}"
+EndProject
+Global
+ GlobalSection(SolutionConfigurationPlatforms) = preSolution
+ Debug|Any CPU = Debug|Any CPU
+ Release|Any CPU = Release|Any CPU
+ EndGlobalSection
+ GlobalSection(ProjectConfigurationPlatforms) = postSolution
+ {DD0C6451-84A7-4DA6-8BF5-32EB97BE32FE}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
+ {DD0C6451-84A7-4DA6-8BF5-32EB97BE32FE}.Debug|Any CPU.Build.0 = Debug|Any CPU
+ {DD0C6451-84A7-4DA6-8BF5-32EB97BE32FE}.Release|Any CPU.ActiveCfg = Release|Any CPU
+ {DD0C6451-84A7-4DA6-8BF5-32EB97BE32FE}.Release|Any CPU.Build.0 = Release|Any CPU
+ EndGlobalSection
+ GlobalSection(SolutionProperties) = preSolution
+ HideSolutionNode = FALSE
+ EndGlobalSection
+ GlobalSection(ExtensibilityGlobals) = postSolution
+ SolutionGuid = {0815F252-9C5A-42C9-A1CC-743850B55836}
+ EndGlobalSection
+EndGlobal
diff --git a/LinkCrawler_DotNet_7/LinkCrawler/LinkCrawler/LinkCrawler.csproj b/LinkCrawler_DotNet_7/LinkCrawler/LinkCrawler/LinkCrawler.csproj
new file mode 100644
index 0000000..f02677b
--- /dev/null
+++ b/LinkCrawler_DotNet_7/LinkCrawler/LinkCrawler/LinkCrawler.csproj
@@ -0,0 +1,10 @@
+
+
+
+ Exe
+ net7.0
+ enable
+ enable
+
+
+
diff --git a/LinkCrawler_DotNet_7/LinkCrawler/LinkCrawler/Program.cs b/LinkCrawler_DotNet_7/LinkCrawler/LinkCrawler/Program.cs
new file mode 100644
index 0000000..3751555
--- /dev/null
+++ b/LinkCrawler_DotNet_7/LinkCrawler/LinkCrawler/Program.cs
@@ -0,0 +1,2 @@
+// See https://aka.ms/new-console-template for more information
+Console.WriteLine("Hello, World!");
diff --git a/README.md b/README.md
index bb68713..65d28a0 100644
--- a/README.md
+++ b/README.md
@@ -1,15 +1,21 @@
# LinkCrawler
-Simple C# console application that will crawl the given webpage for image-tags and hyperlinks. If some of them is not working, info will be sent to output.
+Simple C# console application that will crawl the given webpage for broken image-tags and hyperlinks. The result of this will be written to output. Right now we have these outputs: console, csv, slack.
+
+
+## Why?
+Because it could be useful to know when a webpage you have responsibility for displays broken links to its users. I have this running continuously, but you don't have to. For instance, after upgrading your CMS, changing database-scheme, migrating content etc, it can be relevant to know whether this did or did not introduce broken links. Just run this tool one time and you will know exactly how many links are broken, where they link to, and where they are located.
+
+## Build
+Clone repo :point_right: open solution in Visual Studio :point_right: build :facepunch:
+
+AppVeyor is used as CI, so when code is pushed to this repo the solution will get built and all tests will be run.
| Branch | Build status |
| :----- | :---------------------------------------|
| develop | [](https://ci.appveyor.com/project/hmol/linkcrawler/branch/develop) |
| master | [](https://ci.appveyor.com/project/hmol/linkcrawler/branch/master) |
-## Why?
-Because it could be useful to know when a webpage you have responsibility for displays broken links to it's users. I have this running continuously, but you don't have to. For instance, after upgrading your CMS, changing database-scheme, migrating content etc, it can be relevant to know if this did or did not not introduce broken links. Just run this tool one time and you will know exactly how many links are broken, where they link to, and where they are located.
-
-## App.Settings
+## AppSettings
| Key | Usage |
| :-------------------------- | :---------------------------------------|
@@ -22,16 +28,12 @@ Because it could be useful to know when a webpage you have responsibility for di
| ```Slack.WebHook.Bot.IconEmoji``` | Custom Emoji for slack bot |
| ```OnlyReportBrokenLinksToOutput``` | If true, only broken links will be reported to output. |
| ```Slack.WebHook.Bot.MessageFormat``` | String format message that will be sent to slack |
-| ```Csv.Enabled``` | Enable/disable CSV output |
| ```Csv.FilePath``` | File path for the CSV file |
| ```Csv.Overwrite``` | Whether to overwrite or append (if file exists) |
-| ```Csv.Delimiter ``` | Delimiter between columns in the CSV file (like ',' or ';') |
-
-## Build
-Clone repo :point_right: open solution in Visual Studio :point_right: build :facepunch:
+| ```Csv.Delimiter``` | Delimiter between columns in the CSV file (like ',' or ';') |
+| ```PrintSummary``` | If true, a summary will be printed when all links have been checked. |
-## Output to console
-
+There is also a `````` setting that controls which output should be used.
## Output to file
```LinkCrawler.exe >> crawl.log``` will save output to file.